/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "gvarargs.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
#define PUSH_ARGS_REVERSED  /* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

rtx store_expr ();
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
\f
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
}
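/* Illustrative sketch (added commentary, not code from the original file):
   the intended pairing of the two routines above around the compilation of
   a nested function.  F is a caller-provided struct function.  */
#if 0
{
  struct function f;

  save_expr_status (&f);     /* stash the per-function expr state */
  /* ... compile the nested function ... */
  restore_expr_status (&f);  /* resume the enclosing function */
}
#endif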
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, 0, 0, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
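/* Illustrative sketch (added commentary): the queue protocol.  VAR is an
   assumed register rtx and INC an assumed insn body that increments it,
   as built for a POSTINCREMENT_EXPR.  */
#if 0
{
  rtx q = enqueue_insn (var, inc);       /* schedule VAR's increment */
  rtx old = protect_from_queue (q, 0);   /* pre-increment value of VAR */
  /* ... emit insns that must see the old value ... */
  emit_queue ();                         /* now the increment is emitted */
}
#endif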

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      if (from_mode == SFmode && to_mode == DFmode)
        libcall = extendsfdf2_libfunc;
      else if (from_mode == DFmode && to_mode == SFmode)
        libcall = truncdfsf2_libfunc;
      else
        /* This conversion is not implemented yet.  There aren't any TFmode
           library calls.  */
        abort ();

      emit_library_call (libcall, 0, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, 0, lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                0, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, 0,
                              gen_rtx (equiv_code, to_mode, from));
      return;
    }

  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */  /* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
        {
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi
          if (HAVE_extendpsisi)
            {
              emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
           && ! MEM_VOLATILE_P (from)
           && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == REG
          || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* For extension to a wider mode, use an extend insn directly if one
     is available; otherwise convert via an intermediate mode.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
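/* Illustrative sketch (added commentary, not code from the original file):
   a typical use of convert_move.  The two pseudos are made up for the
   example.  */
#if 0
{
  rtx src = gen_reg_rtx (SImode);
  rtx dst = gen_reg_rtx (DImode);

  convert_move (dst, src, 0);  /* UNSIGNEDP == 0: sign-extend SI -> DI */
}
#endif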

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  x = protect_from_queue (x, 0);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
                      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
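/* Illustrative sketch (added commentary): convert_to_mode either reuses X
   in place via gen_lowpart or makes a fresh pseudo and converts into it.
   WIDE is an assumed DImode pseudo.  */
#if 0
{
  rtx wide = gen_reg_rtx (DImode);
  rtx narrow = convert_to_mode (SImode, wide, 1);  /* unsigned view of WIDE */
}
#endif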
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = 10000;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

#if defined (STRICT_ALIGNMENT) || defined (SLOW_UNALIGNED_ACCESS)
  if (align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;
#else
  align = MOVE_MAX;
#endif

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = VOIDmode; (int) tmode < (int) MAX_MACHINE_MODE;
           tmode = (enum machine_mode) ((int) tmode + 1))
        if (GET_MODE_CLASS (tmode) == MODE_INT
            && GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = 10000;

#if defined (STRICT_ALIGNMENT) || defined (SLOW_UNALIGNED_ACCESS)
  if (align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;
#else
  align = MOVE_MAX;
#endif

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = VOIDmode; (int) tmode < (int) MAX_MACHINE_MODE;
           tmode = (enum machine_mode) ((int) tmode + 1))
        if (GET_MODE_CLASS (tmode) == MODE_INT
            && GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
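/* Worked example (added commentary): on a 32-bit target whose widest
   usable integer mode here is SImode and with ALIGN == 4, L == 11
   decomposes greedily into 2 SImode moves (8 bytes), 1 HImode move
   (2 bytes) and 1 QImode move (1 byte) -- 4 insns in all.  Since 4 is
   below the default MOVE_RATIO of 15, emit_block_move below would choose
   move_by_pieces for such a copy.  */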

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 = (data->autinc_from
               ? gen_rtx (MEM, mode, data->from_addr)
               : change_address (data->from, mode,
                                 plus_constant (data->from_addr,
                                                data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_sub2_insn (data->to_addr,
                                  gen_rtx (CONST_INT, VOIDmode, size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_sub2_insn (data->from_addr,
                                  gen_rtx (CONST_INT, VOIDmode, size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_rtx (CONST_INT, VOIDmode, size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_rtx (CONST_INT, VOIDmode, size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
          < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
      if (HAVE_movstrqi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
        {
          rtx insn = gen_movstrqi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif
#ifdef HAVE_movstrhi
      if (HAVE_movstrhi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
        {
          rtx insn = gen_movstrhi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif
#ifdef HAVE_movstrsi
      if (HAVE_movstrsi)
        {
          rtx insn = gen_movstrsi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif
#ifdef HAVE_movstrdi
      if (HAVE_movstrdi)
        {
          rtx insn = gen_movstrdi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         size, Pmode);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         size, Pmode);
#endif
    }
}
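/* Illustrative sketch (added commentary): copying a 16-byte BLKmode
   object; X and Y are assumed BLKmode MEMs obtained elsewhere.  With the
   expected insn count below MOVE_RATIO (see the worked example above),
   the copy is done by pieces rather than via movstr or memcpy/bcopy.  */
#if 0
emit_block_move (x, y, gen_rtx (CONST_INT, VOIDmode, 16), 4);
#endif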
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                           gen_rtx (CONST_INT, VOIDmode, nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                            gen_rtx (CONST_INT, VOIDmode, nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         gen_rtx (CONST_INT, VOIDmode, size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         gen_rtx (CONST_INT, VOIDmode, size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
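/* Illustrative sketch (added commentary): zeroing storage.  BLK_MEM is an
   assumed BLKmode MEM; REG is an assumed word_mode pseudo.  The BLKmode
   case becomes a memset/bzero libcall, the scalar case a single move of
   const0_rtx.  */
#if 0
clear_storage (blk_mem, 32);
clear_storage (reg, GET_MODE_SIZE (word_mode));
#endif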

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
    {
      rtx last_insn = 0;

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }
      return last_insn;
    }
  else
    abort ();
}
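/* Illustrative sketch (added commentary): on a machine without a movdi
   pattern, a DImode move falls into the word loop above and becomes two
   word_mode moves via operand_subword.  DST and SRC are assumed DImode
   rtx's.  */
#if 0
rtx last = emit_move_insn (dst, src);
#endif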
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab,
                             temp,
                             gen_rtx (CONST_INT, VOIDmode, extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

static rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
#if defined (STRICT_ALIGNMENT) || defined (SLOW_UNALIGNED_ACCESS)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && (align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
#endif
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));

          move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = gen_rtx (CONST_INT, VOIDmode, INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     gen_rtx (CONST_INT, VOIDmode, used),
                                     0, 0, OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx (PLUS, Pmode,
                                                           args_addr,
                                                           args_so_far),
                                                  skip));

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            {
              move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
                              INTVAL (size), align);
              goto ret;
            }
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
          if (HAVE_movstrqi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
            {
              emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrhi
          if (HAVE_movstrhi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
            {
              emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrsi
          if (HAVE_movstrsi)
            {
              emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrdi
          if (HAVE_movstrdi)
            {
              emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
          /* If the source is referenced relative to the stack pointer,
             copy it to another register to stabilize it.  We do not need
             to do this if we know that we won't be changing sp.  */

          if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
              || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
            temp = copy_to_reg (temp);
#endif

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 0,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             size, Pmode);
#else
          emit_library_call (bcopy_libfunc, 0,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             size, Pmode);
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, 0, 0, align, 0, 0, 0, args_addr,
                          gen_rtx (CONST_INT, VOIDmode,
                                   args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        if (GET_CODE (args_so_far) == CONST_INT)
          addr
            = memory_address (mode,
                              plus_constant (args_addr, INTVAL (args_so_far)));
        else
          addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
                                                args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));
}
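/* Illustrative sketch (added commentary): pushing one word-sized scalar
   argument on a machine with push insns (ARGS_ADDR == 0), with no padding
   and no partial-register part.  VAL is an assumed SImode rtx; the
   argument order is X, MODE, TYPE, SIZE, ALIGN, PARTIAL, REG, EXTRA,
   ARGS_ADDR, ARGS_SO_FAR.  */
#if 0
emit_push_insn (val, SImode, 0, 0, PARM_BOUNDARY / BITS_PER_UNIT,
                0, 0, 0, 0, const0_rtx);
#endif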
\f
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.  */

void
emit_library_call (va_alist)
     va_dcl
{
  va_list p;
  struct args_size args_size;
  register int argnum;
  enum machine_mode outmode;
  int nargs;
  rtx fun;
  rtx orgfun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
               struct args_size offset; struct args_size size; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int no_queue = 0;
  rtx use_insns;

  va_start (p);
  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);

  args_size.constant = 0;
  args_size.var = 0;

  for (count = 0; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
        abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
         Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
        val = convert_to_mode (DFmode, val, 0), mode = DFmode;
#endif

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
          && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
        val = force_operand (val, 0);

      argvec[count].value = val;
      argvec[count].mode = mode;

#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, (tree)0, 1))
        abort ();
#endif

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, (tree)0, 1);
      if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
        abort ();
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
        = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, (tree)0, 1);
#else
      argvec[count].partial = 0;
#endif

      locate_and_pad_parm (mode, 0,
                           argvec[count].reg && argvec[count].partial == 0,
                           0, &args_size, &argvec[count].offset,
                           &argvec[count].size);

      if (argvec[count].size.var)
        abort ();

#ifndef REG_PARM_STACK_SPACE
      if (argvec[count].partial)
        argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
#endif

      if (argvec[count].reg == 0 || argvec[count].partial != 0
#ifdef REG_PARM_STACK_SPACE
          || 1
#endif
          )
        args_size.constant += argvec[count].size.constant;

#ifdef ACCUMULATE_OUTGOING_ARGS
      /* If this arg is actually passed on the stack, it might be
         clobbering something we already put there (this library call might
         be inside the evaluation of an argument to a function whose call
         requires the stack).  This will only occur when the library call
         has sufficient args to run out of argument registers.  Abort in
         this case; if this ever occurs, code must be added to save and
         restore the arg slot.  */

      if (argvec[count].reg == 0 || argvec[count].partial != 0)
        abort ();
#endif

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
    }
  va_end (p);

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

#ifdef STACK_BOUNDARY
  args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
                         / STACK_BYTES) * STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
  args_size.constant = MAX (args_size.constant,
                            REG_PARM_STACK_SPACE ((tree) 0));
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;
  args_size.constant = 0;
#endif

#ifndef PUSH_ROUNDING
  argblock = push_block (gen_rtx (CONST_INT, VOIDmode, args_size.constant),
                         0, 0);
#endif

#ifdef PUSH_ARGS_REVERSED
  inc = -1;
  argnum = nargs - 1;
#else
  inc = 1;
  argnum = 0;
#endif

  /* Push the args that need to be pushed.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (! (reg != 0 && partial == 0))
        emit_push_insn (val, mode, 0, 0, 0, partial, reg, 0, argblock,
                        gen_rtx (CONST_INT, VOIDmode,
                                 argvec[argnum].offset.constant));
      NO_DEFER_POP;
    }

#ifdef PUSH_ARGS_REVERSED
  argnum = nargs - 1;
#else
  argnum = 0;
#endif

  /* Now load any reg parms into their regs.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);
      NO_DEFER_POP;
    }

  /* For version 1.37, try deleting this entirely.  */
  if (! no_queue)
    emit_queue ();

  /* Any regs containing parms remain in use through the call.  */
  start_sequence ();
  for (count = 0; count < nargs; count++)
    if (argvec[count].reg != 0)
      emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));

  use_insns = get_insns ();
  end_sequence ();

  fun = prepare_call_address (fun, 0, &use_insns);

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */

  emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               outmode != VOIDmode ? hard_libcall_value (outmode) : 0,
               old_inhibit_defer_pop + 1, use_insns, no_queue);

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
}
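/* Illustrative sketch (added commentary): the calling convention is FUN,
   NO_QUEUE, OUTMODE, NARGS, then NARGS (rtx, machine_mode) pairs.  This
   mirrors the float-truncation call made in convert_move above; FROM and
   TO are assumed rtx's.  */
#if 0
emit_library_call (truncdfsf2_libfunc, 0, SFmode, 1, from, DFmode);
emit_move_insn (to, hard_libcall_value (SFmode));
#endif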
1880\f
1881/* Expand an assignment that stores the value of FROM into TO.
1882 If WANT_VALUE is nonzero, return an rtx for the value of TO.
1883 (This may contain a QUEUED rtx.)
1884 Otherwise, the returned value is not meaningful.
1885
1886 SUGGEST_REG is no longer actually used.
1887 It used to mean, copy the value through a register
1888 and return that register, if that is possible.
1889 But now we do this if WANT_VALUE.
1890
1891 If the value stored is a constant, we return the constant. */
1892
1893rtx
1894expand_assignment (to, from, want_value, suggest_reg)
1895 tree to, from;
1896 int want_value;
1897 int suggest_reg;
1898{
1899 register rtx to_rtx = 0;
1900 rtx result;
1901
1902 /* Don't crash if the lhs of the assignment was erroneous. */
1903
1904 if (TREE_CODE (to) == ERROR_MARK)
1905 return expand_expr (from, 0, VOIDmode, 0);
1906
1907 /* Assignment of a structure component needs special treatment
1908 if the structure component's rtx is not simply a MEM.
1909 Assignment of an array element at a constant index
1910 has the same problem. */
1911
1912 if (TREE_CODE (to) == COMPONENT_REF
1913 || TREE_CODE (to) == BIT_FIELD_REF
1914 || (TREE_CODE (to) == ARRAY_REF
1915 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
1916 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
1917 {
1918 enum machine_mode mode1;
1919 int bitsize;
1920 int bitpos;
1921 int unsignedp;
1922 int volatilep = 0;
1923 tree tem = get_inner_reference (to, &bitsize, &bitpos,
1924 &mode1, &unsignedp, &volatilep);
1925
1926 /* If we are going to use store_bit_field and extract_bit_field,
1927 make sure to_rtx will be safe for multiple use. */
1928
1929 if (mode1 == VOIDmode && want_value)
1930 tem = stabilize_reference (tem);
1931
1932 to_rtx = expand_expr (tem, 0, VOIDmode, 0);
1933 if (volatilep)
1934 {
1935 if (GET_CODE (to_rtx) == MEM)
1936 MEM_VOLATILE_P (to_rtx) = 1;
1937#if 0 /* This was turned off because, when a field is volatile
1938 in an object which is not volatile, the object may be in a register,
1939	and then we would abort here. */
1940 else
1941 abort ();
1942#endif
1943 }
1944
1945 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
1946 (want_value
1947 /* Spurious cast makes HPUX compiler happy. */
1948 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
1949 : VOIDmode),
1950 unsignedp,
1951 /* Required alignment of containing datum. */
1952 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
1953 int_size_in_bytes (TREE_TYPE (tem)));
1954 preserve_temp_slots (result);
1955 free_temp_slots ();
1956
1957 return result;
1958 }
1959
1960 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
1961 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
1962
1963 if (to_rtx == 0)
1964 to_rtx = expand_expr (to, 0, VOIDmode, 0);
1965
1966 /* In case we are returning the contents of an object which overlaps
1967 the place the value is being stored, use a safe function when copying
1968 a value through a pointer into a structure value return block. */
1969 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
1970 && current_function_returns_struct
1971 && !current_function_returns_pcc_struct)
1972 {
1973 rtx from_rtx = expand_expr (from, 0, VOIDmode, 0);
1974 rtx size = expr_size (from);
1975
1976#ifdef TARGET_MEM_FUNCTIONS
1977 emit_library_call (memcpy_libfunc, 0,
1978 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
1979 XEXP (from_rtx, 0), Pmode,
1980 size, Pmode);
1981#else
1982 emit_library_call (bcopy_libfunc, 0,
1983 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
1984 XEXP (to_rtx, 0), Pmode,
1985 size, Pmode);
1986#endif
1987
1988 preserve_temp_slots (to_rtx);
1989 free_temp_slots ();
1990 return to_rtx;
1991 }
1992
1993 /* Compute FROM and store the value in the rtx we got. */
1994
1995 result = store_expr (from, to_rtx, want_value);
1996 preserve_temp_slots (result);
1997 free_temp_slots ();
1998 return result;
1999}
2000
2001/* Generate code for computing expression EXP,
2002 and storing the value into TARGET.
2003 Returns TARGET or an equivalent value.
2004 TARGET may contain a QUEUED rtx.
2005
2006 If SUGGEST_REG is nonzero, copy the value through a register
2007 and return that register, if that is possible.
2008
2009 If the value stored is a constant, we return the constant. */
2010
2011rtx
2012store_expr (exp, target, suggest_reg)
2013 register tree exp;
2014 register rtx target;
2015 int suggest_reg;
2016{
2017 register rtx temp;
2018 int dont_return_target = 0;
2019
2020 if (TREE_CODE (exp) == COMPOUND_EXPR)
2021 {
2022 /* Perform first part of compound expression, then assign from second
2023 part. */
2024 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2025 emit_queue ();
2026 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2027 }
2028 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2029 {
2030 /* For conditional expression, get safe form of the target. Then
2031 test the condition, doing the appropriate assignment on either
2032 side. This avoids the creation of unnecessary temporaries.
2033 For non-BLKmode, it is more efficient not to do this. */
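
      /* E.g. for `s = cond ? s1 : s2;' where S is a BLKmode struct,
	 each arm below stores directly into TARGET, so no temporary
	 copy of the whole structure is needed.  */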
2034
2035 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2036
2037 emit_queue ();
2038 target = protect_from_queue (target, 1);
2039
2040 NO_DEFER_POP;
2041 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2042 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2043 emit_queue ();
2044 emit_jump_insn (gen_jump (lab2));
2045 emit_barrier ();
2046 emit_label (lab1);
2047 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2048 emit_queue ();
2049 emit_label (lab2);
2050 OK_DEFER_POP;
2051 return target;
2052 }
2053 else if (suggest_reg && GET_CODE (target) == MEM
2054 && GET_MODE (target) != BLKmode)
2055 /* If target is in memory and caller wants value in a register instead,
2056 arrange that. Pass TARGET as target for expand_expr so that,
2057 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2058 We know expand_expr will not use the target in that case. */
2059 {
2060 temp = expand_expr (exp, cse_not_expected ? 0 : target,
2061 GET_MODE (target), 0);
2062 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2063 temp = copy_to_reg (temp);
2064 dont_return_target = 1;
2065 }
2066 else if (queued_subexp_p (target))
2067 /* If target contains a postincrement, it is not safe
2068 to use as the returned value. It would access the wrong
2069 place by the time the queued increment gets output.
2070 So copy the value through a temporary and use that temp
2071 as the result. */
2072 {
2073 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2074 {
2075 /* Expand EXP into a new pseudo. */
2076 temp = gen_reg_rtx (GET_MODE (target));
2077 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2078 }
2079 else
2080 temp = expand_expr (exp, 0, GET_MODE (target), 0);
2081 dont_return_target = 1;
2082 }
2083 else
2084 {
2085 temp = expand_expr (exp, target, GET_MODE (target), 0);
2086 /* DO return TARGET if it's a specified hardware register.
2087 expand_return relies on this. */
2088 if (!(target && GET_CODE (target) == REG
2089 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2090 && CONSTANT_P (temp))
2091 dont_return_target = 1;
2092 }
2093
2094 /* If value was not generated in the target, store it there.
2095    Convert the value to TARGET's type first if necessary.  */
2096
2097 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2098 {
2099 target = protect_from_queue (target, 1);
2100 if (GET_MODE (temp) != GET_MODE (target)
2101 && GET_MODE (temp) != VOIDmode)
2102 {
2103 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2104 if (dont_return_target)
2105 {
2106 /* In this case, we will return TEMP,
2107 so make sure it has the proper mode.
2108 But don't forget to store the value into TARGET. */
2109 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2110 emit_move_insn (target, temp);
2111 }
2112 else
2113 convert_move (target, temp, unsignedp);
2114 }
2115
2116 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2117 {
2118 /* Handle copying a string constant into an array.
2119 The string constant may be shorter than the array.
2120 So copy just the string's actual length, and clear the rest. */
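	  /* For example, for `char buf[8] = "hi";' TREE_STRING_LENGTH
	     is 3 (the terminating null is counted), so we move 3 bytes
	     and then clear the remaining 5.  */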
2121 rtx size;
2122
2123 emit_block_move (target, temp,
2124 gen_rtx (CONST_INT, VOIDmode,
2125 TREE_STRING_LENGTH (exp)),
2126 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2127
2128 temp = plus_constant (XEXP (target, 0), TREE_STRING_LENGTH (exp));
2129 size = plus_constant (expr_size (exp), - TREE_STRING_LENGTH (exp));
2130 if (size != const0_rtx)
2131 {
2132#ifdef TARGET_MEM_FUNCTIONS
2133 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2134 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2135#else
2136 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2137 temp, Pmode, size, Pmode);
2138#endif
2139 }
2140 }
2141 else if (GET_MODE (temp) == BLKmode)
2142 emit_block_move (target, temp, expr_size (exp),
2143 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2144 else
2145 emit_move_insn (target, temp);
2146 }
2147 if (dont_return_target)
2148 return temp;
2149 return target;
2150}
2151\f
2152/* Store the value of constructor EXP into the rtx TARGET.
2153 TARGET is either a REG or a MEM. */
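
/* For example, for `struct { int a, b; } x = { 1, 2 };'
   CONSTRUCTOR_ELTS pairs each FIELD_DECL with its value, and the loop
   below stores 1 and 2 into the corresponding fields of TARGET.  */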
2154
2155static void
2156store_constructor (exp, target)
2157 tree exp;
2158 rtx target;
2159{
2160 /* We know our target cannot conflict, since safe_from_p has been called. */
2161#if 0
2162 /* Don't try copying piece by piece into a hard register
2163 since that is vulnerable to being clobbered by EXP.
2164 Instead, construct in a pseudo register and then copy it all. */
2165 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2166 {
2167 rtx temp = gen_reg_rtx (GET_MODE (target));
2168 store_constructor (exp, temp);
2169 emit_move_insn (target, temp);
2170 return;
2171 }
2172#endif
2173
2174 if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2175 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE)
2176 {
2177 register tree elt;
2178
2179 if (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE)
2180 /* Inform later passes that the whole union value is dead. */
2181 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2182 /* If the constructor has fewer fields than the structure,
2183 clear the whole structure first. */
2184 else if (list_length (CONSTRUCTOR_ELTS (exp))
2185 != list_length (TYPE_FIELDS (TREE_TYPE (exp))))
2186 clear_storage (target, int_size_in_bytes (TREE_TYPE (exp)));
2187 else
2188 /* Inform later passes that the old value is dead. */
2189 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2190
2191 /* Store each element of the constructor into
2192 the corresponding field of TARGET. */
2193
2194 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2195 {
2196 register tree field = TREE_PURPOSE (elt);
2197 register enum machine_mode mode;
2198 int bitsize;
2199 int bitpos;
2200 int unsignedp;
2201
2202 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2203 unsignedp = TREE_UNSIGNED (field);
2204 mode = DECL_MODE (field);
2205 if (DECL_BIT_FIELD (field))
2206 mode = VOIDmode;
2207
2208 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2209 /* ??? This case remains to be written. */
2210 abort ();
2211
2212 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2213
2214 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2215 /* The alignment of TARGET is
2216 at least what its type requires. */
2217 VOIDmode, 0,
2218 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
2219 int_size_in_bytes (TREE_TYPE (exp)));
2220 }
2221 }
2222 else if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE)
2223 {
2224 register tree elt;
2225 register int i;
2226 tree domain = TYPE_DOMAIN (TREE_TYPE (exp));
2227 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2228 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2229 tree elttype = TREE_TYPE (TREE_TYPE (exp));
2230
2231      /* If the constructor has fewer elements than the array,
2232	 clear the whole array first. */
2233
2234 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1)
2235 clear_storage (target, maxelt - minelt + 1);
2236 else
2237 /* Inform later passes that the old value is dead. */
2238 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2239
2240 /* Store each element of the constructor into
2241 the corresponding element of TARGET, determined
2242 by counting the elements. */
2243 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2244 elt;
2245 elt = TREE_CHAIN (elt), i++)
2246 {
2247 register enum machine_mode mode;
2248 int bitsize;
2249 int bitpos;
2250 int unsignedp;
2251
2252 mode = TYPE_MODE (elttype);
2253 bitsize = GET_MODE_BITSIZE (mode);
2254 unsignedp = TREE_UNSIGNED (elttype);
2255
2256 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2257
2258 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2259 /* The alignment of TARGET is
2260 at least what its type requires. */
2261 VOIDmode, 0,
2262 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
2263 int_size_in_bytes (TREE_TYPE (exp)));
2264 }
2265 }
2266
2267 else
2268 abort ();
2269}
2270
2271/* Store the value of EXP (an expression tree)
2272 into a subfield of TARGET which has mode MODE and occupies
2273 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2274 If MODE is VOIDmode, it means that we are storing into a bit-field.
2275
2276 If VALUE_MODE is VOIDmode, return nothing in particular.
2277 UNSIGNEDP is not used in this case.
2278
2279 Otherwise, return an rtx for the value stored. This rtx
2280 has mode VALUE_MODE if that is convenient to do.
2281 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2282
2283 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2284 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
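
/* For example, storing into `s.f' of `struct { unsigned f : 5; } s'
   uses BITSIZE == 5, BITPOS == 0 and MODE == VOIDmode, selecting the
   bit-field path below.  */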
2285
2286static rtx
2287store_field (target, bitsize, bitpos, mode, exp, value_mode,
2288 unsignedp, align, total_size)
2289 rtx target;
2290 int bitsize, bitpos;
2291 enum machine_mode mode;
2292 tree exp;
2293 enum machine_mode value_mode;
2294 int unsignedp;
2295 int align;
2296 int total_size;
2297{
2298 int width_mask = 0;
2299
2300 if (bitsize < HOST_BITS_PER_INT)
2301 width_mask = (1 << bitsize) - 1;
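  /* E.g. BITSIZE == 5 gives WIDTH_MASK == 0x1f; it is used below to
     recover the value just stored without refetching the bit-field.  */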
2302
2303 /* If we are storing into an unaligned field of an aligned union that is
2304 in a register, we may have the mode of TARGET being an integer mode but
2305 MODE == BLKmode. In that case, get an aligned object whose size and
2306 alignment are the same as TARGET and store TARGET into it (we can avoid
2307 the store if the field being stored is the entire width of TARGET). Then
2308 call ourselves recursively to store the field into a BLKmode version of
2309 that object. Finally, load from the object into TARGET. This is not
2310 very efficient in general, but should only be slightly more expensive
2311 than the otherwise-required unaligned accesses. Perhaps this can be
2312 cleaned up later. */
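
  /* Concretely: if TARGET is, say, (reg:SI 70) and MODE is BLKmode,
     we allocate an SImode stack temp, copy TARGET into it when the
     field is narrower than the register, store the field into a
     BLKmode view of the temp, and then copy the temp back.  */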
2313
2314 if (mode == BLKmode
2315 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2316 {
2317 rtx object = assign_stack_temp (GET_MODE (target),
2318 GET_MODE_SIZE (GET_MODE (target)), 0);
2319 rtx blk_object = copy_rtx (object);
2320
2321 PUT_MODE (blk_object, BLKmode);
2322
2323 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2324 emit_move_insn (object, target);
2325
2326 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2327 align, total_size);
2328
2329 emit_move_insn (target, object);
2330
2331 return target;
2332 }
2333
2334 /* If the structure is in a register or if the component
2335 is a bit field, we cannot use addressing to access it.
2336 Use bit-field techniques or SUBREG to store in it. */
2337
2338 if (mode == VOIDmode || GET_CODE (target) == REG
2339 || GET_CODE (target) == SUBREG)
2340 {
2341 rtx temp = expand_expr (exp, 0, VOIDmode, 0);
2342 /* Store the value in the bitfield. */
2343 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2344 if (value_mode != VOIDmode)
2345 {
2346 /* The caller wants an rtx for the value. */
2347 /* If possible, avoid refetching from the bitfield itself. */
2348 if (width_mask != 0
2349 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2350 return expand_and (temp,
2351 gen_rtx (CONST_INT, VOIDmode, width_mask), 0);
2352 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2353 0, value_mode, 0, align, total_size);
2354 }
2355 return const0_rtx;
2356 }
2357 else
2358 {
2359 rtx addr = XEXP (target, 0);
2360 rtx to_rtx;
2361
2362 /* If a value is wanted, it must be the lhs;
2363 so make the address stable for multiple use. */
2364
2365 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2366 && ! CONSTANT_ADDRESS_P (addr)
2367 /* A frame-pointer reference is already stable. */
2368 && ! (GET_CODE (addr) == PLUS
2369 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2370 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2371 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2372 addr = copy_to_reg (addr);
2373
2374 /* Now build a reference to just the desired component. */
2375
2376 to_rtx = change_address (target, mode,
2377 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2378 MEM_IN_STRUCT_P (to_rtx) = 1;
2379
2380 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2381 }
2382}
2383\f
2384/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2385 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2386 ARRAY_REFs at constant positions and find the ultimate containing object,
2387 which we return.
2388
2389 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2390 bit position, and *PUNSIGNEDP to the signedness of the field.
2391
2392 If any of the extraction expressions is volatile,
2393 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2394
2395 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2396 is a mode that can be used to access the field. In that case, *PBITSIZE
2397 is redundant. */
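
/* For example, for `s.in.d' where
     struct { int x; struct { char c, d; } in; } s;
   this typically returns the decl for `s' with *PBITSIZE == 8,
   *PBITPOS == 40 (32 bits of `x' plus 8 bits of `c') and
   *PMODE == QImode.  */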
2398
2399tree
2400get_inner_reference (exp, pbitsize, pbitpos, pmode, punsignedp, pvolatilep)
2401 tree exp;
2402 int *pbitsize;
2403 int *pbitpos;
2404 enum machine_mode *pmode;
2405 int *punsignedp;
2406 int *pvolatilep;
2407{
2408 tree size_tree = 0;
2409 enum machine_mode mode = VOIDmode;
2410
2411 if (TREE_CODE (exp) == COMPONENT_REF)
2412 {
2413 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2414 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2415 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2416 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2417 }
2418 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2419 {
2420 size_tree = TREE_OPERAND (exp, 1);
2421 *punsignedp = TREE_UNSIGNED (exp);
2422 }
2423 else
2424 {
2425 mode = TYPE_MODE (TREE_TYPE (exp));
2426 *pbitsize = GET_MODE_BITSIZE (mode);
2427 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2428 }
2429
2430 if (size_tree)
2431 {
2432 if (TREE_CODE (size_tree) != INTEGER_CST)
2433 abort ();
2434
2435 *pbitsize = TREE_INT_CST_LOW (size_tree);
2436 }
2437
2438 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2439 and find the ultimate containing object. */
2440
2441 *pbitpos = 0;
2442
2443 while (1)
2444 {
2445 if (TREE_CODE (exp) == COMPONENT_REF)
2446 {
2447 tree field = TREE_OPERAND (exp, 1);
2448
2449 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2450 /* ??? This case remains to be written. */
2451 abort ();
2452
2453 *pbitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2454 if (TREE_THIS_VOLATILE (exp))
2455 *pvolatilep = 1;
2456 }
2457 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2458 {
2459 if (TREE_CODE (TREE_OPERAND (exp, 2)) != INTEGER_CST)
2460 /* ??? This case remains to be written. */
2461 abort ();
2462
2463 *pbitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
2464 if (TREE_THIS_VOLATILE (exp))
2465 *pvolatilep = 1;
2466 }
2467 else if (TREE_CODE (exp) == ARRAY_REF
2468 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2469 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2470 {
2471 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2472 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2473 if (TREE_THIS_VOLATILE (exp))
2474 *pvolatilep = 1;
2475 }
2476 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2477 && ! ((TREE_CODE (exp) == NOP_EXPR
2478 || TREE_CODE (exp) == CONVERT_EXPR)
2479 && (TYPE_MODE (TREE_TYPE (exp))
2480 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2481 break;
2482 exp = TREE_OPERAND (exp, 0);
2483 }
2484
2485 /* If this was a bit-field, see if there is a mode that allows direct
2486 access in case EXP is in memory. */
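  /* For example, an 8-bit field at an 8-bit-aligned position can be
     accessed directly in QImode.  */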
2487 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2488 {
2489 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2490 if (mode == BLKmode)
2491 mode = VOIDmode;
2492 }
2493
2494 *pmode = mode;
2495
2496 return exp;
2497}
2498\f
2499/* Given an rtx VALUE that may contain additions and multiplications,
2500 return an equivalent value that just refers to a register or memory.
2501 This is done by generating instructions to perform the arithmetic
2502 and returning a pseudo-register containing the value. */
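
/* For example, given (plus:SI (reg 60) (reg 61)) this emits an add
   through expand_binop and returns the pseudo holding the sum; a
   VALUE that is already a register or memory is returned unchanged.  */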
2503
2504rtx
2505force_operand (value, target)
2506 rtx value, target;
2507{
2508 register optab binoptab = 0;
2509 /* Use a temporary to force order of execution of calls to
2510 `force_operand'. */
2511 rtx tmp;
2512 register rtx op2;
2513 /* Use subtarget as the target for operand 0 of a binary operation. */
2514 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2515
2516 if (GET_CODE (value) == PLUS)
2517 binoptab = add_optab;
2518 else if (GET_CODE (value) == MINUS)
2519 binoptab = sub_optab;
2520 else if (GET_CODE (value) == MULT)
2521 {
2522 op2 = XEXP (value, 1);
2523 if (!CONSTANT_P (op2)
2524 && !(GET_CODE (op2) == REG && op2 != subtarget))
2525 subtarget = 0;
2526 tmp = force_operand (XEXP (value, 0), subtarget);
2527 return expand_mult (GET_MODE (value), tmp,
2528 force_operand (op2, 0),
2529 target, 0);
2530 }
2531
2532 if (binoptab)
2533 {
2534 op2 = XEXP (value, 1);
2535 if (!CONSTANT_P (op2)
2536 && !(GET_CODE (op2) == REG && op2 != subtarget))
2537 subtarget = 0;
2538 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2539 {
2540 binoptab = add_optab;
2541 op2 = negate_rtx (GET_MODE (value), op2);
2542 }
2543
2544 /* Check for an addition with OP2 a constant integer and our first
2545 operand a PLUS of a virtual register and something else. In that
2546 case, we want to emit the sum of the virtual register and the
2547 constant first and then add the other value. This allows virtual
2548 register instantiation to simply modify the constant rather than
2549 creating another one around this addition. */
2550 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2551 && GET_CODE (XEXP (value, 0)) == PLUS
2552 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2553 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2554 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2555 {
2556 rtx temp = expand_binop (GET_MODE (value), binoptab,
2557 XEXP (XEXP (value, 0), 0), op2,
2558 subtarget, 0, OPTAB_LIB_WIDEN);
2559 return expand_binop (GET_MODE (value), binoptab, temp,
2560 force_operand (XEXP (XEXP (value, 0), 1), 0),
2561 target, 0, OPTAB_LIB_WIDEN);
2562 }
2563
2564 tmp = force_operand (XEXP (value, 0), subtarget);
2565 return expand_binop (GET_MODE (value), binoptab, tmp,
2566 force_operand (op2, 0),
2567 target, 0, OPTAB_LIB_WIDEN);
2568      /* We give UNSIGNEDP = 0 to expand_binop
2569 because the only operations we are expanding here are signed ones. */
2570 }
2571 return value;
2572}
2573\f
2574/* Subroutine of expand_expr:
2575 save the non-copied parts (LIST) of an expr (LHS), and return a list
2576 which can restore these values to their previous values,
2577 should something modify their storage. */
2578
2579static tree
2580save_noncopied_parts (lhs, list)
2581 tree lhs;
2582 tree list;
2583{
2584 tree tail;
2585 tree parts = 0;
2586
2587 for (tail = list; tail; tail = TREE_CHAIN (tail))
2588 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2589 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2590 else
2591 {
2592 tree part = TREE_VALUE (tail);
2593 tree part_type = TREE_TYPE (part);
2594 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part, 0);
2595 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2596 int_size_in_bytes (part_type), 0);
2597 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2598 target = change_address (target, TYPE_MODE (part_type), 0);
2599 parts = tree_cons (to_be_saved,
2600 build (RTL_EXPR, part_type, 0, (tree) target),
2601 parts);
2602 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2603 }
2604 return parts;
2605}
2606
2607/* Subroutine of expand_expr:
2608 record the non-copied parts (LIST) of an expr (LHS), and return a list
2609 which specifies the initial values of these parts. */
2610
2611static tree
2612init_noncopied_parts (lhs, list)
2613 tree lhs;
2614 tree list;
2615{
2616 tree tail;
2617 tree parts = 0;
2618
2619 for (tail = list; tail; tail = TREE_CHAIN (tail))
2620 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2621 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2622 else
2623 {
2624 tree part = TREE_VALUE (tail);
2625 tree part_type = TREE_TYPE (part);
2626 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part, 0);
2627 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2628 }
2629 return parts;
2630}
2631
2632/* Subroutine of expand_expr: return nonzero iff there is no way that
2633 EXP can reference X, which is being modified. */
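
/* For example, if X is (reg 65) and EXP is `a + 1' where `a' lives in
   (reg 65), the recursion reaches the decl, finds DECL_RTL equal to X,
   and returns 0.  */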
2634
2635static int
2636safe_from_p (x, exp)
2637 rtx x;
2638 tree exp;
2639{
2640 rtx exp_rtl = 0;
2641 int i, nops;
2642
2643 if (x == 0)
2644 return 1;
2645
2646  /* If this is a subreg of a hard register, declare it unsafe; otherwise,
2647 find the underlying pseudo. */
2648 if (GET_CODE (x) == SUBREG)
2649 {
2650 x = SUBREG_REG (x);
2651 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2652 return 0;
2653 }
2654
2655 /* If X is a location in the outgoing argument area, it is always safe. */
2656 if (GET_CODE (x) == MEM
2657 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2658 || (GET_CODE (XEXP (x, 0)) == PLUS
2659 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2660 return 1;
2661
2662 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2663 {
2664 case 'd':
2665 exp_rtl = DECL_RTL (exp);
2666 break;
2667
2668 case 'c':
2669 return 1;
2670
2671 case 'x':
2672 if (TREE_CODE (exp) == TREE_LIST)
2673 return (safe_from_p (x, TREE_VALUE (exp))
2674 && (TREE_CHAIN (exp) == 0
2675 || safe_from_p (x, TREE_CHAIN (exp))));
2676 else
2677 return 0;
2678
2679 case '1':
2680 return safe_from_p (x, TREE_OPERAND (exp, 0));
2681
2682 case '2':
2683 case '<':
2684 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2685 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2686
2687 case 'e':
2688 case 'r':
2689 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2690 the expression. If it is set, we conflict iff we are that rtx or
2691 both are in memory. Otherwise, we check all operands of the
2692 expression recursively. */
2693
2694 switch (TREE_CODE (exp))
2695 {
2696 case ADDR_EXPR:
2697 return staticp (TREE_OPERAND (exp, 0));
2698
2699 case INDIRECT_REF:
2700 if (GET_CODE (x) == MEM)
2701 return 0;
2702 break;
2703
2704 case CALL_EXPR:
2705 exp_rtl = CALL_EXPR_RTL (exp);
2706 if (exp_rtl == 0)
2707 {
2708 /* Assume that the call will clobber all hard registers and
2709 all of memory. */
2710 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2711 || GET_CODE (x) == MEM)
2712 return 0;
2713 }
2714
2715 break;
2716
2717 case RTL_EXPR:
2718 exp_rtl = RTL_EXPR_RTL (exp);
2719 if (exp_rtl == 0)
2720 /* We don't know what this can modify. */
2721 return 0;
2722
2723 break;
2724
2725 case WITH_CLEANUP_EXPR:
2726 exp_rtl = RTL_EXPR_RTL (exp);
2727 break;
2728
2729 case SAVE_EXPR:
2730 exp_rtl = SAVE_EXPR_RTL (exp);
2731 break;
2732
2733 case METHOD_CALL_EXPR:
2734	  /* This takes an rtx argument, but shouldn't appear here. */
2735 abort ();
2736 }
2737
2738 /* If we have an rtx, we do not need to scan our operands. */
2739 if (exp_rtl)
2740 break;
2741
2742 nops = tree_code_length[(int) TREE_CODE (exp)];
2743 for (i = 0; i < nops; i++)
2744 if (TREE_OPERAND (exp, i) != 0
2745 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
2746 return 0;
2747 }
2748
2749  /* If we have an rtx, find any enclosed object. Then see if we conflict
2750 with it. */
2751 if (exp_rtl)
2752 {
2753 if (GET_CODE (exp_rtl) == SUBREG)
2754 {
2755 exp_rtl = SUBREG_REG (exp_rtl);
2756 if (GET_CODE (exp_rtl) == REG
2757 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
2758 return 0;
2759 }
2760
2761 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
2762 are memory and EXP is not readonly. */
2763 return ! (rtx_equal_p (x, exp_rtl)
2764 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
2765 && ! TREE_READONLY (exp)));
2766 }
2767
2768 /* If we reach here, it is safe. */
2769 return 1;
2770}
2771
2772/* Subroutine of expand_expr: return nonzero iff EXP is an
2773 expression whose type is statically determinable. */
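
/* For example, a VAR_DECL yields 1, while a plain INDIRECT_REF such as
   `*p' yields 0, since the object P points to may really have some
   other type.  */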
2774
2775static int
2776fixed_type_p (exp)
2777 tree exp;
2778{
2779 if (TREE_CODE (exp) == PARM_DECL
2780 || TREE_CODE (exp) == VAR_DECL
2781 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
2782 || TREE_CODE (exp) == COMPONENT_REF
2783 || TREE_CODE (exp) == ARRAY_REF)
2784 return 1;
2785 return 0;
2786}
2787\f
2788/* expand_expr: generate code for computing expression EXP.
2789 An rtx for the computed value is returned. The value is never null.
2790 In the case of a void EXP, const0_rtx is returned.
2791
2792 The value may be stored in TARGET if TARGET is nonzero.
2793 TARGET is just a suggestion; callers must assume that
2794 the rtx returned may not be the same as TARGET.
2795
2796 If TARGET is CONST0_RTX, it means that the value will be ignored.
2797
2798 If TMODE is not VOIDmode, it suggests generating the
2799 result in mode TMODE. But this is done only when convenient.
2800    Otherwise, TMODE is ignored and the value is generated in its natural mode.
2801 TMODE is just a suggestion; callers must assume that
2802 the rtx returned may not have mode TMODE.
2803
2804 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
2805 with a constant address even if that address is not normally legitimate.
2806 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
2807
2808 If MODIFIER is EXPAND_SUM then when EXP is an addition
2809 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
2810 or a nest of (PLUS ...) and (MINUS ...) where the terms are
2811 products as above, or REG or MEM, or constant.
2812 Ordinarily in such cases we would output mul or add instructions
2813 and then return a pseudo reg containing the sum.
2814
2815 EXPAND_INITIALIZER is much like EXPAND_SUM except that
2816 it also marks a label as absolutely required (it can't be dead).
2817    This is used for outputting expressions used in initializers. */
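
/* For example, expanding `p + 4' with EXPAND_SUM may simply return
   (plus:SI (reg 65) (const_int 4)) for use in a memory address, where
   the normal path would emit an add insn into a new pseudo.  */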
2818
2819rtx
2820expand_expr (exp, target, tmode, modifier)
2821 register tree exp;
2822 rtx target;
2823 enum machine_mode tmode;
2824 enum expand_modifier modifier;
2825{
2826 register rtx op0, op1, temp;
2827 tree type = TREE_TYPE (exp);
2828 int unsignedp = TREE_UNSIGNED (type);
2829 register enum machine_mode mode = TYPE_MODE (type);
2830 register enum tree_code code = TREE_CODE (exp);
2831 optab this_optab;
2832 /* Use subtarget as the target for operand 0 of a binary operation. */
2833 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2834 rtx original_target = target;
2835 int ignore = target == const0_rtx;
2836 tree context;
2837
2838 /* Don't use hard regs as subtargets, because the combiner
2839 can only handle pseudo regs. */
2840 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
2841 subtarget = 0;
2842 /* Avoid subtargets inside loops,
2843 since they hide some invariant expressions. */
2844 if (preserve_subexpressions_p ())
2845 subtarget = 0;
2846
2847 if (ignore) target = 0, original_target = 0;
2848
2849  /* If we will do cse, generate all results into pseudo registers
2850 since 1) that allows cse to find more things
2851 and 2) otherwise cse could produce an insn the machine
2852 cannot support. */
2853
2854 if (! cse_not_expected && mode != BLKmode && target
2855 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
2856 target = subtarget;
2857
2858 /* Ensure we reference a volatile object even if value is ignored. */
2859 if (ignore && TREE_THIS_VOLATILE (exp)
2860 && mode != VOIDmode && mode != BLKmode)
2861 {
2862 target = gen_reg_rtx (mode);
2863 temp = expand_expr (exp, target, VOIDmode, modifier);
2864 if (temp != target)
2865 emit_move_insn (target, temp);
2866 return target;
2867 }
2868
2869 switch (code)
2870 {
2871 case LABEL_DECL:
2872 if (modifier == EXPAND_INITIALIZER)
2873 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
2874 label_rtx (exp), forced_labels);
2875 return gen_rtx (MEM, FUNCTION_MODE,
2876 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
2877
2878 case PARM_DECL:
2879 if (DECL_RTL (exp) == 0)
2880 {
2881 error_with_decl (exp, "prior parameter's size depends on `%s'");
2882 return const0_rtx;
2883 }
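      /* ... fall through ... */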
2884
2885 case FUNCTION_DECL:
2886 case VAR_DECL:
2887 case RESULT_DECL:
2888 if (DECL_RTL (exp) == 0)
2889 abort ();
2890 /* Ensure variable marked as used
2891 even if it doesn't go through a parser. */
2892 TREE_USED (exp) = 1;
2893 /* Handle variables inherited from containing functions. */
2894 context = decl_function_context (exp);
2895
2896 /* We treat inline_function_decl as an alias for the current function
2897 because that is the inline function whose vars, types, etc.
2898 are being merged into the current function.
2899 See expand_inline_function. */
2900 if (context != 0 && context != current_function_decl
2901 && context != inline_function_decl
2902 /* If var is static, we don't need a static chain to access it. */
2903 && ! (GET_CODE (DECL_RTL (exp)) == MEM
2904 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
2905 {
2906 rtx addr;
2907
2908 /* Mark as non-local and addressable. */
2909 TREE_NONLOCAL (exp) = 1;
2910 mark_addressable (exp);
2911 if (GET_CODE (DECL_RTL (exp)) != MEM)
2912 abort ();
2913 addr = XEXP (DECL_RTL (exp), 0);
2914 if (GET_CODE (addr) == MEM)
2915 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
2916 else
2917 addr = fix_lexical_addr (addr, exp);
2918 return change_address (DECL_RTL (exp), mode, addr);
2919 }
2920 /* This is the case of an array whose size is to be determined
2921 from its initializer, while the initializer is still being parsed.
2922 See expand_decl. */
2923 if (GET_CODE (DECL_RTL (exp)) == MEM
2924 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
2925 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
2926 XEXP (DECL_RTL (exp), 0));
2927 if (GET_CODE (DECL_RTL (exp)) == MEM
2928 && modifier != EXPAND_CONST_ADDRESS
2929 && modifier != EXPAND_SUM
2930 && modifier != EXPAND_INITIALIZER)
2931 {
2932 /* DECL_RTL probably contains a constant address.
2933 On RISC machines where a constant address isn't valid,
2934 make some insns to get that address into a register. */
2935 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
2936 || (flag_force_addr
2937 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
2938 return change_address (DECL_RTL (exp), VOIDmode,
2939 copy_rtx (XEXP (DECL_RTL (exp), 0)));
2940 }
2941 return DECL_RTL (exp);
2942
2943 case INTEGER_CST:
2944 return immed_double_const (TREE_INT_CST_LOW (exp),
2945 TREE_INT_CST_HIGH (exp),
2946 mode);
2947
2948 case CONST_DECL:
2949 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
2950
2951 case REAL_CST:
2952 /* If optimized, generate immediate CONST_DOUBLE
2953 which will be turned into memory by reload if necessary.
2954
2955 We used to force a register so that loop.c could see it. But
2956 this does not allow gen_* patterns to perform optimizations with
2957 the constants. It also produces two insns in cases like "x = 1.0;".
2958 On most machines, floating-point constants are not permitted in
2959 many insns, so we'd end up copying it to a register in any case.
2960
2961 Now, we do the copying in expand_binop, if appropriate. */
2962 return immed_real_const (exp);
2963
2964 case COMPLEX_CST:
2965 case STRING_CST:
2966 if (! TREE_CST_RTL (exp))
2967 output_constant_def (exp);
2968
2969 /* TREE_CST_RTL probably contains a constant address.
2970 On RISC machines where a constant address isn't valid,
2971 make some insns to get that address into a register. */
2972 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
2973 && modifier != EXPAND_CONST_ADDRESS
2974 && modifier != EXPAND_INITIALIZER
2975 && modifier != EXPAND_SUM
2976 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
2977 return change_address (TREE_CST_RTL (exp), VOIDmode,
2978 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
2979 return TREE_CST_RTL (exp);
2980
2981 case SAVE_EXPR:
2982 context = decl_function_context (exp);
2983 /* We treat inline_function_decl as an alias for the current function
2984 because that is the inline function whose vars, types, etc.
2985 are being merged into the current function.
2986 See expand_inline_function. */
2987 if (context == current_function_decl || context == inline_function_decl)
2988 context = 0;
2989
2990 /* If this is non-local, handle it. */
2991 if (context)
2992 {
2993 temp = SAVE_EXPR_RTL (exp);
2994 if (temp && GET_CODE (temp) == REG)
2995 {
2996 put_var_into_stack (exp);
2997 temp = SAVE_EXPR_RTL (exp);
2998 }
2999 if (temp == 0 || GET_CODE (temp) != MEM)
3000 abort ();
3001 return change_address (temp, mode,
3002 fix_lexical_addr (XEXP (temp, 0), exp));
3003 }
3004 if (SAVE_EXPR_RTL (exp) == 0)
3005 {
3006 if (mode == BLKmode)
3007 temp
3008 = assign_stack_temp (mode,
3009 int_size_in_bytes (TREE_TYPE (exp)), 0);
3010 else
3011 temp = gen_reg_rtx (mode);
3012 SAVE_EXPR_RTL (exp) = temp;
3013 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3014 if (!optimize && GET_CODE (temp) == REG)
3015 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3016 save_expr_regs);
3017 }
3018 return SAVE_EXPR_RTL (exp);
3019
3020 case EXIT_EXPR:
3021 /* Exit the current loop if the body-expression is true. */
3022 {
3023 rtx label = gen_label_rtx ();
3024 do_jump (TREE_OPERAND (exp, 0), label, 0);
3025 expand_exit_loop (0);
3026 emit_label (label);
3027 }
3028 return const0_rtx;
3029
3030 case LOOP_EXPR:
3031 expand_start_loop (1);
3032 expand_expr_stmt (TREE_OPERAND (exp, 0));
3033 expand_end_loop ();
3034
3035 return const0_rtx;
3036
3037 case BIND_EXPR:
3038 {
3039 tree vars = TREE_OPERAND (exp, 0);
3040 int vars_need_expansion = 0;
3041
3042 /* Need to open a binding contour here because
3043	   if there are any cleanups they must be contained here. */
3044 expand_start_bindings (0);
3045
3046 /* Mark the corresponding BLOCK for output. */
3047 if (TREE_OPERAND (exp, 2) != 0)
3048 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
3049
3050 /* If VARS have not yet been expanded, expand them now. */
3051 while (vars)
3052 {
3053 if (DECL_RTL (vars) == 0)
3054 {
3055 vars_need_expansion = 1;
3056 expand_decl (vars);
3057 }
3058 expand_decl_init (vars);
3059 vars = TREE_CHAIN (vars);
3060 }
3061
3062 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3063
3064 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3065
3066 return temp;
3067 }
3068
3069 case RTL_EXPR:
3070 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3071 abort ();
3072 emit_insns (RTL_EXPR_SEQUENCE (exp));
3073 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3074 return RTL_EXPR_RTL (exp);
3075
3076 case CONSTRUCTOR:
3077 /* All elts simple constants => refer to a constant in memory. */
3078 if (TREE_STATIC (exp))
3079 /* For aggregate types with non-BLKmode modes,
3080 this should ideally construct a CONST_INT. */
3081 {
3082 rtx constructor = output_constant_def (exp);
3083 if (! memory_address_p (GET_MODE (constructor),
3084 XEXP (constructor, 0)))
3085 constructor = change_address (constructor, VOIDmode,
3086 XEXP (constructor, 0));
3087 return constructor;
3088 }
3089
3090 if (ignore)
3091 {
3092 tree elt;
3093 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3094 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3095 return const0_rtx;
3096 }
3097 else
3098 {
3099 if (target == 0 || ! safe_from_p (target, exp))
3100 {
3101 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3102 target = gen_reg_rtx (mode);
3103 else
3104 {
3105 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3106 if (target)
3107 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3108 target = safe_target;
3109 }
3110 }
3111 store_constructor (exp, target);
3112 return target;
3113 }
3114
3115 case INDIRECT_REF:
3116 {
3117 tree exp1 = TREE_OPERAND (exp, 0);
3118 tree exp2;
3119
3120	/* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3121 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3122 This code has the same general effect as simply doing
3123 expand_expr on the save expr, except that the expression PTR
3124 is computed for use as a memory address. This means different
3125 code, suitable for indexing, may be generated. */
3126 if (TREE_CODE (exp1) == SAVE_EXPR
3127 && SAVE_EXPR_RTL (exp1) == 0
3128 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3129 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3130 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3131 {
3132 temp = expand_expr (TREE_OPERAND (exp1, 0), 0, VOIDmode, EXPAND_SUM);
3133 op0 = memory_address (mode, temp);
3134 op0 = copy_all_regs (op0);
3135 SAVE_EXPR_RTL (exp1) = op0;
3136 }
3137 else
3138 {
3139 op0 = expand_expr (exp1, 0, VOIDmode, EXPAND_SUM);
3140 op0 = memory_address (mode, op0);
3141 }
3142 }
3143 temp = gen_rtx (MEM, mode, op0);
3144 /* If address was computed by addition,
3145 mark this as an element of an aggregate. */
3146 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3147 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3148 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3149 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3150 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3151 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE)
3152 MEM_IN_STRUCT_P (temp) = 1;
3153 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3154#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3155 a location is accessed through a pointer to const does not mean
3156 that the value there can never change. */
3157 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3158#endif
3159 return temp;
3160
3161 case ARRAY_REF:
3162 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3163 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3164 {
3165 /* Nonconstant array index or nonconstant element size.
3166 Generate the tree for *(&array+index) and expand that,
3167 except do it in a language-independent way
3168 and don't complain about non-lvalue arrays.
3169 `mark_addressable' should already have been called
3170 for any array for which this case will be reached. */
3171
3172 /* Don't forget the const or volatile flag from the array element. */
3173 tree variant_type = build_type_variant (type,
3174 TREE_READONLY (exp),
3175 TREE_THIS_VOLATILE (exp));
3176 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3177 TREE_OPERAND (exp, 0));
3178 tree index = TREE_OPERAND (exp, 1);
3179 tree elt;
3180
3181 /* Convert the integer argument to a type the same size as a pointer
3182 so the multiply won't overflow spuriously. */
3183 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3184 index = convert (type_for_size (POINTER_SIZE, 0), index);
3185
3186 /* Don't think the address has side effects
3187 just because the array does.
3188 (In some cases the address might have side effects,
3189 and we fail to record that fact here. However, it should not
3190 matter, since expand_expr should not care.) */
3191 TREE_SIDE_EFFECTS (array_adr) = 0;
3192
3193 elt = build1 (INDIRECT_REF, type,
3194 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3195 array_adr,
3196 fold (build (MULT_EXPR,
3197 TYPE_POINTER_TO (variant_type),
3198 index, size_in_bytes (type))))));
3199
3200 /* Volatility, etc., of new expression is same as old expression. */
3201 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3202 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3203 TREE_READONLY (elt) = TREE_READONLY (exp);
3204
3205 return expand_expr (elt, target, tmode, modifier);
3206 }
3207
3208 /* Fold an expression like: "foo"[2].
3209 This is not done in fold so it won't happen inside &. */
3210 {
3211 int i;
3212 tree arg0 = TREE_OPERAND (exp, 0);
3213 tree arg1 = TREE_OPERAND (exp, 1);
3214
3215 if (TREE_CODE (arg0) == STRING_CST
3216 && TREE_CODE (arg1) == INTEGER_CST
3217 && !TREE_INT_CST_HIGH (arg1)
3218 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3219 {
3220 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3221 {
3222 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3223 TREE_TYPE (exp) = integer_type_node;
3224 return expand_expr (exp, target, tmode, modifier);
3225 }
3226 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3227 {
3228 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3229 TREE_TYPE (exp) = integer_type_node;
3230 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3231 }
3232 }
3233 }
3234
3235 /* If this is a constant index into a constant array,
3236 just get the value from the array. */
3237 if (TREE_READONLY (TREE_OPERAND (exp, 0))
3238 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3239 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3240 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3241 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3242 && TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0))) != ERROR_MARK)
3243 {
3244 tree index = fold (TREE_OPERAND (exp, 1));
3245 if (TREE_CODE (index) == INTEGER_CST)
3246 {
3247 int i = TREE_INT_CST_LOW (index);
3248 tree init = CONSTRUCTOR_ELTS (DECL_INITIAL (TREE_OPERAND (exp, 0)));
3249
3250 while (init && i--)
3251 init = TREE_CHAIN (init);
3252 if (init)
3253 return expand_expr (fold (TREE_VALUE (init)), target, tmode, modifier);
3254 }
3255 }
3256 /* Treat array-ref with constant index as a component-ref. */
3257
3258 case COMPONENT_REF:
3259 case BIT_FIELD_REF:
3260 {
3261 enum machine_mode mode1;
3262 int bitsize;
3263 int bitpos;
3264 int volatilep = 0;
3265 tree tem = get_inner_reference (exp, &bitsize, &bitpos,
3266 &mode1, &unsignedp, &volatilep);
3267
3268 /* In some cases, we will be offsetting OP0's address by a constant.
3269 So get it as a sum, if possible. If we will be using it
3270 directly in an insn, we validate it. */
3271 op0 = expand_expr (tem, 0, VOIDmode, EXPAND_SUM);
3272
3273 /* Don't forget about volatility even if this is a bitfield. */
3274 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3275 {
3276 op0 = copy_rtx (op0);
3277 MEM_VOLATILE_P (op0) = 1;
3278 }
3279
3280 if (mode1 == VOIDmode
3281 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3282 {
3283 /* In cases where an aligned union has an unaligned object
3284 as a field, we might be extracting a BLKmode value from
3285 an integer-mode (e.g., SImode) object. Handle this case
3286 by doing the extract into an object as wide as the field
3287 (which we know to be the width of a basic mode), then
3288 storing into memory, and changing the mode to BLKmode. */
3289 enum machine_mode ext_mode = mode;
3290
3291 if (ext_mode == BLKmode)
3292 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3293
3294 if (ext_mode == BLKmode)
3295 abort ();
3296
3297 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3298 unsignedp, target, ext_mode, ext_mode,
3299 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3300 int_size_in_bytes (TREE_TYPE (tem)));
3301 if (mode == BLKmode)
3302 {
3303 rtx new = assign_stack_temp (ext_mode,
3304 bitsize / BITS_PER_UNIT, 0);
3305
3306 emit_move_insn (new, op0);
3307 op0 = copy_rtx (new);
3308 PUT_MODE (op0, BLKmode);
3309 }
3310
3311 return op0;
3312 }
3313
3314 /* Get a reference to just this component. */
3315 if (modifier == EXPAND_CONST_ADDRESS
3316 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3317 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3318 (bitpos / BITS_PER_UNIT)));
3319 else
3320 op0 = change_address (op0, mode1,
3321 plus_constant (XEXP (op0, 0),
3322 (bitpos / BITS_PER_UNIT)));
3323 MEM_IN_STRUCT_P (op0) = 1;
3324 MEM_VOLATILE_P (op0) |= volatilep;
3325 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3326 return op0;
3327 if (target == 0)
3328 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3329 convert_move (target, op0, unsignedp);
3330 return target;
3331 }
3332
3333 case OFFSET_REF:
3334 {
3335 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3336 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3337 op0 = expand_expr (addr, 0, VOIDmode, EXPAND_SUM);
3338 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3339 MEM_IN_STRUCT_P (temp) = 1;
3340 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3341#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3342 a location is accessed through a pointer to const does not mean
3343 that the value there can never change. */
3344 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3345#endif
3346 return temp;
3347 }
3348
3349 /* Intended for a reference to a buffer of a file-object in Pascal.
3350 But it's not certain that a special tree code will really be
3351 necessary for these. INDIRECT_REF might work for them. */
3352 case BUFFER_REF:
3353 abort ();
3354
3355 case WITH_CLEANUP_EXPR:
3356 if (RTL_EXPR_RTL (exp) == 0)
3357 {
3358 RTL_EXPR_RTL (exp)
3359 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3360 cleanups_this_call = tree_cons (0, TREE_OPERAND (exp, 2), cleanups_this_call);
3361 /* That's it for this cleanup. */
3362 TREE_OPERAND (exp, 2) = 0;
3363 }
3364 return RTL_EXPR_RTL (exp);
3365
3366 case CALL_EXPR:
3367 /* Check for a built-in function. */
3368 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3369 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3370 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3371 return expand_builtin (exp, target, subtarget, tmode, ignore);
3372 /* If this call was expanded already by preexpand_calls,
3373 just return the result we got. */
3374 if (CALL_EXPR_RTL (exp) != 0)
3375 return CALL_EXPR_RTL (exp);
3376 return expand_call (exp, target, ignore, modifier);
3377
3378 case NON_LVALUE_EXPR:
3379 case NOP_EXPR:
3380 case CONVERT_EXPR:
3381 case REFERENCE_EXPR:
3382 if (TREE_CODE (type) == VOID_TYPE || ignore)
3383 {
3384 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3385 return const0_rtx;
3386 }
3387 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3388 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3389 if (TREE_CODE (type) == UNION_TYPE)
3390 {
3391 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3392 if (target == 0)
3393 {
3394 if (mode == BLKmode)
3395 {
3396 if (TYPE_SIZE (type) == 0
3397 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3398 abort ();
3399 target = assign_stack_temp (BLKmode,
3400 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3401 + BITS_PER_UNIT - 1)
3402 / BITS_PER_UNIT, 0);
3403 }
3404 else
3405 target = gen_reg_rtx (mode);
3406 }
3407 if (GET_CODE (target) == MEM)
3408 /* Store data into beginning of memory target. */
3409 store_expr (TREE_OPERAND (exp, 0),
3410 change_address (target, TYPE_MODE (valtype), 0), 0);
3411 else if (GET_CODE (target) == REG)
3412 /* Store this field into a union of the proper type. */
3413 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3414 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3415 VOIDmode, 0, 1,
3416 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3417 else
3418 abort ();
3419
3420 /* Return the entire union. */
3421 return target;
3422 }
3423 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, 0);
3424 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3425 return op0;
3426 if (flag_force_mem && GET_CODE (op0) == MEM)
3427 op0 = copy_to_reg (op0);
3428
3429 if (target == 0)
3430 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3431 else
3432 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3433 return target;
3434
3435 case PLUS_EXPR:
3436 /* We come here from MINUS_EXPR when the second operand is a constant. */
3437 plus_expr:
3438 this_optab = add_optab;
3439
3440 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3441 something else, make sure we add the register to the constant and
3442 then to the other thing. This case can occur during strength
3443 reduction and doing it this way will produce better code if the
3444 frame pointer or argument pointer is eliminated.
3445
3446 fold-const.c will ensure that the constant is always in the inner
3447 PLUS_EXPR, so the only case we need to do anything about is if
3448 sp, ap, or fp is our second argument, in which case we must swap
3449 the innermost first argument and our second argument. */
3450
3451 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3452 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3453 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3454 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3455 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3456 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3457 {
3458 tree t = TREE_OPERAND (exp, 1);
3459
3460 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3461 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3462 }
3463
3464 /* If the result is to be Pmode and we are adding an integer to
3465 something, we might be forming a constant. So try to use
3466 plus_constant. If it produces a sum and we can't accept it,
3467 use force_operand. This allows P = &ARR[const] to generate
3468 efficient code on machines where a SYMBOL_REF is not a valid
3469 address.
3470
3471 If this is an EXPAND_SUM call, always return the sum. */
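      /* For example, `&arr[3]' with 4-byte elements becomes
	 plus_constant ((symbol_ref arr), 12), which folds to
	 (const (plus (symbol_ref arr) (const_int 12))); an EXPAND_SUM
	 caller can use that directly as an address.  */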
3472 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3473 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3474 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3475 || mode == Pmode))
3476 {
3477 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3478 EXPAND_SUM);
3479 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3480 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3481 op1 = force_operand (op1, target);
3482 return op1;
3483 }
3484
3485 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3486 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3487 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3488 || mode == Pmode))
3489 {
3490 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3491 EXPAND_SUM);
3492 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3493 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3494 op0 = force_operand (op0, target);
3495 return op0;
3496 }
3497
3498 /* No sense saving up arithmetic to be done
3499 if it's all in the wrong mode to form part of an address.
3500 And force_operand won't know whether to sign-extend or
3501 zero-extend. */
3502 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3503 || mode != Pmode) goto binop;
3504
3505 preexpand_calls (exp);
3506 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3507 subtarget = 0;
3508
3509 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3510 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, modifier);
3511
3512 /* Make sure any term that's a sum with a constant comes last. */
3513 if (GET_CODE (op0) == PLUS
3514 && CONSTANT_P (XEXP (op0, 1)))
3515 {
3516 temp = op0;
3517 op0 = op1;
3518 op1 = temp;
3519 }
3520 /* If adding to a sum including a constant,
3521 associate it to put the constant outside. */
3522 if (GET_CODE (op1) == PLUS
3523 && CONSTANT_P (XEXP (op1, 1)))
3524 {
3525 rtx constant_term = const0_rtx;
3526
3527 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3528 if (temp != 0)
3529 op0 = temp;
3530 else
3531 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3532
3533 /* Let's also eliminate constants from op0 if possible. */
3534 op0 = eliminate_constant_term (op0, &constant_term);
3535
3536 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3537 their sum should be a constant. Form it into OP1, since the
3538 result we want will then be OP0 + OP1. */
3539
3540 temp = simplify_binary_operation (PLUS, mode, constant_term,
3541 XEXP (op1, 1));
3542 if (temp != 0)
3543 op1 = temp;
3544 else
3545 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3546 }
3547
3548 /* Put a constant term last and put a multiplication first. */
3549 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3550 temp = op1, op1 = op0, op0 = temp;
3551
3552 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3553 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
3554
3555 case MINUS_EXPR:
3556 /* Handle difference of two symbolic constants,
3557 for the sake of an initializer. */
3558 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3559 && really_constant_p (TREE_OPERAND (exp, 0))
3560 && really_constant_p (TREE_OPERAND (exp, 1)))
3561 {
3562 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, modifier);
3563 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, modifier);
3564 return gen_rtx (MINUS, mode, op0, op1);
3565 }
3566 /* Convert A - const to A + (-const). */
3567 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3568 {
3569 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3570 fold (build1 (NEGATE_EXPR, type,
3571 TREE_OPERAND (exp, 1))));
3572 goto plus_expr;
3573 }
3574 this_optab = sub_optab;
3575 goto binop;
3576
3577 case MULT_EXPR:
3578 preexpand_calls (exp);
3579 /* If first operand is constant, swap them.
3580 Thus the following special case checks need only
3581 check the second operand. */
3582 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3583 {
3584 register tree t1 = TREE_OPERAND (exp, 0);
3585 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
3586 TREE_OPERAND (exp, 1) = t1;
3587 }
3588
3589 /* Attempt to return something suitable for generating an
3590 indexed address, for machines that support that. */
3591
3592 if (modifier == EXPAND_SUM && mode == Pmode
3593 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3594 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT)
3595 {
3596 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
3597
3598 /* Apply distributive law if OP0 is x+c. */
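/* e.g. (X + 4) * 3 becomes X*3 + 12, a reg-plus-offset form
   that can feed directly into an address calculation.  */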
3599 if (GET_CODE (op0) == PLUS
3600 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
3601 return gen_rtx (PLUS, mode,
3602 gen_rtx (MULT, mode, XEXP (op0, 0),
3603 gen_rtx (CONST_INT, VOIDmode,
3604 TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
3605 gen_rtx (CONST_INT, VOIDmode,
3606 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3607 * INTVAL (XEXP (op0, 1)))));
3608
3609 if (GET_CODE (op0) != REG)
3610 op0 = force_operand (op0, 0);
3611 if (GET_CODE (op0) != REG)
3612 op0 = copy_to_mode_reg (mode, op0);
3613
3614 return gen_rtx (MULT, mode, op0,
3615 gen_rtx (CONST_INT, VOIDmode,
3616 TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
3617 }
3618
3619 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3620 subtarget = 0;
3621
3622 /* Check for multiplying things that have been extended
3623 from a narrower type. If this machine supports multiplying
3624 in that narrower type with a result in the desired type,
3625 do it that way, and avoid the explicit type-conversion. */
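/* For example, with 16-bit shorts and 32-bit ints, the product
   `(int) s1 * (int) s2' can use a mulhisi-style widening multiply
   directly on the shorts, when the target provides such an insn.  */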
3626 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
3627 && TREE_CODE (type) == INTEGER_TYPE
3628 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3629 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
3630 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3631 && int_fits_type_p (TREE_OPERAND (exp, 1),
3632 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3633 /* Don't use a widening multiply if a shift will do. */
3634 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
3635 > HOST_BITS_PER_INT)
3636 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
3637 ||
3638 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
3639 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
3640 ==
3641 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
3642 /* If both operands are extended, they must either both
3643 be zero-extended or both be sign-extended. */
3644 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
3645 ==
3646 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
3647 {
3648 enum machine_mode innermode
3649 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
3650 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3651 ? umul_widen_optab : smul_widen_optab);
3652 if (mode == GET_MODE_WIDER_MODE (innermode)
3653 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3654 {
3655 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
3656 0, VOIDmode, 0);
3657 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3658 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3659 else
3660 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
3661 0, VOIDmode, 0);
3662 goto binop2;
3663 }
3664 }
3665 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3666 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3667 return expand_mult (mode, op0, op1, target, unsignedp);
3668
3669 case TRUNC_DIV_EXPR:
3670 case FLOOR_DIV_EXPR:
3671 case CEIL_DIV_EXPR:
3672 case ROUND_DIV_EXPR:
3673 case EXACT_DIV_EXPR:
3674 preexpand_calls (exp);
3675 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3676 subtarget = 0;
3677 /* Possible optimization: compute the dividend with EXPAND_SUM
3678 then, if the divisor is constant, we can optimize the case
3679 where some terms of the dividend have coefficients divisible by it. */
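/* e.g. (X*8 + Y) / 4 could become X*2 + Y/4, when the signs of
   the terms are known to make that rewrite safe.  */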
3680 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3681 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3682 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
3683
3684 case RDIV_EXPR:
3685 this_optab = flodiv_optab;
3686 goto binop;
3687
3688 case TRUNC_MOD_EXPR:
3689 case FLOOR_MOD_EXPR:
3690 case CEIL_MOD_EXPR:
3691 case ROUND_MOD_EXPR:
3692 preexpand_calls (exp);
3693 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3694 subtarget = 0;
3695 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3696 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3697 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
3698
3699 case FIX_ROUND_EXPR:
3700 case FIX_FLOOR_EXPR:
3701 case FIX_CEIL_EXPR:
3702 abort (); /* Not used for C. */
3703
3704 case FIX_TRUNC_EXPR:
3705 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
3706 if (target == 0)
3707 target = gen_reg_rtx (mode);
3708 expand_fix (target, op0, unsignedp);
3709 return target;
3710
3711 case FLOAT_EXPR:
3712 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
3713 if (target == 0)
3714 target = gen_reg_rtx (mode);
3715 /* expand_float can't figure out what to do if FROM has VOIDmode.
3716 So give it the correct mode. With -O, cse will optimize this. */
3717 if (GET_MODE (op0) == VOIDmode)
3718 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
3719 op0);
3720 expand_float (target, op0,
3721 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3722 return target;
3723
3724 case NEGATE_EXPR:
3725 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
3726 temp = expand_unop (mode, neg_optab, op0, target, 0);
3727 if (temp == 0)
3728 abort ();
3729 return temp;
3730
3731 case ABS_EXPR:
3732 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3733
3734 /* Unsigned abs is simply the operand. Testing here means we don't
3735 risk generating incorrect code below. */
3736 if (TREE_UNSIGNED (type))
3737 return op0;
3738
3739 /* First try to do it with a special abs instruction. */
3740 temp = expand_unop (mode, abs_optab, op0, target, 0);
3741 if (temp != 0)
3742 return temp;
3743
3744 /* If this machine has expensive jumps, we can do integer absolute
3745 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
3746 where W is the width of MODE. */
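/* E.g. for 32-bit ints, S = x >> 31 (arithmetic shift) is 0 when
   x >= 0 and -1 when x < 0, so (x ^ S) - S yields either x
   or ~x + 1 = -x.  */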
3747
3748 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
3749 {
3750 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
3751 size_int (GET_MODE_BITSIZE (mode) - 1),
3752 0, 0);
3753
3754 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
3755 OPTAB_LIB_WIDEN);
3756 if (temp != 0)
3757 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
3758 OPTAB_LIB_WIDEN);
3759
3760 if (temp != 0)
3761 return temp;
3762 }
3763
3764 /* If that does not win, use conditional jump and negate. */
3765 target = original_target;
3766 temp = gen_label_rtx ();
3767 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
3768 || (GET_CODE (target) == REG
3769 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3770 target = gen_reg_rtx (mode);
3771 emit_move_insn (target, op0);
3772 emit_cmp_insn (target,
3773 expand_expr (convert (type, integer_zero_node),
3774 0, VOIDmode, 0),
3775 GE, 0, mode, 0, 0);
3776 NO_DEFER_POP;
3777 emit_jump_insn (gen_bge (temp));
3778 op0 = expand_unop (mode, neg_optab, target, target, 0);
3779 if (op0 != target)
3780 emit_move_insn (target, op0);
3781 emit_label (temp);
3782 OK_DEFER_POP;
3783 return target;
3784
3785 case MAX_EXPR:
3786 case MIN_EXPR:
3787 target = original_target;
3788 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
3789 || (GET_CODE (target) == REG
3790 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3791 target = gen_reg_rtx (mode);
3792 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3793 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
3794
3795 /* First try to do it with a special MIN or MAX instruction.
3796 If that does not win, use a conditional jump to select the proper
3797 value. */
3798 this_optab = (TREE_UNSIGNED (type)
3799 ? (code == MIN_EXPR ? umin_optab : umax_optab)
3800 : (code == MIN_EXPR ? smin_optab : smax_optab));
3801
3802 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
3803 OPTAB_WIDEN);
3804 if (temp != 0)
3805 return temp;
3806
3807 if (target != op0)
3808 emit_move_insn (target, op0);
3809 op0 = gen_label_rtx ();
3810 if (code == MAX_EXPR)
3811 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
3812 ? compare_from_rtx (target, op1, GEU, 1, mode, 0, 0)
3813 : compare_from_rtx (target, op1, GE, 0, mode, 0, 0));
3814 else
3815 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
3816 ? compare_from_rtx (target, op1, LEU, 1, mode, 0, 0)
3817 : compare_from_rtx (target, op1, LE, 0, mode, 0, 0));
3818 if (temp == const0_rtx)
3819 emit_move_insn (target, op1);
3820 else if (temp != const_true_rtx)
3821 {
3822 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
3823 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
3824 else
3825 abort ();
3826 emit_move_insn (target, op1);
3827 }
3828 emit_label (op0);
3829 return target;
3830
3831/* ??? Can optimize when the operand of this is a bitwise operation,
3832 by using a different bitwise operation. */
3833 case BIT_NOT_EXPR:
3834 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3835 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
3836 if (temp == 0)
3837 abort ();
3838 return temp;
3839
3840 case FFS_EXPR:
3841 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3842 temp = expand_unop (mode, ffs_optab, op0, target, 1);
3843 if (temp == 0)
3844 abort ();
3845 return temp;
3846
3847/* ??? Can optimize bitwise operations with one arg constant.
3848 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
3849 and (a bitwise1 b) bitwise2 b (etc)
3850 but that is probably not worth while. */
3851
3852/* BIT_AND_EXPR is for bitwise anding.
3853 TRUTH_AND_EXPR is for anding two boolean values
3854 when we want in all cases to compute both of them.
3855 In general it is fastest to do TRUTH_AND_EXPR by
3856 computing both operands as actual zero-or-1 values
3857 and then bitwise anding. In cases where there cannot
3858 be any side effects, better code would be made by
3859 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
3860 but the question is how to recognize those cases. */
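/* E.g. `a && b' (TRUTH_ANDIF_EXPR) must not evaluate B when A is
   zero, whereas TRUTH_AND_EXPR computes both 0-or-1 values and
   combines them with an ordinary AND insn.  */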
3861
3862 case TRUTH_AND_EXPR:
3863 case BIT_AND_EXPR:
3864 this_optab = and_optab;
3865 goto binop;
3866
3867/* See comment above about TRUTH_AND_EXPR; it applies here too. */
3868 case TRUTH_OR_EXPR:
3869 case BIT_IOR_EXPR:
3870 this_optab = ior_optab;
3871 goto binop;
3872
3873 case BIT_XOR_EXPR:
3874 this_optab = xor_optab;
3875 goto binop;
3876
3877 case LSHIFT_EXPR:
3878 case RSHIFT_EXPR:
3879 case LROTATE_EXPR:
3880 case RROTATE_EXPR:
3881 preexpand_calls (exp);
3882 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3883 subtarget = 0;
3884 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3885 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
3886 unsignedp);
3887
3888/* Could determine the answer when only additive constants differ.
3889 Also, the addition of one can be handled by changing the condition. */
3890 case LT_EXPR:
3891 case LE_EXPR:
3892 case GT_EXPR:
3893 case GE_EXPR:
3894 case EQ_EXPR:
3895 case NE_EXPR:
3896 preexpand_calls (exp);
3897 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
3898 if (temp != 0)
3899 return temp;
3900 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
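/* i.e. emit roughly `r = foo; if (r == 0) goto L; r = 1; L:',
   reusing the register that already has FOO's mode.  */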
3901 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
3902 && original_target
3903 && GET_CODE (original_target) == REG
3904 && (GET_MODE (original_target)
3905 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3906 {
3907 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
3908 if (temp != original_target)
3909 temp = copy_to_reg (temp);
3910 op1 = gen_label_rtx ();
3911 emit_cmp_insn (temp, const0_rtx, EQ, 0,
3912 GET_MODE (temp), unsignedp, 0);
3913 emit_jump_insn (gen_beq (op1));
3914 emit_move_insn (temp, const1_rtx);
3915 emit_label (op1);
3916 return temp;
3917 }
3918 /* If no set-flag instruction, must generate a conditional
3919 store into a temporary variable. Drop through
3920 and handle this like && and ||. */
3921
3922 case TRUTH_ANDIF_EXPR:
3923 case TRUTH_ORIF_EXPR:
3924 if (target == 0 || ! safe_from_p (target, exp)
3925 /* Make sure we don't have a hard reg (such as function's return
3926 value) live across basic blocks, if not optimizing. */
3927 || (!optimize && GET_CODE (target) == REG
3928 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3929 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3930 emit_clr_insn (target);
3931 op1 = gen_label_rtx ();
3932 jumpifnot (exp, op1);
3933 emit_0_to_1_insn (target);
3934 emit_label (op1);
3935 return target;
3936
3937 case TRUTH_NOT_EXPR:
3938 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
3939 /* The parser is careful to generate TRUTH_NOT_EXPR
3940 only with operands that are always zero or one. */
3941 temp = expand_binop (mode, xor_optab, op0,
3942 gen_rtx (CONST_INT, VOIDmode, 1),
3943 target, 1, OPTAB_LIB_WIDEN);
3944 if (temp == 0)
3945 abort ();
3946 return temp;
3947
3948 case COMPOUND_EXPR:
3949 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3950 emit_queue ();
3951 return expand_expr (TREE_OPERAND (exp, 1),
3952 (ignore ? const0_rtx : target),
3953 VOIDmode, 0);
3954
3955 case COND_EXPR:
3956 {
3957 /* Note that COND_EXPRs whose type is a structure or union
3958 are required to be constructed to contain assignments to
3959 a temporary variable, so that we can evaluate them here
3960 for side effect only. If type is void, we must do likewise. */
3961
3962 /* If an arm of the branch requires a cleanup,
3963 only that cleanup is performed. */
3964
3965 tree singleton = 0;
3966 tree binary_op = 0, unary_op = 0;
3967 tree old_cleanups = cleanups_this_call;
3968 cleanups_this_call = 0;
3969
3970 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
3971 convert it to our mode, if necessary. */
3972 if (integer_onep (TREE_OPERAND (exp, 1))
3973 && integer_zerop (TREE_OPERAND (exp, 2))
3974 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
3975 {
3976 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
3977 if (GET_MODE (op0) == mode)
3978 return op0;
3979 if (target == 0)
3980 target = gen_reg_rtx (mode);
3981 convert_move (target, op0, unsignedp);
3982 return target;
3983 }
3984
3985 /* If we are not to produce a result, we have no target. Otherwise,
3986 if a target was specified use it; it will not be used as an
3987 intermediate target unless it is safe. If no target, use a
3988 temporary. */
3989
3990 if (mode == VOIDmode || ignore)
3991 temp = 0;
3992 else if (original_target
3993 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
3994 temp = original_target;
3995 else if (mode == BLKmode)
3996 {
3997 if (TYPE_SIZE (type) == 0
3998 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3999 abort ();
4000 temp = assign_stack_temp (BLKmode,
4001 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4002 + BITS_PER_UNIT - 1)
4003 / BITS_PER_UNIT, 0);
4004 }
4005 else
4006 temp = gen_reg_rtx (mode);
4007
4008 /* Check for X ? A + B : A. If we have this, we can copy
4009 A to the output and conditionally add B. Similarly for unary
4010 operations. Don't do this if X has side-effects because
4011 those side effects might affect A or B and the "?" operation is
4012 a sequence point in ANSI. (We test for side effects later.) */
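/* E.g. for `x ? a + 3 : a' we can copy A to the output and then
   add 3 only when X is true, instead of materializing both arms.  */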
4013
4014 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4015 && operand_equal_p (TREE_OPERAND (exp, 2),
4016 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4017 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4018 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4019 && operand_equal_p (TREE_OPERAND (exp, 1),
4020 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4021 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4022 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4023 && operand_equal_p (TREE_OPERAND (exp, 2),
4024 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4025 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4026 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4027 && operand_equal_p (TREE_OPERAND (exp, 1),
4028 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4029 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4030
4031 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4032 operation, do this as A + (X != 0). Similarly for other simple
4033 binary operators. */
4034 if (singleton && binary_op
4035 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4036 && (TREE_CODE (binary_op) == PLUS_EXPR
4037 || TREE_CODE (binary_op) == MINUS_EXPR
4038 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4039 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4040 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4041 && integer_onep (TREE_OPERAND (binary_op, 1))
4042 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4043 {
4044 rtx result;
4045 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4046 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4047 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4048 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4049 : and_optab);
4050
4051 /* If we had X ? A : A + 1, do this as A + (X == 0).
4052
4053 We have to invert the truth value here and then put it
4054 back later if do_store_flag fails. We cannot simply copy
4055 TREE_OPERAND (exp, 0) to another variable and modify that
4056 because invert_truthvalue can modify the tree pointed to
4057 by its argument. */
4058 if (singleton == TREE_OPERAND (exp, 1))
4059 TREE_OPERAND (exp, 0)
4060 = invert_truthvalue (TREE_OPERAND (exp, 0));
4061
4062 result = do_store_flag (TREE_OPERAND (exp, 0),
4063 safe_from_p (temp, singleton) ? temp : 0,
4064 mode, BRANCH_COST <= 1);
4065
4066 if (result)
4067 {
4068 op1 = expand_expr (singleton, 0, VOIDmode, 0);
4069 return expand_binop (mode, boptab, op1, result, temp,
4070 unsignedp, OPTAB_LIB_WIDEN);
4071 }
4072 else if (singleton == TREE_OPERAND (exp, 1))
4073 TREE_OPERAND (exp, 0)
4074 = invert_truthvalue (TREE_OPERAND (exp, 0));
4075 }
4076
4077 NO_DEFER_POP;
4078 op0 = gen_label_rtx ();
4079
4080 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4081 {
4082 if (temp != 0)
4083 {
4084 /* If the target conflicts with the other operand of the
4085 binary op, we can't use it. Also, we can't use the target
4086 if it is a hard register, because evaluating the condition
4087 might clobber it. */
4088 if ((binary_op
4089 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4090 || (GET_CODE (temp) == REG
4091 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4092 temp = gen_reg_rtx (mode);
4093 store_expr (singleton, temp, 0);
4094 }
4095 else
4096 expand_expr (singleton, ignore ? const1_rtx : 0, VOIDmode, 0);
4097 if (cleanups_this_call)
4098 {
4099 sorry ("aggregate value in COND_EXPR");
4100 cleanups_this_call = 0;
4101 }
4102 if (singleton == TREE_OPERAND (exp, 1))
4103 jumpif (TREE_OPERAND (exp, 0), op0);
4104 else
4105 jumpifnot (TREE_OPERAND (exp, 0), op0);
4106
4107 if (binary_op && temp == 0)
4108 /* Just touch the other operand. */
4109 expand_expr (TREE_OPERAND (binary_op, 1),
4110 ignore ? const0_rtx : 0, VOIDmode, 0);
4111 else if (binary_op)
4112 store_expr (build (TREE_CODE (binary_op), type,
4113 make_tree (type, temp),
4114 TREE_OPERAND (binary_op, 1)),
4115 temp, 0);
4116 else
4117 store_expr (build1 (TREE_CODE (unary_op), type,
4118 make_tree (type, temp)),
4119 temp, 0);
4120 op1 = op0;
4121 }
4122#if 0
4123 /* This is now done in jump.c and is better done there because it
4124 produces shorter register lifetimes. */
4125
4126 /* Check for both possibilities either constants or variables
4127 in registers (but not the same as the target!). If so, can
4128 save branches by assigning one, branching, and assigning the
4129 other. */
4130 else if (temp && GET_MODE (temp) != BLKmode
4131 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4132 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4133 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4134 && DECL_RTL (TREE_OPERAND (exp, 1))
4135 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4136 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4137 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4138 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4139 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4140 && DECL_RTL (TREE_OPERAND (exp, 2))
4141 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4142 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4143 {
4144 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4145 temp = gen_reg_rtx (mode);
4146 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4147 jumpifnot (TREE_OPERAND (exp, 0), op0);
4148 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4149 op1 = op0;
4150 }
4151#endif
4152 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4153 comparison operator. If we have one of these cases, set the
4154 output to A, branch on A (cse will merge these two references),
4155 then set the output to FOO. */
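/* E.g. for `a != 0 ? a : foo': store A, jump past the second
   store when the comparison holds, else fall through and
   store FOO.  */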
4156 else if (temp
4157 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4158 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4159 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4160 TREE_OPERAND (exp, 1), 0)
4161 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4162 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4163 {
4164 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4165 temp = gen_reg_rtx (mode);
4166 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4167 jumpif (TREE_OPERAND (exp, 0), op0);
4168 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4169 op1 = op0;
4170 }
4171 else if (temp
4172 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4173 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4174 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4175 TREE_OPERAND (exp, 2), 0)
4176 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4177 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4178 {
4179 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4180 temp = gen_reg_rtx (mode);
4181 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4182 jumpifnot (TREE_OPERAND (exp, 0), op0);
4183 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4184 op1 = op0;
4185 }
4186 else
4187 {
4188 op1 = gen_label_rtx ();
4189 jumpifnot (TREE_OPERAND (exp, 0), op0);
4190 if (temp != 0)
4191 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4192 else
4193 expand_expr (TREE_OPERAND (exp, 1), ignore ? const0_rtx : 0,
4194 VOIDmode, 0);
4195 if (cleanups_this_call)
4196 {
4197 sorry ("aggregate value in COND_EXPR");
4198 cleanups_this_call = 0;
4199 }
4200
4201 emit_queue ();
4202 emit_jump_insn (gen_jump (op1));
4203 emit_barrier ();
4204 emit_label (op0);
4205 if (temp != 0)
4206 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4207 else
4208 expand_expr (TREE_OPERAND (exp, 2), ignore ? const0_rtx : 0,
4209 VOIDmode, 0);
4210 }
4211
4212 if (cleanups_this_call)
4213 {
4214 sorry ("aggregate value in COND_EXPR");
4215 cleanups_this_call = 0;
4216 }
4217
4218 emit_queue ();
4219 emit_label (op1);
4220 OK_DEFER_POP;
4221 cleanups_this_call = old_cleanups;
4222 return temp;
4223 }
4224
4225 case TARGET_EXPR:
4226 {
4227 /* Something needs to be initialized, but we didn't know
4228 where that thing was when building the tree. For example,
4229 it could be the return value of a function, or a parameter
4230 to a function which is laid down on the stack, or a temporary
4231 variable which must be passed by reference.
4232
4233 We guarantee that the expression will either be constructed
4234 or copied into our original target. */
4235
4236 tree slot = TREE_OPERAND (exp, 0);
4237
4238 if (TREE_CODE (slot) != VAR_DECL)
4239 abort ();
4240
4241 if (target == 0)
4242 {
4243 if (DECL_RTL (slot) != 0)
4244 target = DECL_RTL (slot);
4245 else
4246 {
4247 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4248 /* All temp slots at this level must not conflict. */
4249 preserve_temp_slots (target);
4250 DECL_RTL (slot) = target;
4251 }
4252
4253#if 0
4254 /* Since SLOT is not known to the called function
4255 to belong to its stack frame, we must build an explicit
4256 cleanup. This case occurs when we must build up a reference
4257 to pass the reference as an argument. In this case,
4258 it is very likely that such a reference need not be
4259 built here. */
4260
4261 if (TREE_OPERAND (exp, 2) == 0)
4262 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4263 if (TREE_OPERAND (exp, 2))
4264 cleanups_this_call = tree_cons (0, TREE_OPERAND (exp, 2),
4265 cleanups_this_call);
4266#endif
4267 }
4268 else
4269 {
4270 /* This case does occur, when expanding a parameter which
4271 needs to be constructed on the stack. The target
4272 is the actual stack address that we want to initialize.
4273 The function we call will perform the cleanup in this case. */
4274
4275 DECL_RTL (slot) = target;
4276 }
4277
4278 return expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4279 }
4280
4281 case INIT_EXPR:
4282 {
4283 tree lhs = TREE_OPERAND (exp, 0);
4284 tree rhs = TREE_OPERAND (exp, 1);
4285 tree noncopied_parts = 0;
4286 tree lhs_type = TREE_TYPE (lhs);
4287
4288 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4289 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4290 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4291 TYPE_NONCOPIED_PARTS (lhs_type));
4292 while (noncopied_parts != 0)
4293 {
4294 expand_assignment (TREE_VALUE (noncopied_parts),
4295 TREE_PURPOSE (noncopied_parts), 0, 0);
4296 noncopied_parts = TREE_CHAIN (noncopied_parts);
4297 }
4298 return temp;
4299 }
4300
4301 case MODIFY_EXPR:
4302 {
4303 /* If lhs is complex, expand calls in rhs before computing it.
4304 That's so we don't compute a pointer and save it over a call.
4305 If lhs is simple, compute it first so we can give it as a
4306 target if the rhs is just a call. This avoids an extra temp and copy
4307 and that prevents a partial-subsumption which makes bad code.
4308 Actually we could treat component_ref's of vars like vars. */
4309
4310 tree lhs = TREE_OPERAND (exp, 0);
4311 tree rhs = TREE_OPERAND (exp, 1);
4312 tree noncopied_parts = 0;
4313 tree lhs_type = TREE_TYPE (lhs);
4314
4315 temp = 0;
4316
4317 if (TREE_CODE (lhs) != VAR_DECL
4318 && TREE_CODE (lhs) != RESULT_DECL
4319 && TREE_CODE (lhs) != PARM_DECL)
4320 preexpand_calls (exp);
4321
4322 /* Check for |= or &= of a bitfield of size one into another bitfield
4323 of size 1. In this case, (unless we need the result of the
4324 assignment) we can do this more efficiently with a
4325 test followed by an assignment, if necessary.
4326
4327 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4328 things change so we do, this code should be enhanced to
4329 support it. */
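/* E.g. `s.a |= s.b;' with one-bit fields becomes roughly
   `if (s.b != 0) s.a = 1;' -- a test and a plain store, avoiding
   a read-modify-write of the destination bitfield.  */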
4330 if (ignore
4331 && TREE_CODE (lhs) == COMPONENT_REF
4332 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4333 || TREE_CODE (rhs) == BIT_AND_EXPR)
4334 && TREE_OPERAND (rhs, 0) == lhs
4335 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4336 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4337 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4338 {
4339 rtx label = gen_label_rtx ();
4340
4341 do_jump (TREE_OPERAND (rhs, 1),
4342 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4343 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4344 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4345 (TREE_CODE (rhs) == BIT_IOR_EXPR
4346 ? integer_one_node
4347 : integer_zero_node)),
4348 0, 0);
4349 emit_label (label);
4350 return const0_rtx;
4351 }
4352
4353 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4354 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4355 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4356 TYPE_NONCOPIED_PARTS (lhs_type));
4357
4358 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4359 while (noncopied_parts != 0)
4360 {
4361 expand_assignment (TREE_PURPOSE (noncopied_parts),
4362 TREE_VALUE (noncopied_parts), 0, 0);
4363 noncopied_parts = TREE_CHAIN (noncopied_parts);
4364 }
4365 return temp;
4366 }
4367
4368 case PREINCREMENT_EXPR:
4369 case PREDECREMENT_EXPR:
4370 return expand_increment (exp, 0);
4371
4372 case POSTINCREMENT_EXPR:
4373 case POSTDECREMENT_EXPR:
4374 /* Faster to treat as pre-increment if result is not used. */
4375 return expand_increment (exp, ! ignore);
4376
4377 case ADDR_EXPR:
4378 /* Are we taking the address of a nested function? */
4379 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4380 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4381 {
4382 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4383 op0 = force_operand (op0, target);
4384 }
4385 else
4386 {
4387 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode,
4388 (modifier == EXPAND_INITIALIZER
4389 ? modifier : EXPAND_CONST_ADDRESS));
4390 if (GET_CODE (op0) != MEM)
4391 abort ();
4392
4393 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4394 return XEXP (op0, 0);
4395 op0 = force_operand (XEXP (op0, 0), target);
4396 }
4397 if (flag_force_addr && GET_CODE (op0) != REG)
4398 return force_reg (Pmode, op0);
4399 return op0;
4400
4401 case ENTRY_VALUE_EXPR:
4402 abort ();
4403
4404 case ERROR_MARK:
4405 return const0_rtx;
4406
4407 default:
4408 return (*lang_expand_expr) (exp, target, tmode, modifier);
4409 }
4410
4411 /* Here to do an ordinary binary operator, generating an instruction
4412 from the optab already placed in `this_optab'. */
4413 binop:
4414 preexpand_calls (exp);
4415 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4416 subtarget = 0;
4417 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4418 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
4419 binop2:
4420 temp = expand_binop (mode, this_optab, op0, op1, target,
4421 unsignedp, OPTAB_LIB_WIDEN);
4422 if (temp == 0)
4423 abort ();
4424 return temp;
4425}
4426\f
4427 /* Return the alignment of EXP, a pointer-valued expression for the mem*
4428 builtin functions. Alignments greater than MAX_ALIGN are not significant.
4429 The alignment returned is, by default, the alignment of the thing that
4430 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4431
4432 Otherwise, look at the expression to see if we can do better, i.e., if the
4433 expression is actually pointing at an object whose alignment is tighter. */
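/* E.g. for `(char *) &i' where I is an int, the pointer type alone
   promises only byte alignment, but looking through to the ADDR_EXPR
   reveals the stricter alignment of I itself.  */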
4434
4435static int
4436get_pointer_alignment (exp, max_align)
4437 tree exp;
4438 unsigned max_align;
4439{
4440 unsigned align, inner;
4441
4442 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4443 return 0;
4444
4445 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4446 align = MIN (align, max_align);
4447
4448 while (1)
4449 {
4450 switch (TREE_CODE (exp))
4451 {
4452 case NOP_EXPR:
4453 case CONVERT_EXPR:
4454 case NON_LVALUE_EXPR:
4455 exp = TREE_OPERAND (exp, 0);
4456 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4457 return align;
4458 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4459 inner = MIN (inner, max_align);
4460 align = MAX (align, inner);
4461 break;
4462
4463 case PLUS_EXPR:
4464 /* If sum of pointer + int, restrict our maximum alignment to that
4465 imposed by the integer. If not, we can't do any better than
4466 ALIGN. */
4467 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
4468 return align;
4469
4470 while ((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4471 & (max_align - 1)) != 0)
4472 max_align >>= 1;
4473
4474 exp = TREE_OPERAND (exp, 0);
4475 break;
4476
4477 case ADDR_EXPR:
4478 /* See what we are pointing at and look at its alignment. */
4479 exp = TREE_OPERAND (exp, 0);
4480 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4481 align = MAX (align, DECL_ALIGN (exp));
4482#ifdef CONSTANT_ALIGNMENT
4483 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4484 align = CONSTANT_ALIGNMENT (exp, align);
4485#endif
4486 return MIN (align, max_align);
4487
4488 default:
4489 return align;
4490 }
4491 }
4492}
4493\f
4494 /* If ARG corresponds to a string constant, return the STRING_CST node
4495 and set *PTR_OFFSET to the offset into it; otherwise return 0. */
4496
4497static tree
4498string_constant (arg, ptr_offset)
4499 tree arg;
4500 tree *ptr_offset;
4501{
4502 STRIP_NOPS (arg);
4503
4504 if (TREE_CODE (arg) == ADDR_EXPR
4505 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4506 {
4507 *ptr_offset = integer_zero_node;
4508 return TREE_OPERAND (arg, 0);
4509 }
4510 else if (TREE_CODE (arg) == PLUS_EXPR)
4511 {
4512 tree arg0 = TREE_OPERAND (arg, 0);
4513 tree arg1 = TREE_OPERAND (arg, 1);
4514
4515 STRIP_NOPS (arg0);
4516 STRIP_NOPS (arg1);
4517
4518 if (TREE_CODE (arg0) == ADDR_EXPR
4519 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4520 {
4521 *ptr_offset = arg1;
4522 return TREE_OPERAND (arg0, 0);
4523 }
4524 else if (TREE_CODE (arg1) == ADDR_EXPR
4525 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4526 {
4527 *ptr_offset = arg0;
4528 return TREE_OPERAND (arg1, 0);
4529 }
4530 }
4531
4532 return 0;
4533}
4534
4535/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4536 way, because it could contain a zero byte in the middle.
4537 TREE_STRING_LENGTH is the size of the character array, not the string.
4538
4539 Unfortunately, string_constant can't access the values of const char
4540 arrays with initializers, so neither can we here. */
4541
4542static tree
4543c_strlen (src)
4544 tree src;
4545{
4546 tree offset_node;
4547 int offset, max;
4548 char *ptr;
4549
4550 src = string_constant (src, &offset_node);
4551 if (src == 0)
4552 return 0;
4553 max = TREE_STRING_LENGTH (src);
4554 ptr = TREE_STRING_POINTER (src);
4555 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4556 {
4557 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4558 compute the offset to the following null if we don't know where to
4559 start searching for it. */
4560 int i;
4561 for (i = 0; i < max; i++)
4562 if (ptr[i] == 0)
4563 return 0;
4564 /* We don't know the starting offset, but we do know that the string
4565 has no internal zero bytes. We can assume that the offset falls
4566 within the bounds of the string; otherwise, the programmer deserves
4567 what he gets. Subtract the offset from the length of the string,
4568 and return that. */
4569 /* This would perhaps not be valid if we were dealing with named
4570 arrays in addition to literal string constants. */
4571 return size_binop (MINUS_EXPR, size_int (max), offset_node);
4572 }
4573
4574 /* We have a known offset into the string. Start searching there for
4575 a null character. */
4576 if (offset_node == 0)
4577 offset = 0;
4578 else
4579 {
4580 /* Did we get a long long offset? If so, punt. */
4581 if (TREE_INT_CST_HIGH (offset_node) != 0)
4582 return 0;
4583 offset = TREE_INT_CST_LOW (offset_node);
4584 }
4585 /* If the offset is known to be out of bounds, warn, and call strlen at
4586 runtime. */
4587 if (offset < 0 || offset > max)
4588 {
4589 warning ("offset outside bounds of constant string");
4590 return 0;
4591 }
4592 /* Use strlen to search for the first zero byte. Since any strings
4593 constructed with build_string will have nulls appended, we win even
4594 if we get handed something like (char[4])"abcd".
4595
4596 Since OFFSET is our starting index into the string, no further
4597 calculation is needed. */
4598 return size_int (strlen (ptr + offset));
4599}
4600\f
4601/* Expand an expression EXP that calls a built-in function,
4602 with result going to TARGET if that's convenient
4603 (and in mode MODE if that's convenient).
4604 SUBTARGET may be used as the target for computing one of EXP's operands.
4605 IGNORE is nonzero if the value is to be ignored. */
4606
4607static rtx
4608expand_builtin (exp, target, subtarget, mode, ignore)
4609 tree exp;
4610 rtx target;
4611 rtx subtarget;
4612 enum machine_mode mode;
4613 int ignore;
4614{
4615 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4616 tree arglist = TREE_OPERAND (exp, 1);
4617 rtx op0;
4618 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
4619
4620 switch (DECL_FUNCTION_CODE (fndecl))
4621 {
4622 case BUILT_IN_ABS:
4623 case BUILT_IN_LABS:
4624 case BUILT_IN_FABS:
4625 /* build_function_call changes these into ABS_EXPR. */
4626 abort ();
4627
4628 case BUILT_IN_SAVEREGS:
4629 /* Don't do __builtin_saveregs more than once in a function.
4630 Save the result of the first call and reuse it. */
4631 if (saveregs_value != 0)
4632 return saveregs_value;
4633 {
4634 /* When this function is called, it means that registers must be
4635 saved on entry to this function. So we migrate the
4636 call to the first insn of this function. */
4637 rtx temp;
4638 rtx seq;
4639 rtx valreg, saved_valreg;
4640
4641 /* Now really call the function. `expand_call' does not call
4642 expand_builtin, so there is no danger of infinite recursion here. */
4643 start_sequence ();
4644
4645#ifdef EXPAND_BUILTIN_SAVEREGS
4646 /* Do whatever the machine needs done in this case. */
4647 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
4648#else
4649 /* The register where the function returns its value
4650 is likely to have something else in it, such as an argument.
4651 So preserve that register around the call. */
4652 if (value_mode != VOIDmode)
4653 {
4654 valreg = hard_libcall_value (value_mode);
4655 saved_valreg = gen_reg_rtx (value_mode);
4656 emit_move_insn (saved_valreg, valreg);
4657 }
4658
4659 /* Generate the call, putting the value in a pseudo. */
4660 temp = expand_call (exp, target, ignore);
4661
4662 if (value_mode != VOIDmode)
4663 emit_move_insn (valreg, saved_valreg);
4664#endif
4665
4666 seq = get_insns ();
4667 end_sequence ();
4668
4669 saveregs_value = temp;
4670
4671 /* This won't work inside a SEQUENCE--it really has to be
4672 at the start of the function. */
4673 if (in_sequence_p ())
4674 {
4675 /* Better to do this than to crash. */
4676 error ("`va_start' used within `({...})'");
4677 return temp;
4678 }
4679
4680 /* Put the sequence after the NOTE that starts the function. */
4681 emit_insns_before (seq, NEXT_INSN (get_insns ()));
4682 return temp;
4683 }
4684
4685 /* __builtin_args_info (N) returns word N of the arg space info
4686 for the current function. The number and meanings of words
4687 are controlled by the definition of CUMULATIVE_ARGS. */
4688 case BUILT_IN_ARGS_INFO:
4689 {
4690 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4691 int i;
4692 int *word_ptr = (int *) &current_function_args_info;
4693 tree type, elts, result;
4694
4695 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
4696 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
4697 __FILE__, __LINE__);
4698
4699 if (arglist != 0)
4700 {
4701 tree arg = TREE_VALUE (arglist);
4702 if (TREE_CODE (arg) != INTEGER_CST)
4703 error ("argument of __builtin_args_info must be constant");
4704 else
4705 {
4706 int wordnum = TREE_INT_CST_LOW (arg);
4707
4708 if (wordnum < 0 || wordnum >= nwords)
4709 error ("argument of __builtin_args_info out of range");
4710 else
4711 return gen_rtx (CONST_INT, VOIDmode, word_ptr[wordnum]);
4712 }
4713 }
4714 else
4715 error ("missing argument in __builtin_args_info");
4716
4717 return const0_rtx;
4718
4719#if 0
4720 for (i = 0; i < nwords; i++)
4721 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
4722
4723 type = build_array_type (integer_type_node,
4724 build_index_type (build_int_2 (nwords, 0)));
4725 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
4726 TREE_CONSTANT (result) = 1;
4727 TREE_STATIC (result) = 1;
4728 result = build (INDIRECT_REF, build_pointer_type (type), result);
4729 TREE_CONSTANT (result) = 1;
4730 return expand_expr (result, 0, VOIDmode, 0);
4731#endif
4732 }
4733
4734 /* Return the address of the first anonymous stack arg. */
4735 case BUILT_IN_NEXT_ARG:
4736 {
4737 tree fntype = TREE_TYPE (current_function_decl);
4738 if (!(TYPE_ARG_TYPES (fntype) != 0
4739 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4740 != void_type_node)))
4741 {
4742 error ("`va_start' used in function with fixed args");
4743 return const0_rtx;
4744 }
4745 }
4746
4747 return expand_binop (Pmode, add_optab,
4748 current_function_internal_arg_pointer,
4749 current_function_arg_offset_rtx,
4750 0, 0, OPTAB_LIB_WIDEN);
4751
4752 case BUILT_IN_CLASSIFY_TYPE:
4753 if (arglist != 0)
4754 {
4755 tree type = TREE_TYPE (TREE_VALUE (arglist));
4756 enum tree_code code = TREE_CODE (type);
4757 if (code == VOID_TYPE)
4758 return gen_rtx (CONST_INT, VOIDmode, void_type_class);
4759 if (code == INTEGER_TYPE)
4760 return gen_rtx (CONST_INT, VOIDmode, integer_type_class);
4761 if (code == CHAR_TYPE)
4762 return gen_rtx (CONST_INT, VOIDmode, char_type_class);
4763 if (code == ENUMERAL_TYPE)
4764 return gen_rtx (CONST_INT, VOIDmode, enumeral_type_class);
4765 if (code == BOOLEAN_TYPE)
4766 return gen_rtx (CONST_INT, VOIDmode, boolean_type_class);
4767 if (code == POINTER_TYPE)
4768 return gen_rtx (CONST_INT, VOIDmode, pointer_type_class);
4769 if (code == REFERENCE_TYPE)
4770 return gen_rtx (CONST_INT, VOIDmode, reference_type_class);
4771 if (code == OFFSET_TYPE)
4772 return gen_rtx (CONST_INT, VOIDmode, offset_type_class);
4773 if (code == REAL_TYPE)
4774 return gen_rtx (CONST_INT, VOIDmode, real_type_class);
4775 if (code == COMPLEX_TYPE)
4776 return gen_rtx (CONST_INT, VOIDmode, complex_type_class);
4777 if (code == FUNCTION_TYPE)
4778 return gen_rtx (CONST_INT, VOIDmode, function_type_class);
4779 if (code == METHOD_TYPE)
4780 return gen_rtx (CONST_INT, VOIDmode, method_type_class);
4781 if (code == RECORD_TYPE)
4782 return gen_rtx (CONST_INT, VOIDmode, record_type_class);
4783 if (code == UNION_TYPE)
4784 return gen_rtx (CONST_INT, VOIDmode, union_type_class);
4785 if (code == ARRAY_TYPE)
4786 return gen_rtx (CONST_INT, VOIDmode, array_type_class);
4787 if (code == STRING_TYPE)
4788 return gen_rtx (CONST_INT, VOIDmode, string_type_class);
4789 if (code == SET_TYPE)
4790 return gen_rtx (CONST_INT, VOIDmode, set_type_class);
4791 if (code == FILE_TYPE)
4792 return gen_rtx (CONST_INT, VOIDmode, file_type_class);
4793 if (code == LANG_TYPE)
4794 return gen_rtx (CONST_INT, VOIDmode, lang_type_class);
4795 }
4796 return gen_rtx (CONST_INT, VOIDmode, no_type_class);
4797
4798 case BUILT_IN_CONSTANT_P:
4799 if (arglist == 0)
4800 return const0_rtx;
4801 else
4802 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
4803 ? const1_rtx : const0_rtx);
4804
4805 case BUILT_IN_FRAME_ADDRESS:
4806 /* The argument must be a nonnegative integer constant.
4807 It counts the number of frames to scan up the stack.
4808 The value is the address of that frame. */
4809 case BUILT_IN_RETURN_ADDRESS:
4810 /* The argument must be a nonnegative integer constant.
4811 It counts the number of frames to scan up the stack.
4812 The value is the return address saved in that frame. */
4813 if (arglist == 0)
4814 /* Warning about missing arg was already issued. */
4815 return const0_rtx;
4816 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
4817 {
4818 error ("invalid arg to __builtin_return_address");
4819 return const0_rtx;
4820 }
4821 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
4822 {
4823 error ("invalid arg to __builtin_return_address");
4824 return const0_rtx;
4825 }
4826 else
4827 {
4828 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
4829 rtx tem = frame_pointer_rtx;
4830 int i;
4831
4832 /* Scan back COUNT frames to the specified frame. */
4833 for (i = 0; i < count; i++)
4834 {
4835 /* Assume the dynamic chain pointer is in the word that
4836 the frame address points to, unless otherwise specified. */
4837#ifdef DYNAMIC_CHAIN_ADDRESS
4838 tem = DYNAMIC_CHAIN_ADDRESS (tem);
4839#endif
4840 tem = memory_address (Pmode, tem);
4841 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
4842 }
4843
4844 /* For __builtin_frame_address, return what we've got. */
4845 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4846 return tem;
4847
4848 /* For __builtin_return_address,
4849 get the return address from that frame. */
4850#ifdef RETURN_ADDR_RTX
4851 return RETURN_ADDR_RTX (count, tem);
4852#else
4853 tem = memory_address (Pmode,
4854 plus_constant (tem, GET_MODE_SIZE (Pmode)));
4855 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
4856#endif
4857 }
4858
4859 case BUILT_IN_ALLOCA:
4860 if (arglist == 0
4861 /* Arg could be non-integer if user redeclared this fcn wrong. */
4862 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
4863 return const0_rtx;
4864 current_function_calls_alloca = 1;
4865 /* Compute the argument. */
4866 op0 = expand_expr (TREE_VALUE (arglist), 0, VOIDmode, 0);
4867
4868 /* Allocate the desired space. */
4869 target = allocate_dynamic_stack_space (op0, target);
4870
4871 /* Record the new stack level for nonlocal gotos. */
4872 if (nonlocal_goto_stack_level != 0)
4873 emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);
4874 return target;
4875
4876 case BUILT_IN_FFS:
4877 /* If not optimizing, call the library function. */
4878 if (!optimize)
4879 break;
4880
4881 if (arglist == 0
4882 /* Arg could be non-integer if user redeclared this fcn wrong. */
4883 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
4884 return const0_rtx;
4885
4886 /* Compute the argument. */
4887 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
4888 /* Compute ffs, into TARGET if possible.
4889 Set TARGET to wherever the result comes back. */
4890 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
4891 ffs_optab, op0, target, 1);
4892 if (target == 0)
4893 abort ();
4894 return target;
4895
4896 case BUILT_IN_STRLEN:
4897 /* If not optimizing, call the library function. */
4898 if (!optimize)
4899 break;
4900
4901 if (arglist == 0
4902 /* Arg could be non-pointer if user redeclared this fcn wrong. */
4903 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
4904 return const0_rtx;
4905 else
4906 {
4907 tree len = c_strlen (TREE_VALUE (arglist));
4908
4909 if (len == 0)
4910 break;
4911 return expand_expr (len, target, mode, 0);
4912 }
4913
4914 case BUILT_IN_STRCPY:
4915 /* If not optimizing, call the library function. */
4916 if (!optimize)
4917 break;
4918
4919 if (arglist == 0
4920 /* Arg could be non-pointer if user redeclared this fcn wrong. */
4921 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
4922 || TREE_CHAIN (arglist) == 0
4923 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
4924 return const0_rtx;
4925 else
4926 {
4927 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
4928
4929 if (len == 0)
4930 break;
4931
4932 len = size_binop (PLUS_EXPR, len, integer_one_node);
4933
4934 chainon (arglist, build_tree_list (0, len));
4935 }
4936
4937 /* Drops in. */
4938 case BUILT_IN_MEMCPY:
4939 /* If not optimizing, call the library function. */
4940 if (!optimize)
4941 break;
4942
4943 if (arglist == 0
4944 /* Arg could be non-pointer if user redeclared this fcn wrong. */
4945 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
4946 || TREE_CHAIN (arglist) == 0
4947 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
4948 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
4949 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
4950 return const0_rtx;
4951 else
4952 {
4953 tree dest = TREE_VALUE (arglist);
4954 tree src = TREE_VALUE (TREE_CHAIN (arglist));
4955 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4956
4957 int src_align
4958 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4959 int dest_align
4960 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4961 rtx dest_rtx;
4962
4963 /* If either SRC or DEST is not a pointer type, don't do
4964 this operation in-line. */
4965 if (src_align == 0 || dest_align == 0)
4966 {
4967 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
4968 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
4969 break;
4970 }
4971
4972 dest_rtx = expand_expr (dest, 0, Pmode, EXPAND_NORMAL);
4973
4974 /* Copy word part most expediently. */
4975 emit_block_move (gen_rtx (MEM, BLKmode,
4976 memory_address (BLKmode, dest_rtx)),
4977 gen_rtx (MEM, BLKmode,
4978 memory_address (BLKmode,
4979 expand_expr (src, 0, Pmode,
4980 EXPAND_NORMAL))),
4981 expand_expr (len, 0, VOIDmode, 0),
4982 MIN (src_align, dest_align));
4983 return dest_rtx;
4984 }
4985
4986/* These comparison functions need an instruction that returns an actual
4987 index. An ordinary compare that just sets the condition codes
4988 is not enough. */
4989#ifdef HAVE_cmpstrsi
4990 case BUILT_IN_STRCMP:
4991 /* If not optimizing, call the library function. */
4992 if (!optimize)
4993 break;
4994
4995 if (arglist == 0
4996 /* Arg could be non-pointer if user redeclared this fcn wrong. */
4997 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
4998 || TREE_CHAIN (arglist) == 0
4999 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5000 return const0_rtx;
5001 else if (!HAVE_cmpstrsi)
5002 break;
5003 {
5004 tree arg1 = TREE_VALUE (arglist);
5005 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5006 tree offset;
5007 tree len, len2;
5008
5009 len = c_strlen (arg1);
5010 if (len)
5011 len = size_binop (PLUS_EXPR, integer_one_node, len);
5012 len2 = c_strlen (arg2);
5013 if (len2)
5014 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5015
5016 /* If we don't have a constant length for the first, use the length
5017 of the second, if we know it. We don't require a constant for
5018 this case; some cost analysis could be done if both are available
5019 but neither is constant. For now, assume they're equally cheap.
5020
5021 If both strings have constant lengths, use the smaller. This
5022 could arise if optimization results in strcmp being called with
5023 two fixed strings, or if the code was machine-generated. We should
5024 add some code to the `memcmp' handler below to deal with such
5025 situations, someday. */
5026 if (!len || TREE_CODE (len) != INTEGER_CST)
5027 {
5028 if (len2)
5029 len = len2;
5030 else if (len == 0)
5031 break;
5032 }
5033 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5034 {
5035 if (tree_int_cst_lt (len2, len))
5036 len = len2;
5037 }
5038
5039 chainon (arglist, build_tree_list (0, len));
5040 }
5041
5042 /* Drops in. */
5043 case BUILT_IN_MEMCMP:
5044 /* If not optimizing, call the library function. */
5045 if (!optimize)
5046 break;
5047
5048 if (arglist == 0
5049 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5050 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5051 || TREE_CHAIN (arglist) == 0
5052 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5053 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5054 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5055 return const0_rtx;
5056 else if (!HAVE_cmpstrsi)
5057 break;
5058 {
5059 tree arg1 = TREE_VALUE (arglist);
5060 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5061 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5062 rtx result;
5063
5064 int arg1_align
5065 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5066 int arg2_align
5067 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5068 enum machine_mode insn_mode
5069 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5070
5071 /* If we don't have POINTER_TYPE, call the function. */
5072 if (arg1_align == 0 || arg2_align == 0)
5073 {
5074 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5075 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5076 break;
5077 }
5078
5079 /* Make a place to write the result of the instruction. */
5080 result = target;
5081 if (! (result != 0
5082 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5083 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5084 result = gen_reg_rtx (insn_mode);
5085
5086 emit_insn (gen_cmpstrsi (result,
5087 gen_rtx (MEM, BLKmode,
5088 expand_expr (arg1, 0, Pmode, EXPAND_NORMAL)),
5089 gen_rtx (MEM, BLKmode,
5090 expand_expr (arg2, 0, Pmode, EXPAND_NORMAL)),
5091 expand_expr (len, 0, VOIDmode, 0),
5092 gen_rtx (CONST_INT, VOIDmode,
5093 MIN (arg1_align, arg2_align))));
5094
5095 /* Return the value in the proper mode for this function. */
5096 mode = TYPE_MODE (TREE_TYPE (exp));
5097 if (GET_MODE (result) == mode)
5098 return result;
5099 else if (target != 0)
5100 {
5101 convert_move (target, result, 0);
5102 return target;
5103 }
5104 else
5105 return convert_to_mode (mode, result, 0);
5106 }
5107#else
5108 case BUILT_IN_STRCMP:
5109 case BUILT_IN_MEMCMP:
5110 break;
5111#endif
5112
5113 default: /* just do library call, if unknown builtin */
5114 error ("built-in function %s not currently supported",
5115 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5116 }
5117
5118 /* The switch statement above can drop through to cause the function
5119 to be called normally. */
5120
5121 return expand_call (exp, target, ignore);
5122}
5123\f
5124/* Expand code for a post- or pre- increment or decrement
5125 and return the RTX for the result.
5126 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
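/* E.g. for `i++' whose value is used, POST is 1: the old value of
   I is what we return, and the addition itself is queued or
   emitted afterward.  */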
5127
5128static rtx
5129expand_increment (exp, post)
5130 register tree exp;
5131 int post;
5132{
5133 register rtx op0, op1;
5134 register rtx temp, value;
5135 register tree incremented = TREE_OPERAND (exp, 0);
5136 optab this_optab = add_optab;
5137 int icode;
5138 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5139 int op0_is_copy = 0;
5140
5141 /* Stabilize any component ref that might need to be
5142 evaluated more than once below. */
5143 if (TREE_CODE (incremented) == BIT_FIELD_REF
5144 || (TREE_CODE (incremented) == COMPONENT_REF
5145 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5146 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5147 incremented = stabilize_reference (incremented);
5148
5149 /* Compute the operands as RTX.
5150 Note whether OP0 is the actual lvalue or a copy of it:
5151 I believe it is a copy iff it is a register and insns were
5152 generated in computing it or if it is a SUBREG (generated when
5153 the low-order field in a register was referenced). */
5154 temp = get_last_insn ();
5155 op0 = expand_expr (incremented, 0, VOIDmode, 0);
5156 op0_is_copy = (GET_CODE (op0) == SUBREG
5157 || (GET_CODE (op0) == REG && temp != get_last_insn ()));
5158 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5159
5160 /* Decide whether incrementing or decrementing. */
5161 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5162 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5163 this_optab = sub_optab;
5164
5165 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5166 then we cannot just increment OP0. We must
5167 therefore contrive to increment the original value.
5168 Then we can return OP0 since it is a copy of the old value. */
5169 if (op0_is_copy)
5170 {
5171 /* This is the easiest way to increment the value wherever it is.
5172 Problems with multiple evaluation of INCREMENTED
5173 are prevented because either (1) it is a component_ref,
5174 in which case it was stabilized above, or (2) it is an array_ref
5175 with constant index in an array in a register, which is
5176 safe to reevaluate. */
5177 tree newexp = build ((this_optab == add_optab
5178 ? PLUS_EXPR : MINUS_EXPR),
5179 TREE_TYPE (exp),
5180 incremented,
5181 TREE_OPERAND (exp, 1));
5182 temp = expand_assignment (incremented, newexp, ! post, 0);
5183 return post ? op0 : temp;
5184 }
5185
5186 /* Convert decrement by a constant into a negative increment. */
5187 if (this_optab == sub_optab
5188 && GET_CODE (op1) == CONST_INT)
5189 {
5190 op1 = gen_rtx (CONST_INT, VOIDmode, - INTVAL (op1));
5191 this_optab = add_optab;
5192 }
5193
5194 if (post)
5195 {
5196 /* We have a true reference to the value in OP0.
5197 If there is an insn to add or subtract in this mode, queue it. */
5198
5199#if 0 /* Turned off to avoid making extra insn for indexed memref. */
5200 op0 = stabilize (op0);
5201#endif
5202
5203 icode = (int) this_optab->handlers[(int) mode].insn_code;
5204 if (icode != (int) CODE_FOR_nothing
5205 /* Make sure that OP0 is valid for operands 0 and 1
5206 of the insn we want to queue. */
5207 && (*insn_operand_predicate[icode][0]) (op0, mode)
5208 && (*insn_operand_predicate[icode][1]) (op0, mode))
5209 {
5210 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5211 op1 = force_reg (mode, op1);
5212
5213 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5214 }
5215 }
5216
5217 /* Preincrement, or we can't increment with one simple insn. */
5218 if (post)
5219 /* Save a copy of the value before inc or dec, to return it later. */
5220 temp = value = copy_to_reg (op0);
5221 else
5222 /* Arrange to return the incremented value. */
5223 /* Copy the rtx because expand_binop will protect it from the queue,
5224 and the result of that protection would be invalid for us to return
5225 if our caller does emit_queue before using our result. */
5226 temp = copy_rtx (value = op0);
5227
5228 /* Increment however we can. */
5229 op1 = expand_binop (mode, this_optab, value, op1, op0,
5230 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5231 /* Make sure the value is stored into OP0. */
5232 if (op1 != op0)
5233 emit_move_insn (op0, op1);
5234
5235 return temp;
5236}
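#if 0
/* Minimal model (hedged sketch, not compiler code) of the post/pre
   distinction handled above, assuming plain int arithmetic;
   MODEL_INCREMENT is hypothetical.  POST returns the saved old value
   (like the copy_to_reg branch above), PRE returns the updated one;
   decrement is the AMOUNT < 0 case, matching the conversion of
   decrement into a negative increment.  */
static int
model_increment (lval, amount, post)
     int *lval, amount, post;
{
  int old = *lval;		/* like temp = value = copy_to_reg (op0) */
  *lval = old + amount;		/* like expand_binop (..., add_optab, ...) */
  return post ? old : *lval;
}
#endif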
5237\f
5238/* Expand all function calls contained within EXP, innermost ones first.
5239 But don't look within expressions that have sequence points.
5240 For each CALL_EXPR, record the rtx for its value
5241 in the CALL_EXPR_RTL field. */
5242
5243static void
5244preexpand_calls (exp)
5245 tree exp;
5246{
5247 register int nops, i;
5248 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5249
5250 if (! do_preexpand_calls)
5251 return;
5252
5253 /* Only expressions and references can contain calls. */
5254
5255 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5256 return;
5257
5258 switch (TREE_CODE (exp))
5259 {
5260 case CALL_EXPR:
5261 /* Do nothing if already expanded. */
5262 if (CALL_EXPR_RTL (exp) != 0)
5263 return;
5264
5265 /* Do nothing to built-in functions. */
5266 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5267 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5268 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5269 CALL_EXPR_RTL (exp) = expand_call (exp, 0, 0, 0);
5270 return;
5271
5272 case COMPOUND_EXPR:
5273 case COND_EXPR:
5274 case TRUTH_ANDIF_EXPR:
5275 case TRUTH_ORIF_EXPR:
5276 /* If we find one of these, then we can be sure
5277 the pending stack adjustment will be done when it is expanded
5278 (since these make jumps). Do the adjustment now, so that if this
5279 is inside a function argument, we don't get the adjustment
5280 after some other arguments have already been pushed. */
5281 do_pending_stack_adjust ();
5282 return;
5283
5284 case BLOCK:
5285 case RTL_EXPR:
5286 case WITH_CLEANUP_EXPR:
5287 return;
5288
5289 case SAVE_EXPR:
5290 if (SAVE_EXPR_RTL (exp) != 0)
5291 return;
5292 }
5293
5294 nops = tree_code_length[(int) TREE_CODE (exp)];
5295 for (i = 0; i < nops; i++)
5296 if (TREE_OPERAND (exp, i) != 0)
5297 {
5298 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5299 if (type == 'e' || type == '<' || type == '1' || type == '2'
5300 || type == 'r')
5301 preexpand_calls (TREE_OPERAND (exp, i));
5302 }
5303}
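#if 0
/* Hedged sketch: the class letters tested twice above come from
   TREE_CODE_CLASS; 'e' is an expression, '<' a comparison, '1' and
   '2' unary and binary arithmetic, 'r' a reference.  This
   hypothetical predicate is what both tests amount to; anything in
   another class (constants, declarations, nodes with sequence
   points) is deliberately left alone.  */
static int
class_may_contain_calls_p (exp)
     tree exp;
{
  int class = TREE_CODE_CLASS (TREE_CODE (exp));
  return (class == 'e' || class == '<' || class == '1'
	  || class == '2' || class == 'r');
}
#endif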
5304\f
5305/* At the start of a function, record that we have no previously-pushed
5306 arguments waiting to be popped. */
5307
5308void
5309init_pending_stack_adjust ()
5310{
5311 pending_stack_adjust = 0;
5312}
5313
5314/* When exiting from function, if safe, clear out any pending stack adjust
5315 so the adjustment won't get done. */
5316
5317void
5318clear_pending_stack_adjust ()
5319{
5320#ifdef EXIT_IGNORE_STACK
5321 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5322 && ! (TREE_INLINE (current_function_decl) && ! flag_no_inline)
5323 && ! flag_inline_functions)
5324 pending_stack_adjust = 0;
5325#endif
5326}
5327
5328/* Pop any previously-pushed arguments that have not been popped yet. */
5329
5330void
5331do_pending_stack_adjust ()
5332{
5333 if (inhibit_defer_pop == 0)
5334 {
5335 if (pending_stack_adjust != 0)
5336 adjust_stack (gen_rtx (CONST_INT, VOIDmode, pending_stack_adjust));
5337 pending_stack_adjust = 0;
5338 }
5339}
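#if 0
/* Hedged usage sketch of the deferred-pop protocol these three
   functions implement.  A caller that has just seen a call return
   with, say, 8 bytes of arguments still on the stack does

       pending_stack_adjust += 8;

   and relies on a later do_pending_stack_adjust (), e.g. before a
   label or jump, so that the argument space of several calls can be
   freed with a single adjust_stack.  */
#endif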
5340
5341/* Expand all cleanups up to OLD_CLEANUPS.
5342 Needed here, and also for language-dependent calls. */
5343
5344void
5345expand_cleanups_to (old_cleanups)
5346 tree old_cleanups;
5347{
5348 while (cleanups_this_call != old_cleanups)
5349 {
5350 expand_expr (TREE_VALUE (cleanups_this_call), 0, VOIDmode, 0);
5351 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5352 }
5353}
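#if 0
/* Hedged usage sketch: callers snapshot the cleanup list, expand
   something that may chain new cleanups onto it, then unwind back
   to the snapshot.

       tree old_cleanups = cleanups_this_call;
       ... expand an argument that may queue cleanups ...
       expand_cleanups_to (old_cleanups);
   */
#endif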
5354\f
5355/* Expand conditional expressions. */
5356
5357/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5358 LABEL is an rtx of code CODE_LABEL, here and in all the
5359 functions in this file. */
5360
5361void
5362jumpifnot (exp, label)
5363 tree exp;
5364 rtx label;
5365{
5366 do_jump (exp, label, 0);
5367}
5368
5369/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5370
5371void
5372jumpif (exp, label)
5373 tree exp;
5374 rtx label;
5375{
5376 do_jump (exp, 0, label);
5377}
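#if 0
/* Hedged sketch of how a front end might use these two entry points
   to expand `if (cond) then_stmt; else else_stmt;'.  The function
   itself is hypothetical; the calls it makes are the real ones.  */
static void
sketch_expand_if (cond, then_stmt, else_stmt)
     tree cond, then_stmt, else_stmt;
{
  rtx else_label = gen_label_rtx ();
  rtx end_label = gen_label_rtx ();

  jumpifnot (cond, else_label);	/* false: skip the THEN part */
  expand_expr (then_stmt, const0_rtx, VOIDmode, 0);
  emit_jump (end_label);
  emit_label (else_label);
  expand_expr (else_stmt, const0_rtx, VOIDmode, 0);
  emit_label (end_label);
}
#endif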
5378
5379/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5380 the result is zero, or IF_TRUE_LABEL if the result is one.
5381 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5382 meaning fall through in that case.
5383
5384 This function is responsible for optimizing cases such as
5385 &&, || and comparison operators in EXP. */
5386
5387void
5388do_jump (exp, if_false_label, if_true_label)
5389 tree exp;
5390 rtx if_false_label, if_true_label;
5391{
5392 register enum tree_code code = TREE_CODE (exp);
5393 /* Some cases need to create a label to jump to
5394 in order to properly fall through.
5395 These cases set DROP_THROUGH_LABEL nonzero. */
5396 rtx drop_through_label = 0;
5397 rtx temp;
5398 rtx comparison = 0;
5399 int i;
5400 tree type;
5401
5402 emit_queue ();
5403
5404 switch (code)
5405 {
5406 case ERROR_MARK:
5407 break;
5408
5409 case INTEGER_CST:
5410 temp = integer_zerop (exp) ? if_false_label : if_true_label;
5411 if (temp)
5412 emit_jump (temp);
5413 break;
5414
5415#if 0
5416 /* This is not true with #pragma weak. */
5417 case ADDR_EXPR:
5418 /* The address of something can never be zero. */
5419 if (if_true_label)
5420 emit_jump (if_true_label);
5421 break;
5422#endif
5423
5424 case NOP_EXPR:
5425 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
5426 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
5427 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
5428 goto normal;
5429 case CONVERT_EXPR:
5430 /* If we are narrowing the operand, we have to do the compare in the
5431 narrower mode. */
5432 if ((TYPE_PRECISION (TREE_TYPE (exp))
5433 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5434 goto normal;
5435 case NON_LVALUE_EXPR:
5436 case REFERENCE_EXPR:
5437 case ABS_EXPR:
5438 case NEGATE_EXPR:
5439 case LROTATE_EXPR:
5440 case RROTATE_EXPR:
5441 /* These cannot change zero->non-zero or vice versa. */
5442 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5443 break;
5444
5445#if 0
5446 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
5447 a test, and can be longer if the test is eliminated. */
5448 case PLUS_EXPR:
5449 /* Reduce to minus. */
5450 exp = build (MINUS_EXPR, TREE_TYPE (exp),
5451 TREE_OPERAND (exp, 0),
5452 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
5453 TREE_OPERAND (exp, 1))));
5454 /* Process as MINUS. */
5455#endif
5456
5457 case MINUS_EXPR:
5458 /* Non-zero iff operands of minus differ. */
5459 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
5460 TREE_OPERAND (exp, 0),
5461 TREE_OPERAND (exp, 1)),
5462 NE, NE);
5463 break;
5464
5465 case BIT_AND_EXPR:
5466 /* If we are AND'ing with a small constant, do this comparison in the
5467 smallest type that fits. If the machine doesn't have comparisons
5468 that small, it will be converted back to the wider comparison.
5469 This helps if we are testing the sign bit of a narrower object.
5470 combine can't do this for us because it can't know whether a
5471 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
5472
5473 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5474 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_INT
5475 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
5476 && (type = type_for_size (i + 1, 1)) != 0
5477 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)))
5478 {
5479 do_jump (convert (type, exp), if_false_label, if_true_label);
5480 break;
5481 }
5482 goto normal;
5483
5484 case TRUTH_NOT_EXPR:
5485 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
5486 break;
5487
5488 case TRUTH_ANDIF_EXPR:
5489 if (if_false_label == 0)
5490 if_false_label = drop_through_label = gen_label_rtx ();
5491 do_jump (TREE_OPERAND (exp, 0), if_false_label, 0);
5492 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5493 break;
5494
5495 case TRUTH_ORIF_EXPR:
5496 if (if_true_label == 0)
5497 if_true_label = drop_through_label = gen_label_rtx ();
5498 do_jump (TREE_OPERAND (exp, 0), 0, if_true_label);
5499 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5500 break;
5501
5502 case COMPOUND_EXPR:
5503 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5504 free_temp_slots ();
5505 emit_queue ();
5506 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5507 break;
5508
5509 case COMPONENT_REF:
5510 case BIT_FIELD_REF:
5511 case ARRAY_REF:
5512 {
5513 int bitsize, bitpos, unsignedp;
5514 enum machine_mode mode;
5515 tree type;
5516 int volatilep = 0;
5517
5518 /* Get description of this reference. We don't actually care
5519 about the underlying object here. */
5520 get_inner_reference (exp, &bitsize, &bitpos, &mode, &unsignedp,
5521 &volatilep);
5522
5523 type = type_for_size (bitsize, unsignedp);
5524 if (type != 0
5525 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)))
5526 {
5527 do_jump (convert (type, exp), if_false_label, if_true_label);
5528 break;
5529 }
5530 goto normal;
5531 }
5532
5533 case COND_EXPR:
5534 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
5535 if (integer_onep (TREE_OPERAND (exp, 1))
5536 && integer_zerop (TREE_OPERAND (exp, 2)))
5537 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5538
5539 else if (integer_zerop (TREE_OPERAND (exp, 1))
5540 && integer_onep (TREE_OPERAND (exp, 2)))
5541 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
5542
5543 else
5544 {
5545 register rtx label1 = gen_label_rtx ();
5546 drop_through_label = gen_label_rtx ();
5547 do_jump (TREE_OPERAND (exp, 0), label1, 0);
5548 /* Now the THEN-expression. */
5549 do_jump (TREE_OPERAND (exp, 1),
5550 if_false_label ? if_false_label : drop_through_label,
5551 if_true_label ? if_true_label : drop_through_label);
5552 emit_label (label1);
5553 /* Now the ELSE-expression. */
5554 do_jump (TREE_OPERAND (exp, 2),
5555 if_false_label ? if_false_label : drop_through_label,
5556 if_true_label ? if_true_label : drop_through_label);
5557 }
5558 break;
5559
5560 case EQ_EXPR:
5561 if (integer_zerop (TREE_OPERAND (exp, 1)))
5562 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
5563 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5564 == MODE_INT)
5565 &&
5566 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5567 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
5568 else
5569 comparison = compare (exp, EQ, EQ);
5570 break;
5571
5572 case NE_EXPR:
5573 if (integer_zerop (TREE_OPERAND (exp, 1)))
5574 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5575 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5576 == MODE_INT)
5577 &&
5578 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5579 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
5580 else
5581 comparison = compare (exp, NE, NE);
5582 break;
5583
5584 case LT_EXPR:
5585 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5586 == MODE_INT)
5587 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5588 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
5589 else
5590 comparison = compare (exp, LT, LTU);
5591 break;
5592
5593 case LE_EXPR:
5594 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5595 == MODE_INT)
5596 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5597 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
5598 else
5599 comparison = compare (exp, LE, LEU);
5600 break;
5601
5602 case GT_EXPR:
5603 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5604 == MODE_INT)
5605 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5606 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
5607 else
5608 comparison = compare (exp, GT, GTU);
5609 break;
5610
5611 case GE_EXPR:
5612 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5613 == MODE_INT)
5614 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5615 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
5616 else
5617 comparison = compare (exp, GE, GEU);
5618 break;
5619
5620 default:
5621 normal:
5622 temp = expand_expr (exp, 0, VOIDmode, 0);
5623#if 0
5624 /* This is not needed any more and produces poor code, since it causes
5625 comparisons and tests from non-SI objects to have different code
5626 sequences. */
5627 /* Copy to register to avoid generating bad insns by cse
5628 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
5629 if (!cse_not_expected && GET_CODE (temp) == MEM)
5630 temp = copy_to_reg (temp);
5631#endif
5632 do_pending_stack_adjust ();
5633 if (GET_CODE (temp) == CONST_INT)
5634 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
5635 else if (GET_CODE (temp) == LABEL_REF)
5636 comparison = const_true_rtx;
5637 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5638 && !can_compare_p (GET_MODE (temp)))
5639 /* Note that swapping the labels gives us not-equal. */
5640 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
5641 else if (GET_MODE (temp) != VOIDmode)
5642 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
5643 NE, 1, GET_MODE (temp), 0, 0);
5644 else
5645 abort ();
5646 }
5647
5648 /* Do any postincrements in the expression that was tested. */
5649 emit_queue ();
5650
5651 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
5652 straight into a conditional jump instruction as the jump condition.
5653 Otherwise, all the work has been done already. */
5654
5655 if (comparison == const_true_rtx)
5656 {
5657 if (if_true_label)
5658 emit_jump (if_true_label);
5659 }
5660 else if (comparison == const0_rtx)
5661 {
5662 if (if_false_label)
5663 emit_jump (if_false_label);
5664 }
5665 else if (comparison)
5666 do_jump_for_compare (comparison, if_false_label, if_true_label);
5667
5668 free_temp_slots ();
5669
5670 if (drop_through_label)
5671 emit_label (drop_through_label);
5672}
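#if 0
/* Worked example (hedged): do_jump on `a && b' with only
   IF_TRUE_LABEL supplied.  The TRUTH_ANDIF_EXPR case makes a
   drop-through label DT to stand in for the missing false label:

       do_jump (a, DT, 0);		   jump to DT if a is zero
       do_jump (b, DT, IF_TRUE_LABEL);	   jump to IF_TRUE_LABEL if b nonzero
   DT:					   fall through

   `a || b' is symmetrical, with the made-up label standing in for a
   missing true label.  */
#endif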
5673\f
5674/* Given a comparison expression EXP for values too wide to be compared
5675 with one insn, test the comparison and jump to the appropriate label.
5676 The code of EXP is ignored; we always test GT if SWAP is 0,
5677 and LT if SWAP is 1. */
5678
5679static void
5680do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
5681 tree exp;
5682 int swap;
5683 rtx if_false_label, if_true_label;
5684{
5685 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), 0, VOIDmode, 0);
5686 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), 0, VOIDmode, 0);
5687 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5688 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
5689 rtx drop_through_label = 0;
5690 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
5691 int i;
5692
5693 if (! if_true_label || ! if_false_label)
5694 drop_through_label = gen_label_rtx ();
5695 if (! if_true_label)
5696 if_true_label = drop_through_label;
5697 if (! if_false_label)
5698 if_false_label = drop_through_label;
5699
5700 /* Compare a word at a time, high order first. */
5701 for (i = 0; i < nwords; i++)
5702 {
5703 rtx comp;
5704 rtx op0_word, op1_word;
5705
5706 if (WORDS_BIG_ENDIAN)
5707 {
5708 op0_word = operand_subword_force (op0, i, mode);
5709 op1_word = operand_subword_force (op1, i, mode);
5710 }
5711 else
5712 {
5713 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
5714 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
5715 }
5716
5717 /* All but the high-order word must be compared as unsigned. */
5718 comp = compare_from_rtx (op0_word, op1_word,
5719 (unsignedp || i > 0) ? GTU : GT,
5720 unsignedp, word_mode, 0, 0);
5721 if (comp == const_true_rtx)
5722 emit_jump (if_true_label);
5723 else if (comp != const0_rtx)
5724 do_jump_for_compare (comp, 0, if_true_label);
5725
5726 /* Consider lower words only if these are equal. */
5727 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
5728 0, 0);
5729 if (comp == const_true_rtx)
5730 emit_jump (if_false_label);
5731 else if (comp != const0_rtx)
5732 do_jump_for_compare (comp, 0, if_false_label);
5733 }
5734
5735 if (if_false_label)
5736 emit_jump (if_false_label);
5737 if (drop_through_label)
5738 emit_label (drop_through_label);
5739}
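#if 0
/* Standalone model (hedged, hypothetical function) of the
   word-at-a-time comparison above: the high-order word is compared
   with the real signedness, every lower word as unsigned, and lower
   words matter only when all higher words are equal.  Words are
   assumed stored with op[nwords - 1] the most significant.  */
static int
model_gt_by_parts (op0, op1, nwords, unsignedp)
     unsigned int *op0, *op1;
     int nwords, unsignedp;
{
  int i;

  for (i = nwords - 1; i >= 0; i--)
    {
      if (i == nwords - 1 && ! unsignedp
	  ? (int) op0[i] > (int) op1[i]
	  : op0[i] > op1[i])
	return 1;
      if (op0[i] != op1[i])
	return 0;	/* this word decides; lower words are moot */
    }
  return 0;		/* all words equal, so not greater */
}
#endif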
5740
5741/* Given an EQ_EXPR expression EXP for values too wide to be compared
5742 with one insn, test the comparison and jump to the appropriate label. */
5743
5744static void
5745do_jump_by_parts_equality (exp, if_false_label, if_true_label)
5746 tree exp;
5747 rtx if_false_label, if_true_label;
5748{
5749 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5750 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5751 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5752 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
5753 int i;
5754 rtx drop_through_label = 0;
5755
5756 if (! if_false_label)
5757 drop_through_label = if_false_label = gen_label_rtx ();
5758
5759 for (i = 0; i < nwords; i++)
5760 {
5761 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
5762 operand_subword_force (op1, i, mode),
5763 EQ, 0, word_mode, 0, 0);
5764 if (comp == const_true_rtx)
5765 emit_jump (if_false_label);
5766 else if (comp != const0_rtx)
5767 do_jump_for_compare (comp, if_false_label, 0);
5768 }
5769
5770 if (if_true_label)
5771 emit_jump (if_true_label);
5772 if (drop_through_label)
5773 emit_label (drop_through_label);
5774}
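#if 0
/* Hedged model of the equality loop above: two multiword values are
   equal exactly when every word is equal, so the first unequal word
   sends control to the false label.  A hypothetical standalone
   version:  */
static int
model_eq_by_parts (op0, op1, nwords)
     unsigned int *op0, *op1;
     int nwords;
{
  int i;

  for (i = 0; i < nwords; i++)
    if (op0[i] != op1[i])
      return 0;
  return 1;
}
#endif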
5775\f
5776/* Jump according to whether OP0 is 0.
5777 We assume that OP0 has an integer mode that is too wide
5778 for the available compare insns. */
5779
5780static void
5781do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
5782 rtx op0;
5783 rtx if_false_label, if_true_label;
5784{
5785 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
5786 int i;
5787 rtx drop_through_label = 0;
5788
5789 if (! if_false_label)
5790 drop_through_label = if_false_label = gen_label_rtx ();
5791
5792 for (i = 0; i < nwords; i++)
5793 {
5794 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
5795 GET_MODE (op0)),
5796 const0_rtx, EQ, 0, word_mode, 0, 0);
5797 if (comp == const_true_rtx)
5798 emit_jump (if_false_label);
5799 else if (comp != const0_rtx)
5800 do_jump_for_compare (comp, if_false_label, 0);
5801 }
5802
5803 if (if_true_label)
5804 emit_jump (if_true_label);
5805 if (drop_through_label)
5806 emit_label (drop_through_label);
5807}
5808
5809/* Given a comparison expression in rtl form, output conditional branches to
5810 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
5811
5812static void
5813do_jump_for_compare (comparison, if_false_label, if_true_label)
5814 rtx comparison, if_false_label, if_true_label;
5815{
5816 if (if_true_label)
5817 {
5818 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
5819 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
5820 else
5821 abort ();
5822
5823 if (if_false_label)
5824 emit_jump (if_false_label);
5825 }
5826 else if (if_false_label)
5827 {
5828 rtx insn;
5829 rtx prev = PREV_INSN (get_last_insn ());
5830 rtx branch = 0;
5831
5832 /* Output the branch with the opposite condition. Then try to invert
5833 what is generated. If more than one insn is a branch, or if the
5834 branch is not the last insn written, abort. If we can't invert
5835 the branch, make a true label, redirect this jump to that,
5836 emit a jump to the false label and define the true label. */
5837
5838 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
5839 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
5840 else
5841 abort ();
5842
5843 /* Here we get the insn before what was just emitted.
5844 On some machines, emitting the branch can discard
5845 the previous compare insn and emit a replacement. */
5846 if (prev == 0)
5847 /* If there's only one preceding insn... */
5848 insn = get_insns ();
5849 else
5850 insn = NEXT_INSN (prev);
5851
5852 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
5853 if (GET_CODE (insn) == JUMP_INSN)
5854 {
5855 if (branch)
5856 abort ();
5857 branch = insn;
5858 }
5859
5860 if (branch != get_last_insn ())
5861 abort ();
5862
5863 if (! invert_jump (branch, if_false_label))
5864 {
5865 if_true_label = gen_label_rtx ();
5866 redirect_jump (branch, if_true_label);
5867 emit_jump (if_false_label);
5868 emit_label (if_true_label);
5869 }
5870 }
5871}
5872\f
5873/* Generate code for a comparison expression EXP
5874 (including code to compute the values to be compared)
5875 and set (CC0) according to the result.
5876 SIGNED_CODE should be the rtx operation for this comparison for
5877 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
5878
5879 We force a stack adjustment unless there are currently
5880 things pushed on the stack that aren't yet used. */
5881
5882static rtx
5883compare (exp, signed_code, unsigned_code)
5884 register tree exp;
5885 enum rtx_code signed_code, unsigned_code;
5886{
5887 register rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5888 register rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5889 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
5890 register enum machine_mode mode = TYPE_MODE (type);
5891 int unsignedp = TREE_UNSIGNED (type);
5892 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
5893
5894 return compare_from_rtx (op0, op1, code, unsignedp, mode,
5895 ((mode == BLKmode)
5896 ? expr_size (TREE_OPERAND (exp, 0)) : 0),
5897 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
5898}
5899
5900/* Like compare but expects the values to compare as two rtx's.
5901 The decision as to signed or unsigned comparison must be made by the caller.
5902
5903 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
5904 compared.
5905
5906 If ALIGN is non-zero, it is the alignment of this type; if zero, the
5907 size of MODE should be used. */
5908
5909rtx
5910compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
5911 register rtx op0, op1;
5912 enum rtx_code code;
5913 int unsignedp;
5914 enum machine_mode mode;
5915 rtx size;
5916 int align;
5917{
5918 /* If one operand is constant, make it the second one. */
5919
5920 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
5921 {
5922 rtx tem = op0;
5923 op0 = op1;
5924 op1 = tem;
5925 code = swap_condition (code);
5926 }
5927
5928 if (flag_force_mem)
5929 {
5930 op0 = force_not_mem (op0);
5931 op1 = force_not_mem (op1);
5932 }
5933
5934 do_pending_stack_adjust ();
5935
5936 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
5937 return simplify_relational_operation (code, mode, op0, op1);
5938
5939 /* If this is a signed equality comparison, we can do it as an
5940 unsigned comparison since zero-extension is cheaper than sign
5941 extension and comparisons with zero are done as unsigned. If we
5942 are comparing against a constant, we must convert it to what it
5943 would look like unsigned. */
5944 if ((code == EQ || code == NE) && ! unsignedp
5945 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_INT)
5946 {
5947 if (GET_CODE (op1) == CONST_INT
5948 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
5949 op1 = gen_rtx (CONST_INT, VOIDmode,
5950 INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
5951 unsignedp = 1;
5952 }
5953
5954 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
5955
5956 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
5957}
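#if 0
/* Worked example (hedged) of the signed-to-unsigned rewrite above:
   a signed QImode test `x == -1' becomes the unsigned test
   `x == 0xff', because

       INTVAL (op1) = -1,  GET_MODE_MASK (QImode) = 0xff,
       -1 & 0xff = 0xff  !=  -1,

   so OP1 is replaced by (const_int 255) and UNSIGNEDP set.  */
#endif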
5958\f
5959/* Generate code to calculate EXP using a store-flag instruction
5960 and return an rtx for the result.
5961 If TARGET is nonzero, store the result there if convenient.
5962
5963 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
5964 cheap.
5965
5966 Return zero if there is no suitable set-flag instruction
5967 available on this machine.
5968
5969 Once expand_expr has been called on the arguments of the comparison,
5970 we are committed to doing the store flag, since it is not safe to
5971 re-evaluate the expression. We emit the store-flag insn by calling
5972 emit_store_flag, but only expand the arguments if we have a reason
5973 to believe that emit_store_flag will be successful. If we think that
5974 it will, but it isn't, we have to simulate the store-flag with a
5975 set/jump/set sequence. */
5976
5977static rtx
5978do_store_flag (exp, target, mode, only_cheap)
5979 tree exp;
5980 rtx target;
5981 enum machine_mode mode;
5982 int only_cheap;
5983{
5984 enum rtx_code code;
5985 tree arg0 = TREE_OPERAND (exp, 0);
5986 tree arg1 = TREE_OPERAND (exp, 1);
5987 tree tem;
5988 tree type = TREE_TYPE (arg0);
5989 enum machine_mode operand_mode = TYPE_MODE (type);
5990 int unsignedp = TREE_UNSIGNED (type);
5991 rtx op0, op1;
5992 enum insn_code icode;
5993 rtx subtarget = target;
5994 rtx result, label, pattern, jump_pat;
5995
5996 /* We won't bother with BLKmode store-flag operations because it would mean
5997 passing a lot of information to emit_store_flag. */
5998 if (operand_mode == BLKmode)
5999 return 0;
6000
6001 while (TREE_CODE (arg0) == NON_LVALUE_EXPR)
6002 arg0 = TREE_OPERAND (arg0, 0);
6003
6004 while (TREE_CODE (arg1) == NON_LVALUE_EXPR)
6005 arg1 = TREE_OPERAND (arg1, 0);
6006
6007 /* Get the rtx comparison code to use. We know that EXP is a comparison
6008 operation of some type. Some comparisons against 1 and -1 can be
6009 converted to comparisons with zero. Do so here so that the tests
6010 below will be aware that we have a comparison with zero. These
6011 tests will not catch constants in the first operand, but constants
6012 are rarely passed as the first operand. */
6013
6014 switch (TREE_CODE (exp))
6015 {
6016 case EQ_EXPR:
6017 code = EQ;
6018 break;
6019 case NE_EXPR:
6020 code = NE;
6021 break;
6022 case LT_EXPR:
6023 if (integer_onep (arg1))
6024 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
6025 else
6026 code = unsignedp ? LTU : LT;
6027 break;
6028 case LE_EXPR:
6029 if (integer_all_onesp (arg1))
6030 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
6031 else
6032 code = unsignedp ? LEU : LE;
6033 break;
6034 case GT_EXPR:
6035 if (integer_all_onesp (arg1))
6036 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
6037 else
6038 code = unsignedp ? GTU : GT;
6039 break;
6040 case GE_EXPR:
6041 if (integer_onep (arg1))
6042 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
6043 else
6044 code = unsignedp ? GEU : GE;
6045 break;
6046 default:
6047 abort ();
6048 }
6049
6050 /* Put a constant second. */
6051 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
6052 {
6053 tem = arg0; arg0 = arg1; arg1 = tem;
6054 code = swap_condition (code);
6055 }
6056
6057 /* If this is an equality or inequality test of a single bit, we can
6058 do this by shifting the bit being tested to the low-order bit and
6059 masking the result with the constant 1. If the condition was EQ,
6060 we xor it with 1. This does not require an scc insn and is faster
6061 than an scc insn even if we have it. */
6062
6063 if ((code == NE || code == EQ)
6064 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6065 && integer_pow2p (TREE_OPERAND (arg0, 1))
6066 && TYPE_PRECISION (type) <= HOST_BITS_PER_INT)
6067 {
6068 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
6069 0, VOIDmode, 0)));
6070
6071 if (subtarget == 0 || GET_CODE (subtarget) != REG
6072 || GET_MODE (subtarget) != operand_mode
6073 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
6074 subtarget = 0;
6075
6076 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
6077
6078 if (bitnum != 0)
6079 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
6080 size_int (bitnum), target, 1);
6081
6082 if (GET_MODE (op0) != mode)
6083 op0 = convert_to_mode (mode, op0, 1);
6084
6085 if (bitnum != TYPE_PRECISION (type) - 1)
6086 op0 = expand_and (op0, const1_rtx, target);
6087
6088 if (code == EQ)
6089 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
6090 OPTAB_LIB_WIDEN);
6091
6092 return op0;
6093 }
6094
6095 /* Now see if we are likely to be able to do this. Return if not. */
6096 if (! can_compare_p (operand_mode))
6097 return 0;
6098 icode = setcc_gen_code[(int) code];
6099 if (icode == CODE_FOR_nothing
6100 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
6101 {
6102 /* We can only do this if it is one of the special cases that
6103 can be handled without an scc insn. */
6104 if ((code == LT && integer_zerop (arg1))
6105 || (! only_cheap && code == GE && integer_zerop (arg1)))
6106 ;
6107 else if (BRANCH_COST >= 0
6108 && ! only_cheap && (code == NE || code == EQ)
6109 && TREE_CODE (type) != REAL_TYPE
6110 && ((abs_optab->handlers[(int) operand_mode].insn_code
6111 != CODE_FOR_nothing)
6112 || (ffs_optab->handlers[(int) operand_mode].insn_code
6113 != CODE_FOR_nothing)))
6114 ;
6115 else
6116 return 0;
6117 }
6118
6119 preexpand_calls (exp);
6120 if (subtarget == 0 || GET_CODE (subtarget) != REG
6121 || GET_MODE (subtarget) != operand_mode
6122 || ! safe_from_p (subtarget, arg1))
6123 subtarget = 0;
6124
6125 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
6126 op1 = expand_expr (arg1, 0, VOIDmode, 0);
6127
6128 if (target == 0)
6129 target = gen_reg_rtx (mode);
6130
6131 result = emit_store_flag (target, code, op0, op1, operand_mode,
6132 unsignedp, 1);
6133
6134 if (result)
6135 return result;
6136
6137 /* If this failed, we have to do this with set/compare/jump/set code. */
6138 if (target == 0 || GET_CODE (target) != REG
6139 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
6140 target = gen_reg_rtx (GET_MODE (target));
6141
6142 emit_move_insn (target, const1_rtx);
6143 result = compare_from_rtx (op0, op1, code, unsignedp, operand_mode, 0, 0);
6144 if (GET_CODE (result) == CONST_INT)
6145 return result == const0_rtx ? const0_rtx : const1_rtx;
6146
6147 label = gen_label_rtx ();
6148 if (bcc_gen_fctn[(int) code] == 0)
6149 abort ();
6150
6151 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
6152 emit_move_insn (target, const0_rtx);
6153 emit_label (label);
6154
6155 return target;
6156}
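#if 0
/* Worked example (hedged) of the single-bit shortcut above:
   `(x & 8) != 0' has bitnum = exact_log2 (8) = 3, so it is expanded
   as a shift and mask instead of an scc insn; `(x & 8) == 0' gets a
   final xor with 1.  A hypothetical standalone equivalent:  */
static int
model_bit_test (x, bitnum, want_eq)
     unsigned int x;
     int bitnum, want_eq;
{
  int bit = (x >> bitnum) & 1;
  return want_eq ? bit ^ 1 : bit;
}
#endif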
6157\f
6158/* Generate a tablejump instruction (used for switch statements). */
6159
6160#ifdef HAVE_tablejump
6161
6162/* INDEX is the value being switched on, with the lowest value
6163 in the table already subtracted.
6164 RANGE is the length of the jump table.
6165 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
6166
6167 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
6168 index value is out of range. */
6169
6170void
6171do_tablejump (index, range, table_label, default_label)
6172 rtx index, range, table_label, default_label;
6173{
6174 register rtx temp, vector;
6175
6176 emit_cmp_insn (range, index, LTU, 0, GET_MODE (index), 0, 0);
6177 emit_jump_insn (gen_bltu (default_label));
6178 /* If flag_force_addr were to affect this address
6179 it could interfere with the tricky assumptions made
6180 about addresses that contain label-refs,
6181 which may be valid only very near the tablejump itself. */
6182 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
6183 GET_MODE_SIZE, because this indicates how large insns are. The other
6184 uses should all be Pmode, because they are addresses. This code
6185 could fail if addresses and insns are not the same size. */
6186 index = memory_address_noforce
6187 (CASE_VECTOR_MODE,
6188 gen_rtx (PLUS, Pmode,
6189 gen_rtx (MULT, Pmode, index,
6190 gen_rtx (CONST_INT, VOIDmode,
6191 GET_MODE_SIZE (CASE_VECTOR_MODE))),
6192 gen_rtx (LABEL_REF, Pmode, table_label)));
6193 temp = gen_reg_rtx (CASE_VECTOR_MODE);
6194 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
6195 RTX_UNCHANGING_P (vector) = 1;
6196 convert_move (temp, vector, 0);
6197
6198 emit_jump_insn (gen_tablejump (temp, table_label));
6199
6200#ifndef CASE_VECTOR_PC_RELATIVE
6201 /* If we are generating PIC code or if the table is PC-relative, the
6202 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
6203 if (! flag_pic)
6204 emit_barrier ();
6205#endif
6206}
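#if 0
/* Worked example (hedged): with a CASE_VECTOR_MODE of SImode
   (GET_MODE_SIZE == 4), the address built above is

       table_label + index * 4

   so the load fetches entry INDEX of the dispatch table, much like
   the GNU C computed goto `goto *table[index];'.  */
#endif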
6207
6208#endif /* HAVE_tablejump */