1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include "config.h"
22 #include "rtl.h"
23 #include "tree.h"
24 #include "flags.h"
25 #include "function.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "expr.h"
29 #include "insn-config.h"
30 #include "recog.h"
31 #include "output.h"
32 #include "gvarargs.h"
33 #include "typeclass.h"
34
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
36
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
39
40 #ifdef STACK_GROWS_DOWNWARD
41 #ifdef PUSH_ROUNDING
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
43 #endif
44 #endif
45
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
49 #else
50 #define STACK_PUSH_CODE PRE_INC
51 #endif
52 #endif
53
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
56
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
63 int cse_not_expected;
64
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls = 1;
69
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust;
73
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop;
79
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call;
83
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs
86 returned. */
87 static rtx saveregs_value;
88
89 rtx store_expr ();
90 static void store_constructor ();
91 static rtx store_field ();
92 static rtx expand_builtin ();
93 static rtx compare ();
94 static rtx do_store_flag ();
95 static void preexpand_calls ();
96 static rtx expand_increment ();
97 static void init_queue ();
98
99 void do_pending_stack_adjust ();
100 static void do_jump_for_compare ();
101 static void do_jump_by_parts_equality ();
102 static void do_jump_by_parts_equality_rtx ();
103 static void do_jump_by_parts_greater ();
104
105 /* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
108
109 static char direct_load[NUM_MACHINE_MODES];
110 static char direct_store[NUM_MACHINE_MODES];
111
112 /* MOVE_RATIO is the number of move instructions that is better than
113 a block move. */
114
115 #ifndef MOVE_RATIO
116 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
117 #define MOVE_RATIO 2
118 #else
119 /* A value of around 6 would minimize code size; infinity would minimize
120 execution time. */
121 #define MOVE_RATIO 15
122 #endif
123 #endif
124
125 /* This array records the insn_code of insns to perform block moves. */
126 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
127
128 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
129
130 #ifndef SLOW_UNALIGNED_ACCESS
131 #define SLOW_UNALIGNED_ACCESS 0
132 #endif
133 \f
134 /* This is run once per compilation to set up which modes can be used
135 directly in memory and to initialize the block move optab. */
136
137 void
138 init_expr_once ()
139 {
140 rtx insn, pat;
141 enum machine_mode mode;
142 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
143
144 start_sequence ();
145 insn = emit_insn (gen_rtx (SET, VOIDmode, NULL_RTX, NULL_RTX));
146 pat = PATTERN (insn);
147
148 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
149 mode = (enum machine_mode) ((int) mode + 1))
150 {
151 int regno;
152 rtx reg;
153 int num_clobbers;
154
155 direct_load[(int) mode] = direct_store[(int) mode] = 0;
156 PUT_MODE (mem, mode);
157
158 /* See if there is some register that can be used in this mode and
159 directly loaded or stored from memory. */
160
161 if (mode != VOIDmode && mode != BLKmode)
162 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
163 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
164 regno++)
165 {
166 if (! HARD_REGNO_MODE_OK (regno, mode))
167 continue;
168
169 reg = gen_rtx (REG, mode, regno);
170
171 SET_SRC (pat) = mem;
172 SET_DEST (pat) = reg;
173 if (recog (pat, insn, &num_clobbers) >= 0)
174 direct_load[(int) mode] = 1;
175
176 SET_SRC (pat) = reg;
177 SET_DEST (pat) = mem;
178 if (recog (pat, insn, &num_clobbers) >= 0)
179 direct_store[(int) mode] = 1;
180 }
181
182 movstr_optab[(int) mode] = CODE_FOR_nothing;
183 }
184
185 end_sequence ();
186
187 #ifdef HAVE_movstrqi
188 if (HAVE_movstrqi)
189 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
190 #endif
191 #ifdef HAVE_movstrhi
192 if (HAVE_movstrhi)
193 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
194 #endif
195 #ifdef HAVE_movstrsi
196 if (HAVE_movstrsi)
197 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
198 #endif
199 #ifdef HAVE_movstrdi
200 if (HAVE_movstrdi)
201 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
202 #endif
203 #ifdef HAVE_movstrti
204 if (HAVE_movstrti)
205 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
206 #endif
207 }
208
209 /* This is run at the start of compiling a function. */
210
211 void
212 init_expr ()
213 {
214 init_queue ();
215
216 pending_stack_adjust = 0;
217 inhibit_defer_pop = 0;
218 cleanups_this_call = 0;
219 saveregs_value = 0;
220 forced_labels = 0;
221 }
222
223 /* Save all variables describing the current status into the structure *P.
224 This is used before starting a nested function. */
225
226 void
227 save_expr_status (p)
228 struct function *p;
229 {
230 /* Instead of saving the postincrement queue, empty it. */
231 emit_queue ();
232
233 p->pending_stack_adjust = pending_stack_adjust;
234 p->inhibit_defer_pop = inhibit_defer_pop;
235 p->cleanups_this_call = cleanups_this_call;
236 p->saveregs_value = saveregs_value;
237 p->forced_labels = forced_labels;
238
239 pending_stack_adjust = 0;
240 inhibit_defer_pop = 0;
241 cleanups_this_call = 0;
242 saveregs_value = 0;
243 forced_labels = 0;
244 }
245
246 /* Restore all variables describing the current status from the structure *P.
247 This is used after a nested function. */
248
249 void
250 restore_expr_status (p)
251 struct function *p;
252 {
253 pending_stack_adjust = p->pending_stack_adjust;
254 inhibit_defer_pop = p->inhibit_defer_pop;
255 cleanups_this_call = p->cleanups_this_call;
256 saveregs_value = p->saveregs_value;
257 forced_labels = p->forced_labels;
258 }
259 \f
260 /* Manage the queue of increment instructions to be output
261 for POSTINCREMENT_EXPR expressions, etc. */
262
263 static rtx pending_chain;
264
265 /* Queue up to increment (or change) VAR later. BODY says how:
266 BODY should be the same thing you would pass to emit_insn
267 to increment right away. It will go to emit_insn later on.
268
269 The value is a QUEUED expression to be used in place of VAR
270 where you want to guarantee the pre-incrementation value of VAR. */
271
272 static rtx
273 enqueue_insn (var, body)
274 rtx var, body;
275 {
276 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
277 var, NULL_RTX, NULL_RTX, body, pending_chain);
278 return pending_chain;
279 }
280
281 /* Use protect_from_queue to convert a QUEUED expression
282 into something that you can put immediately into an instruction.
283 If the queued incrementation has not happened yet,
284 protect_from_queue returns the variable itself.
285 If the incrementation has happened, protect_from_queue returns a temp
286 that contains a copy of the old value of the variable.
287
288 Any time an rtx which might possibly be a QUEUED is to be put
289 into an instruction, it must be passed through protect_from_queue first.
290 QUEUED expressions are not meaningful in instructions.
291
292 Do not pass a value through protect_from_queue and then hold
293 on to it for a while before putting it in an instruction!
294 If the queue is flushed in between, incorrect code will result. */
295
296 rtx
297 protect_from_queue (x, modify)
298 register rtx x;
299 int modify;
300 {
301 register RTX_CODE code = GET_CODE (x);
302
303 #if 0 /* A QUEUED can hang around after the queue is forced out. */
304 /* Shortcut for most common case. */
305 if (pending_chain == 0)
306 return x;
307 #endif
308
309 if (code != QUEUED)
310 {
311 /* A special hack for read access to (MEM (QUEUED ...))
312 to facilitate use of autoincrement.
313 Make a copy of the contents of the memory location
314 rather than a copy of the address, but not
315 if the value is of mode BLKmode. */
316 if (code == MEM && GET_MODE (x) != BLKmode
317 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
318 {
319 register rtx y = XEXP (x, 0);
320 XEXP (x, 0) = QUEUED_VAR (y);
321 if (QUEUED_INSN (y))
322 {
323 register rtx temp = gen_reg_rtx (GET_MODE (x));
324 emit_insn_before (gen_move_insn (temp, x),
325 QUEUED_INSN (y));
326 return temp;
327 }
328 return x;
329 }
330 /* Otherwise, recursively protect the subexpressions of all
331 the kinds of rtx's that can contain a QUEUED. */
332 if (code == MEM)
333 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
334 else if (code == PLUS || code == MULT)
335 {
336 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
337 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
338 }
339 return x;
340 }
341 /* If the increment has not happened, use the variable itself. */
342 if (QUEUED_INSN (x) == 0)
343 return QUEUED_VAR (x);
344 /* If the increment has happened and a pre-increment copy exists,
345 use that copy. */
346 if (QUEUED_COPY (x) != 0)
347 return QUEUED_COPY (x);
348 /* The increment has happened but we haven't set up a pre-increment copy.
349 Set one up now, and use it. */
350 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
351 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
352 QUEUED_INSN (x));
353 return QUEUED_COPY (x);
354 }
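/* Illustrative sketch (editorial addition, not original source): how the
   queue machinery above is used when expanding something like the C
   expression `a = b++'.  The rtx variables A and B are hypothetical
   pseudo registers.

     rtx inc = gen_rtx (SET, VOIDmode, b, plus_constant (b, 1));
     rtx q = enqueue_insn (b, inc);          queue the increment of B
     rtx pre = protect_from_queue (q, 0);    pre-increment value of B
     emit_move_insn (a, pre);                store that value into A
     emit_queue ();                          now emit the queued increment

   As the comment above warns, PRE must be used before the queue is
   flushed, or incorrect code will result.  */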
355
356 /* Return nonzero if X contains a QUEUED expression:
357 if it contains anything that will be altered by a queued increment.
358 We handle only combinations of MEM, PLUS, MINUS and MULT operators
359 since memory addresses generally contain only those. */
360
361 static int
362 queued_subexp_p (x)
363 rtx x;
364 {
365 register enum rtx_code code = GET_CODE (x);
366 switch (code)
367 {
368 case QUEUED:
369 return 1;
370 case MEM:
371 return queued_subexp_p (XEXP (x, 0));
372 case MULT:
373 case PLUS:
374 case MINUS:
375 return queued_subexp_p (XEXP (x, 0))
376 || queued_subexp_p (XEXP (x, 1));
377 }
378 return 0;
379 }
380
381 /* Perform all the pending incrementations. */
382
383 void
384 emit_queue ()
385 {
386 register rtx p;
387 while (p = pending_chain)
388 {
389 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
390 pending_chain = QUEUED_NEXT (p);
391 }
392 }
393
394 static void
395 init_queue ()
396 {
397 if (pending_chain)
398 abort ();
399 }
400 \f
401 /* Copy data from FROM to TO, where the machine modes are not the same.
402 Both modes may be integer, or both may be floating.
403 UNSIGNEDP should be nonzero if FROM is an unsigned type.
404 This causes zero-extension instead of sign-extension. */
405
406 void
407 convert_move (to, from, unsignedp)
408 register rtx to, from;
409 int unsignedp;
410 {
411 enum machine_mode to_mode = GET_MODE (to);
412 enum machine_mode from_mode = GET_MODE (from);
413 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
414 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
415 enum insn_code code;
416 rtx libcall;
417
418 /* rtx code for making an equivalent value. */
419 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
420
421 to = protect_from_queue (to, 1);
422 from = protect_from_queue (from, 0);
423
424 if (to_real != from_real)
425 abort ();
426
427 /* If FROM is a SUBREG that indicates that we have already done at least
428 the required extension, strip it. We don't handle such SUBREGs as
429 TO here. */
430
431 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
432 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
433 >= GET_MODE_SIZE (to_mode))
434 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
435 from = gen_lowpart (to_mode, from), from_mode = to_mode;
436
437 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
438 abort ();
439
440 if (to_mode == from_mode
441 || (from_mode == VOIDmode && CONSTANT_P (from)))
442 {
443 emit_move_insn (to, from);
444 return;
445 }
446
447 if (to_real)
448 {
449 #ifdef HAVE_extendsfdf2
450 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
451 {
452 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
453 return;
454 }
455 #endif
456 #ifdef HAVE_extendsfxf2
457 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
458 {
459 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
460 return;
461 }
462 #endif
463 #ifdef HAVE_extendsftf2
464 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
465 {
466 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
467 return;
468 }
469 #endif
470 #ifdef HAVE_extenddfxf2
471 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
472 {
473 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
474 return;
475 }
476 #endif
477 #ifdef HAVE_extenddftf2
478 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
479 {
480 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
481 return;
482 }
483 #endif
484 #ifdef HAVE_truncdfsf2
485 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
486 {
487 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
488 return;
489 }
490 #endif
491 #ifdef HAVE_truncxfsf2
492 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
493 {
494 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
495 return;
496 }
497 #endif
498 #ifdef HAVE_trunctfsf2
499 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
500 {
501 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
502 return;
503 }
504 #endif
505 #ifdef HAVE_truncxfdf2
506 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
507 {
508 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
509 return;
510 }
511 #endif
512 #ifdef HAVE_trunctfdf2
513 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
514 {
515 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
516 return;
517 }
518 #endif
519
520 libcall = (rtx) 0;
521 switch (from_mode)
522 {
523 case SFmode:
524 switch (to_mode)
525 {
526 case DFmode:
527 libcall = extendsfdf2_libfunc;
528 break;
529
530 case XFmode:
531 libcall = extendsfxf2_libfunc;
532 break;
533
534 case TFmode:
535 libcall = extendsftf2_libfunc;
536 break;
537 }
538 break;
539
540 case DFmode:
541 switch (to_mode)
542 {
543 case SFmode:
544 libcall = truncdfsf2_libfunc;
545 break;
546
547 case XFmode:
548 libcall = extenddfxf2_libfunc;
549 break;
550
551 case TFmode:
552 libcall = extenddftf2_libfunc;
553 break;
554 }
555 break;
556
557 case XFmode:
558 switch (to_mode)
559 {
560 case SFmode:
561 libcall = truncxfsf2_libfunc;
562 break;
563
564 case DFmode:
565 libcall = truncxfdf2_libfunc;
566 break;
567 }
568 break;
569
570 case TFmode:
571 switch (to_mode)
572 {
573 case SFmode:
574 libcall = trunctfsf2_libfunc;
575 break;
576
577 case DFmode:
578 libcall = trunctfdf2_libfunc;
579 break;
580 }
581 break;
582 }
583
584 if (libcall == (rtx) 0)
585 /* This conversion is not implemented yet. */
586 abort ();
587
588 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
589 emit_move_insn (to, hard_libcall_value (to_mode));
590 return;
591 }
592
593 /* Now both modes are integers. */
594
595 /* Handle expanding beyond a word. */
596 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
597 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
598 {
599 rtx insns;
600 rtx lowpart;
601 rtx fill_value;
602 rtx lowfrom;
603 int i;
604 enum machine_mode lowpart_mode;
605 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
606
607 /* Try converting directly if the insn is supported. */
608 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
609 != CODE_FOR_nothing)
610 {
611 /* If FROM is a SUBREG, put it into a register. Do this
612 so that we always generate the same set of insns for
613 better cse'ing; if an intermediate assignment occurred,
614 we won't be doing the operation directly on the SUBREG. */
615 if (optimize > 0 && GET_CODE (from) == SUBREG)
616 from = force_reg (from_mode, from);
617 emit_unop_insn (code, to, from, equiv_code);
618 return;
619 }
620 /* Next, try converting via full word. */
621 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
622 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
623 != CODE_FOR_nothing))
624 {
625 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
626 emit_unop_insn (code, to,
627 gen_lowpart (word_mode, to), equiv_code);
628 return;
629 }
630
631 /* No special multiword conversion insn; do it by hand. */
632 start_sequence ();
633
634 /* Get a copy of FROM widened to a word, if necessary. */
635 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
636 lowpart_mode = word_mode;
637 else
638 lowpart_mode = from_mode;
639
640 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
641
642 lowpart = gen_lowpart (lowpart_mode, to);
643 emit_move_insn (lowpart, lowfrom);
644
645 /* Compute the value to put in each remaining word. */
646 if (unsignedp)
647 fill_value = const0_rtx;
648 else
649 {
650 #ifdef HAVE_slt
651 if (HAVE_slt
652 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
653 && STORE_FLAG_VALUE == -1)
654 {
655 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
656 lowpart_mode, 0, 0);
657 fill_value = gen_reg_rtx (word_mode);
658 emit_insn (gen_slt (fill_value));
659 }
660 else
661 #endif
662 {
663 fill_value
664 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
665 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
666 NULL_RTX, 0);
667 fill_value = convert_to_mode (word_mode, fill_value, 1);
668 }
669 }
670
671 /* Fill the remaining words. */
672 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
673 {
674 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
675 rtx subword = operand_subword (to, index, 1, to_mode);
676
677 if (subword == 0)
678 abort ();
679
680 if (fill_value != subword)
681 emit_move_insn (subword, fill_value);
682 }
683
684 insns = get_insns ();
685 end_sequence ();
686
687 emit_no_conflict_block (insns, to, from, NULL_RTX,
688 gen_rtx (equiv_code, to_mode, from));
689 return;
690 }
691
692 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
693 {
694 convert_move (to, gen_lowpart (word_mode, from), 0);
695 return;
696 }
697
698 /* Handle pointer conversion.  */ /* SPEE 900220 */
699 if (to_mode == PSImode)
700 {
701 if (from_mode != SImode)
702 from = convert_to_mode (SImode, from, unsignedp);
703
704 #ifdef HAVE_truncsipsi
705 if (HAVE_truncsipsi)
706 {
707 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
708 return;
709 }
710 #endif /* HAVE_truncsipsi */
711 abort ();
712 }
713
714 if (from_mode == PSImode)
715 {
716 if (to_mode != SImode)
717 {
718 from = convert_to_mode (SImode, from, unsignedp);
719 from_mode = SImode;
720 }
721 else
722 {
723 #ifdef HAVE_extendpsisi
724 if (HAVE_extendpsisi)
725 {
726 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
727 return;
728 }
729 #endif /* HAVE_extendpsisi */
730 abort ();
731 }
732 }
733
734 /* Now follow all the conversions between integers
735 no more than a word long. */
736
737 /* For truncation, usually we can just refer to FROM in a narrower mode. */
738 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
739 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
740 GET_MODE_BITSIZE (from_mode))
741 && ((GET_CODE (from) == MEM
742 && ! MEM_VOLATILE_P (from)
743 && direct_load[(int) to_mode]
744 && ! mode_dependent_address_p (XEXP (from, 0)))
745 || GET_CODE (from) == REG
746 || GET_CODE (from) == SUBREG))
747 {
748 emit_move_insn (to, gen_lowpart (to_mode, from));
749 return;
750 }
751
752 /* Handle extension to a wider mode, directly if possible. */
753 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
754 {
755 /* Convert directly if that works. */
756 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
757 != CODE_FOR_nothing)
758 {
759 /* If FROM is a SUBREG, put it into a register. Do this
760 so that we always generate the same set of insns for
761 better cse'ing; if an intermediate assignment occurred,
762 we won't be doing the operation directly on the SUBREG. */
763 if (optimize > 0 && GET_CODE (from) == SUBREG)
764 from = force_reg (from_mode, from);
765 emit_unop_insn (code, to, from, equiv_code);
766 return;
767 }
768 else
769 {
770 enum machine_mode intermediate;
771
772 /* Search for a mode to convert via. */
773 for (intermediate = from_mode; intermediate != VOIDmode;
774 intermediate = GET_MODE_WIDER_MODE (intermediate))
775 if ((can_extend_p (to_mode, intermediate, unsignedp)
776 != CODE_FOR_nothing)
777 && (can_extend_p (intermediate, from_mode, unsignedp)
778 != CODE_FOR_nothing))
779 {
780 convert_move (to, convert_to_mode (intermediate, from,
781 unsignedp), unsignedp);
782 return;
783 }
784
785 /* No suitable intermediate mode. */
786 abort ();
787 }
788 }
789
790 /* Support special truncate insns for certain modes. */
791
792 if (from_mode == DImode && to_mode == SImode)
793 {
794 #ifdef HAVE_truncdisi2
795 if (HAVE_truncdisi2)
796 {
797 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
798 return;
799 }
800 #endif
801 convert_move (to, force_reg (from_mode, from), unsignedp);
802 return;
803 }
804
805 if (from_mode == DImode && to_mode == HImode)
806 {
807 #ifdef HAVE_truncdihi2
808 if (HAVE_truncdihi2)
809 {
810 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
811 return;
812 }
813 #endif
814 convert_move (to, force_reg (from_mode, from), unsignedp);
815 return;
816 }
817
818 if (from_mode == DImode && to_mode == QImode)
819 {
820 #ifdef HAVE_truncdiqi2
821 if (HAVE_truncdiqi2)
822 {
823 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
824 return;
825 }
826 #endif
827 convert_move (to, force_reg (from_mode, from), unsignedp);
828 return;
829 }
830
831 if (from_mode == SImode && to_mode == HImode)
832 {
833 #ifdef HAVE_truncsihi2
834 if (HAVE_truncsihi2)
835 {
836 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
837 return;
838 }
839 #endif
840 convert_move (to, force_reg (from_mode, from), unsignedp);
841 return;
842 }
843
844 if (from_mode == SImode && to_mode == QImode)
845 {
846 #ifdef HAVE_truncsiqi2
847 if (HAVE_truncsiqi2)
848 {
849 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
850 return;
851 }
852 #endif
853 convert_move (to, force_reg (from_mode, from), unsignedp);
854 return;
855 }
856
857 if (from_mode == HImode && to_mode == QImode)
858 {
859 #ifdef HAVE_trunchiqi2
860 if (HAVE_trunchiqi2)
861 {
862 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
863 return;
864 }
865 #endif
866 convert_move (to, force_reg (from_mode, from), unsignedp);
867 return;
868 }
869
870 /* Handle truncation of volatile memrefs, and so on;
871 the things that couldn't be truncated directly,
872 and for which there was no special instruction. */
873 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
874 {
875 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
876 emit_move_insn (to, temp);
877 return;
878 }
879
880 /* Mode combination is not recognized. */
881 abort ();
882 }
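/* Illustrative example (editorial addition): widening a HImode pseudo
   into an SImode pseudo with convert_move above.

     rtx from = gen_reg_rtx (HImode);
     rtx to = gen_reg_rtx (SImode);
     convert_move (to, from, 0);    unsignedp == 0: sign-extend

   Passing unsignedp == 1 would zero-extend instead.  On a machine with
   an extendhisi2 pattern this emits a single insn; otherwise the
   intermediate-mode or multiword fallbacks above are used.  */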
883
884 /* Return an rtx for a value that would result
885 from converting X to mode MODE.
886 Both X and MODE may be floating, or both integer.
887 UNSIGNEDP is nonzero if X is an unsigned value.
888 This can be done by referring to a part of X in place
889 or by copying to a new temporary with conversion.
890
891 This function *must not* call protect_from_queue
892 except when putting X into an insn (in which case convert_move does it). */
893
894 rtx
895 convert_to_mode (mode, x, unsignedp)
896 enum machine_mode mode;
897 rtx x;
898 int unsignedp;
899 {
900 register rtx temp;
901
902 /* If X is a SUBREG that indicates that we have already done at least
903 the required extension, strip it. */
904
905 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
906 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
907 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
908 x = gen_lowpart (mode, x);
909
910 if (mode == GET_MODE (x))
911 return x;
912
913 /* There is one case that we must handle specially: If we are converting
914 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
915 we are to interpret the constant as unsigned, gen_lowpart will do
916 the wrong thing if the constant appears negative. What we want to do is
917 make the high-order word of the constant zero, not all ones. */
918
919 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
920 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
921 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
922 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
923
924 /* We can do this with a gen_lowpart if both desired and current modes
925 are integer, and this is either a constant integer, a register, or a
926 non-volatile MEM. Except for the constant case, we must be narrowing
927 the operand. */
928
929 if (GET_CODE (x) == CONST_INT
930 || (GET_MODE_CLASS (mode) == MODE_INT
931 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
932 && (GET_CODE (x) == CONST_DOUBLE
933 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
934 && (((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
935 && direct_load[(int) mode])
936 || GET_CODE (x) == REG)))))
937 return gen_lowpart (mode, x);
938
939 temp = gen_reg_rtx (mode);
940 convert_move (temp, x, unsignedp);
941 return temp;
942 }
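/* Editorial note with a sketch: unlike convert_move, which stores into
   a destination the caller already owns, convert_to_mode returns an rtx
   for the converted value and allocates a new pseudo only when it
   cannot reuse (part of) X.  NARROW here is a hypothetical SImode value.

     rtx wide = convert_to_mode (DImode, narrow, 1);   zero-extended value
*/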
943 \f
944 /* Generate several move instructions to copy LEN bytes
945 from block FROM to block TO. (These are MEM rtx's with BLKmode).
946 The caller must pass FROM and TO
947 through protect_from_queue before calling.
948 ALIGN (in bytes) is maximum alignment we can assume. */
949
950 struct move_by_pieces
951 {
953 rtx to; /* Destination block (a BLKmode MEM). */
954 rtx to_addr; /* Address of the destination. */
955 int autinc_to; /* Nonzero if TO_ADDR is an auto-increment expression. */
956 int explicit_inc_to; /* -1/+1 if we emit explicit adds to step TO_ADDR. */
957 rtx from; /* Source block (a BLKmode MEM). */
958 rtx from_addr; /* Address of the source. */
959 int autinc_from; /* Nonzero if FROM_ADDR is an auto-increment expression. */
960 int explicit_inc_from; /* -1/+1 if we emit explicit adds to step FROM_ADDR. */
961 int len; /* Number of bytes left to move. */
962 int offset; /* Current byte offset into the blocks. */
963 int reverse; /* Nonzero means copy highest-addressed bytes first. */
963 };
964
965 static void move_by_pieces_1 ();
966 static int move_by_pieces_ninsns ();
967
968 static void
969 move_by_pieces (to, from, len, align)
970 rtx to, from;
971 int len, align;
972 {
973 struct move_by_pieces data;
974 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
975 int max_size = MOVE_MAX + 1;
976
977 data.offset = 0;
978 data.to_addr = to_addr;
979 data.from_addr = from_addr;
980 data.to = to;
981 data.from = from;
982 data.autinc_to
983 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
984 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
985 data.autinc_from
986 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
987 || GET_CODE (from_addr) == POST_INC
988 || GET_CODE (from_addr) == POST_DEC);
989
990 data.explicit_inc_from = 0;
991 data.explicit_inc_to = 0;
992 data.reverse
993 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
994 if (data.reverse) data.offset = len;
995 data.len = len;
996
997 /* If copying requires more than two move insns,
998 copy addresses to registers (to make displacements shorter)
999 and use post-increment if available. */
1000 if (!(data.autinc_from && data.autinc_to)
1001 && move_by_pieces_ninsns (len, align) > 2)
1002 {
1003 #ifdef HAVE_PRE_DECREMENT
1004 if (data.reverse && ! data.autinc_from)
1005 {
1006 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1007 data.autinc_from = 1;
1008 data.explicit_inc_from = -1;
1009 }
1010 #endif
1011 #ifdef HAVE_POST_INCREMENT
1012 if (! data.autinc_from)
1013 {
1014 data.from_addr = copy_addr_to_reg (from_addr);
1015 data.autinc_from = 1;
1016 data.explicit_inc_from = 1;
1017 }
1018 #endif
1019 if (!data.autinc_from && CONSTANT_P (from_addr))
1020 data.from_addr = copy_addr_to_reg (from_addr);
1021 #ifdef HAVE_PRE_DECREMENT
1022 if (data.reverse && ! data.autinc_to)
1023 {
1024 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1025 data.autinc_to = 1;
1026 data.explicit_inc_to = -1;
1027 }
1028 #endif
1029 #ifdef HAVE_POST_INCREMENT
1030 if (! data.reverse && ! data.autinc_to)
1031 {
1032 data.to_addr = copy_addr_to_reg (to_addr);
1033 data.autinc_to = 1;
1034 data.explicit_inc_to = 1;
1035 }
1036 #endif
1037 if (!data.autinc_to && CONSTANT_P (to_addr))
1038 data.to_addr = copy_addr_to_reg (to_addr);
1039 }
1040
1041 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1042 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1043 align = MOVE_MAX;
1044
1045 /* First move what we can in the largest integer mode, then go to
1046 successively smaller modes. */
1047
1048 while (max_size > 1)
1049 {
1050 enum machine_mode mode = VOIDmode, tmode;
1051 enum insn_code icode;
1052
1053 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1054 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1055 if (GET_MODE_SIZE (tmode) < max_size)
1056 mode = tmode;
1057
1058 if (mode == VOIDmode)
1059 break;
1060
1061 icode = mov_optab->handlers[(int) mode].insn_code;
1062 if (icode != CODE_FOR_nothing
1063 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1064 GET_MODE_SIZE (mode)))
1065 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1066
1067 max_size = GET_MODE_SIZE (mode);
1068 }
1069
1070 /* The code above should have handled everything. */
1071 if (data.len != 0)
1072 abort ();
1073 }
1074
1075 /* Return number of insns required to move L bytes by pieces.
1076 ALIGN (in bytes) is maximum alignment we can assume. */
1077
1078 static int
1079 move_by_pieces_ninsns (l, align)
1080 unsigned int l;
1081 int align;
1082 {
1083 register int n_insns = 0;
1084 int max_size = MOVE_MAX + 1;
1085
1086 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1087 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1088 align = MOVE_MAX;
1089
1090 while (max_size > 1)
1091 {
1092 enum machine_mode mode = VOIDmode, tmode;
1093 enum insn_code icode;
1094
1095 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1096 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1097 if (GET_MODE_SIZE (tmode) < max_size)
1098 mode = tmode;
1099
1100 if (mode == VOIDmode)
1101 break;
1102
1103 icode = mov_optab->handlers[(int) mode].insn_code;
1104 if (icode != CODE_FOR_nothing
1105 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1106 GET_MODE_SIZE (mode)))
1107 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1108
1109 max_size = GET_MODE_SIZE (mode);
1110 }
1111
1112 return n_insns;
1113 }
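/* Worked example (editorial addition): on a 32-bit target with
   MOVE_MAX == 4 and ALIGN == 4, a length of 11 bytes needs
   11/4 = 2 SImode moves, then 3/2 = 1 HImode move, then 1 QImode move,
   so move_by_pieces_ninsns returns 4.  emit_block_move below compares
   this count against MOVE_RATIO to choose between inline scalar moves,
   a movstr-style insn, and a library call.  */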
1114
1115 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1116 with move instructions for mode MODE. GENFUN is the gen_... function
1117 to make a move insn for that mode. DATA has all the other info. */
1118
1119 static void
1120 move_by_pieces_1 (genfun, mode, data)
1121 rtx (*genfun) ();
1122 enum machine_mode mode;
1123 struct move_by_pieces *data;
1124 {
1125 register int size = GET_MODE_SIZE (mode);
1126 register rtx to1, from1;
1127
1128 while (data->len >= size)
1129 {
1130 if (data->reverse) data->offset -= size;
1131
1132 to1 = (data->autinc_to
1133 ? gen_rtx (MEM, mode, data->to_addr)
1134 : change_address (data->to, mode,
1135 plus_constant (data->to_addr, data->offset)));
1136 from1 =
1137 (data->autinc_from
1138 ? gen_rtx (MEM, mode, data->from_addr)
1139 : change_address (data->from, mode,
1140 plus_constant (data->from_addr, data->offset)));
1141
1142 #ifdef HAVE_PRE_DECREMENT
1143 if (data->explicit_inc_to < 0)
1144 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1145 if (data->explicit_inc_from < 0)
1146 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1147 #endif
1148
1149 emit_insn ((*genfun) (to1, from1));
1150 #ifdef HAVE_POST_INCREMENT
1151 if (data->explicit_inc_to > 0)
1152 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1153 if (data->explicit_inc_from > 0)
1154 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1155 #endif
1156
1157 if (! data->reverse) data->offset += size;
1158
1159 data->len -= size;
1160 }
1161 }
1162 \f
1163 /* Emit code to move a block Y to a block X.
1164 This may be done with string-move instructions,
1165 with multiple scalar move instructions, or with a library call.
1166
1167 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1168 with mode BLKmode.
1169 SIZE is an rtx that says how long they are.
1170 ALIGN is the maximum alignment we can assume they have,
1171 measured in bytes. */
1172
1173 void
1174 emit_block_move (x, y, size, align)
1175 rtx x, y;
1176 rtx size;
1177 int align;
1178 {
1179 if (GET_MODE (x) != BLKmode)
1180 abort ();
1181
1182 if (GET_MODE (y) != BLKmode)
1183 abort ();
1184
1185 x = protect_from_queue (x, 1);
1186 y = protect_from_queue (y, 0);
1187 size = protect_from_queue (size, 0);
1188
1189 if (GET_CODE (x) != MEM)
1190 abort ();
1191 if (GET_CODE (y) != MEM)
1192 abort ();
1193 if (size == 0)
1194 abort ();
1195
1196 if (GET_CODE (size) == CONST_INT
1197 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1198 move_by_pieces (x, y, INTVAL (size), align);
1199 else
1200 {
1201 /* Try the most limited insn first, because there's no point
1202 including more than one in the machine description unless
1203 the more limited one has some advantage. */
1204
1205 rtx opalign = GEN_INT (align);
1206 enum machine_mode mode;
1207
1208 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1209 mode = GET_MODE_WIDER_MODE (mode))
1210 {
1211 enum insn_code code = movstr_optab[(int) mode];
1212
1213 if (code != CODE_FOR_nothing
1214 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1215 here because if SIZE is less than the mode mask, as it is
1216 returned by the macro, it will definitely be less than the
1217 actual mode mask. */
1218 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1219 && (insn_operand_predicate[(int) code][0] == 0
1220 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1221 && (insn_operand_predicate[(int) code][1] == 0
1222 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1223 && (insn_operand_predicate[(int) code][3] == 0
1224 || (*insn_operand_predicate[(int) code][3]) (opalign,
1225 VOIDmode)))
1226 {
1227 rtx op2;
1228 rtx last = get_last_insn ();
1229 rtx pat;
1230
1231 op2 = convert_to_mode (mode, size, 1);
1232 if (insn_operand_predicate[(int) code][2] != 0
1233 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1234 op2 = copy_to_mode_reg (mode, op2);
1235
1236 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1237 if (pat)
1238 {
1239 emit_insn (pat);
1240 return;
1241 }
1242 else
1243 delete_insns_since (last);
1244 }
1245 }
1246
1247 #ifdef TARGET_MEM_FUNCTIONS
1248 emit_library_call (memcpy_libfunc, 0,
1249 VOIDmode, 3, XEXP (x, 0), Pmode,
1250 XEXP (y, 0), Pmode,
1251 convert_to_mode (Pmode, size, 1), Pmode);
1252 #else
1253 emit_library_call (bcopy_libfunc, 0,
1254 VOIDmode, 3, XEXP (y, 0), Pmode,
1255 XEXP (x, 0), Pmode,
1256 convert_to_mode (Pmode, size, 1), Pmode);
1257 #endif
1258 }
1259 }
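/* Illustrative caller (editorial addition; DST_ADDR and SRC_ADDR are
   hypothetical Pmode addresses): copying a 16-byte BLKmode object with
   word alignment.

     rtx dst = gen_rtx (MEM, BLKmode, dst_addr);
     rtx src = gen_rtx (MEM, BLKmode, src_addr);
     emit_block_move (dst, src, GEN_INT (16), UNITS_PER_WORD);

   With a constant size this small, the MOVE_RATIO test above normally
   selects move_by_pieces, so the copy becomes a few scalar moves.  */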
1260 \f
1261 /* Copy all or part of a value X into registers starting at REGNO.
1262 The number of registers to be filled is NREGS. */
1263
1264 void
1265 move_block_to_reg (regno, x, nregs, mode)
1266 int regno;
1267 rtx x;
1268 int nregs;
1269 enum machine_mode mode;
1270 {
1271 int i;
1272 rtx pat, last;
1273
1274 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1275 x = validize_mem (force_const_mem (mode, x));
1276
1277 /* See if the machine can do this with a load multiple insn. */
1278 #ifdef HAVE_load_multiple
1279 last = get_last_insn ();
1280 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1281 GEN_INT (nregs));
1282 if (pat)
1283 {
1284 emit_insn (pat);
1285 return;
1286 }
1287 else
1288 delete_insns_since (last);
1289 #endif
1290
1291 for (i = 0; i < nregs; i++)
1292 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1293 operand_subword_force (x, i, mode));
1294 }
1295
1296 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1297 The number of registers to be filled is NREGS. */
1298
1299 void
1300 move_block_from_reg (regno, x, nregs)
1301 int regno;
1302 rtx x;
1303 int nregs;
1304 {
1305 int i;
1306 rtx pat, last;
1307
1308 /* See if the machine can do this with a store multiple insn. */
1309 #ifdef HAVE_store_multiple
1310 last = get_last_insn ();
1311 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1312 GEN_INT (nregs));
1313 if (pat)
1314 {
1315 emit_insn (pat);
1316 return;
1317 }
1318 else
1319 delete_insns_since (last);
1320 #endif
1321
1322 for (i = 0; i < nregs; i++)
1323 {
1324 rtx tem = operand_subword (x, i, 1, BLKmode);
1325
1326 if (tem == 0)
1327 abort ();
1328
1329 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1330 }
1331 }
1332
1333 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1334
1335 void
1336 use_regs (regno, nregs)
1337 int regno;
1338 int nregs;
1339 {
1340 int i;
1341
1342 for (i = 0; i < nregs; i++)
1343 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1344 }
1345
1346 /* Mark the instructions since PREV as a libcall block.
1347 Add a REG_LIBCALL note to the first insn after PREV and a REG_RETVAL note to the most recent insn. */
1348
1349 static void
1350 group_insns (prev)
1351 rtx prev;
1352 {
1353 rtx insn_first;
1354 rtx insn_last;
1355
1356 /* Find the instructions to mark */
1357 if (prev)
1358 insn_first = NEXT_INSN (prev);
1359 else
1360 insn_first = get_insns ();
1361
1362 insn_last = get_last_insn ();
1363
1364 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1365 REG_NOTES (insn_last));
1366
1367 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1368 REG_NOTES (insn_first));
1369 }
1370 \f
1371 /* Write zeros through the storage of OBJECT.
1372 If OBJECT has BLKmode, SIZE is its length in bytes. */
1373
1374 void
1375 clear_storage (object, size)
1376 rtx object;
1377 int size;
1378 {
1379 if (GET_MODE (object) == BLKmode)
1380 {
1381 #ifdef TARGET_MEM_FUNCTIONS
1382 emit_library_call (memset_libfunc, 0,
1383 VOIDmode, 3,
1384 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1385 GEN_INT (size), Pmode);
1386 #else
1387 emit_library_call (bzero_libfunc, 0,
1388 VOIDmode, 2,
1389 XEXP (object, 0), Pmode,
1390 GEN_INT (size), Pmode);
1391 #endif
1392 }
1393 else
1394 emit_move_insn (object, const0_rtx);
1395 }
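/* Illustrative uses (editorial addition; ADDR is a hypothetical Pmode
   address):

     clear_storage (gen_rtx (MEM, BLKmode, addr), 16);   memset/bzero call
     clear_storage (some_sireg, 0);                      one move of zero

   For a non-BLKmode OBJECT the SIZE argument is ignored.  */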
1396
1397 /* Generate code to copy Y into X.
1398 Both Y and X must have the same mode, except that
1399 Y can be a constant with VOIDmode.
1400 This mode cannot be BLKmode; use emit_block_move for that.
1401
1402 Return the last instruction emitted. */
1403
1404 rtx
1405 emit_move_insn (x, y)
1406 rtx x, y;
1407 {
1408 enum machine_mode mode = GET_MODE (x);
1409 enum machine_mode submode;
1410 enum mode_class class = GET_MODE_CLASS (mode);
1411 int i;
1412
1413 x = protect_from_queue (x, 1);
1414 y = protect_from_queue (y, 0);
1415
1416 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1417 abort ();
1418
1419 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1420 y = force_const_mem (mode, y);
1421
1422 /* If X or Y are memory references, verify that their addresses are valid
1423 for the machine. */
1424 if (GET_CODE (x) == MEM
1425 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1426 && ! push_operand (x, GET_MODE (x)))
1427 || (flag_force_addr
1428 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1429 x = change_address (x, VOIDmode, XEXP (x, 0));
1430
1431 if (GET_CODE (y) == MEM
1432 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1433 || (flag_force_addr
1434 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1435 y = change_address (y, VOIDmode, XEXP (y, 0));
1436
1437 if (mode == BLKmode)
1438 abort ();
1439
1440 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1441 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1442 (class == MODE_COMPLEX_INT
1443 ? MODE_INT : MODE_FLOAT),
1444 0);
1445
1446 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1447 return
1448 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1449
1450 /* Expand complex moves by moving real part and imaginary part, if possible. */
1451 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1452 && submode != BLKmode
1453 && (mov_optab->handlers[(int) submode].insn_code
1454 != CODE_FOR_nothing))
1455 {
1456 /* Don't split destination if it is a stack push. */
1457 int stack = push_operand (x, GET_MODE (x));
1458 rtx prev = get_last_insn ();
1459
1460 /* Tell flow that the whole of the destination is being set. */
1461 if (GET_CODE (x) == REG)
1462 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1463
1464 /* If this is a stack push, push the highpart first, so the parts
1465 end up in argument order.
1466
1467 In that case, change_address is used only to convert
1468 the mode, not to change the address. */
1469 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1470 ((stack ? change_address (x, submode, (rtx) 0)
1471 : gen_highpart (submode, x)),
1472 gen_highpart (submode, y)));
1473 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1474 ((stack ? change_address (x, submode, (rtx) 0)
1475 : gen_lowpart (submode, x)),
1476 gen_lowpart (submode, y)));
1477
1478 group_insns (prev);
1479 }
1480
1481 /* This will handle any multi-word mode that lacks a move_insn pattern.
1482 However, you will get better code if you define such patterns,
1483 even if they must turn into multiple assembler instructions. */
1484 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1485 {
1486 rtx last_insn = 0;
1487 rtx prev_insn = get_last_insn ();
1488
1489 for (i = 0;
1490 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1491 i++)
1492 {
1493 rtx xpart = operand_subword (x, i, 1, mode);
1494 rtx ypart = operand_subword (y, i, 1, mode);
1495
1496 /* If we can't get a part of Y, put Y into memory if it is a
1497 constant. Otherwise, force it into a register. If we still
1498 can't get a part of Y, abort. */
1499 if (ypart == 0 && CONSTANT_P (y))
1500 {
1501 y = force_const_mem (mode, y);
1502 ypart = operand_subword (y, i, 1, mode);
1503 }
1504 else if (ypart == 0)
1505 ypart = operand_subword_force (y, i, mode);
1506
1507 if (xpart == 0 || ypart == 0)
1508 abort ();
1509
1510 last_insn = emit_move_insn (xpart, ypart);
1511 }
1512 /* Mark these insns as a libcall block. */
1513 group_insns (prev_insn);
1514
1515 return last_insn;
1516 }
1517 else
1518 abort ();
1519 }
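/* Example of the multiword fallback above (editorial addition): moving
   a DImode value on a 32-bit target with no movdi pattern,

     emit_move_insn (gen_reg_rtx (DImode), y);

   expands into two word-sized moves through operand_subword, and
   group_insns wraps them in REG_LIBCALL/REG_RETVAL notes so later
   passes can treat the pair as a unit.  */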
1520 \f
1521 /* Pushing data onto the stack. */
1522
1523 /* Push a block of length SIZE (perhaps variable)
1524 and return an rtx to address the beginning of the block.
1525 Note that it is not possible for the value returned to be a QUEUED.
1526 The value may be virtual_outgoing_args_rtx.
1527
1528 EXTRA is the number of bytes of padding to push in addition to SIZE.
1529 BELOW nonzero means this padding comes at low addresses;
1530 otherwise, the padding comes at high addresses. */
1531
1532 rtx
1533 push_block (size, extra, below)
1534 rtx size;
1535 int extra, below;
1536 {
1537 register rtx temp;
1538 if (CONSTANT_P (size))
1539 anti_adjust_stack (plus_constant (size, extra));
1540 else if (GET_CODE (size) == REG && extra == 0)
1541 anti_adjust_stack (size);
1542 else
1543 {
1544 rtx temp = copy_to_mode_reg (Pmode, size);
1545 if (extra != 0)
1546 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1547 temp, 0, OPTAB_LIB_WIDEN);
1548 anti_adjust_stack (temp);
1549 }
1550
1551 #ifdef STACK_GROWS_DOWNWARD
1552 temp = virtual_outgoing_args_rtx;
1553 if (extra != 0 && below)
1554 temp = plus_constant (temp, extra);
1555 #else
1556 if (GET_CODE (size) == CONST_INT)
1557 temp = plus_constant (virtual_outgoing_args_rtx,
1558 - INTVAL (size) - (below ? 0 : extra));
1559 else if (extra != 0 && !below)
1560 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1561 negate_rtx (Pmode, plus_constant (size, extra)));
1562 else
1563 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1564 negate_rtx (Pmode, size));
1565 #endif
1566
1567 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1568 }
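/* Illustrative call (editorial addition): reserving a 32-byte argument
   block with no padding,

     rtx addr = push_block (GEN_INT (32), 0, 0);

   adjusts the stack pointer and returns an address for the beginning
   of the block, whichever way the stack grows.  */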
1569
1570 static rtx
1571 gen_push_operand ()
1572 {
1573 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1574 }
1575
1576 /* Generate code to push X onto the stack, assuming it has mode MODE and
1577 type TYPE.
1578 MODE is redundant except when X is a CONST_INT (since they don't
1579 carry mode info).
1580 SIZE is an rtx for the size of data to be copied (in bytes),
1581 needed only if X is BLKmode.
1582
1583 ALIGN (in bytes) is maximum alignment we can assume.
1584
1585 If PARTIAL is nonzero, then copy that many of the first words
1586 of X into registers starting with REG, and push the rest of X.
1587 The amount of space pushed is decreased by PARTIAL words,
1588 rounded *down* to a multiple of PARM_BOUNDARY.
1589 REG must be a hard register in this case.
1590
1591 EXTRA is the amount in bytes of extra space to leave next to this arg.
1592 This is ignored if an argument block has already been allocated.
1593
1594 On a machine that lacks real push insns, ARGS_ADDR is the address of
1595 the bottom of the argument block for this call. We use indexing off there
1596 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1597 argument block has not been preallocated.
1598
1599 ARGS_SO_FAR is the size of args previously pushed for this call. */
1600
1601 void
1602 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1603 args_addr, args_so_far)
1604 register rtx x;
1605 enum machine_mode mode;
1606 tree type;
1607 rtx size;
1608 int align;
1609 int partial;
1610 rtx reg;
1611 int extra;
1612 rtx args_addr;
1613 rtx args_so_far;
1614 {
1615 rtx xinner;
1616 enum direction stack_direction
1617 #ifdef STACK_GROWS_DOWNWARD
1618 = downward;
1619 #else
1620 = upward;
1621 #endif
1622
1623 /* Decide where to pad the argument: `downward' for below,
1624 `upward' for above, or `none' for don't pad it.
1625 Default is below for small data on big-endian machines; else above. */
1626 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1627
1628 /* Invert direction if stack is post-update. */
1629 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1630 if (where_pad != none)
1631 where_pad = (where_pad == downward ? upward : downward);
1632
1633 xinner = x = protect_from_queue (x, 0);
1634
1635 if (mode == BLKmode)
1636 {
1637 /* Copy a block into the stack, entirely or partially. */
1638
1639 register rtx temp;
1640 int used = partial * UNITS_PER_WORD;
1641 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1642 int skip;
1643
1644 if (size == 0)
1645 abort ();
1646
1647 used -= offset;
1648
1649 /* USED is now the # of bytes we need not copy to the stack
1650 because registers will take care of them. */
1651
1652 if (partial != 0)
1653 xinner = change_address (xinner, BLKmode,
1654 plus_constant (XEXP (xinner, 0), used));
1655
1656 /* If the partial register-part of the arg counts in its stack size,
1657 skip the part of stack space corresponding to the registers.
1658 Otherwise, start copying to the beginning of the stack space,
1659 by setting SKIP to 0. */
1660 #ifndef REG_PARM_STACK_SPACE
1661 skip = 0;
1662 #else
1663 skip = used;
1664 #endif
1665
1666 #ifdef PUSH_ROUNDING
1667 /* Do it with several push insns if that doesn't take lots of insns
1668 and if there is no difficulty with push insns that skip bytes
1669 on the stack for alignment purposes. */
1670 if (args_addr == 0
1671 && GET_CODE (size) == CONST_INT
1672 && skip == 0
1673 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1674 < MOVE_RATIO)
1675 /* Here we avoid the case of a structure whose weak alignment
1676 forces many pushes of a small amount of data,
1677 and such small pushes do rounding that causes trouble. */
1678 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1679 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1680 || PUSH_ROUNDING (align) == align)
1681 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1682 {
1683 /* Push padding now if padding above and stack grows down,
1684 or if padding below and stack grows up.
1685 But if space already allocated, this has already been done. */
1686 if (extra && args_addr == 0
1687 && where_pad != none && where_pad != stack_direction)
1688 anti_adjust_stack (GEN_INT (extra));
1689
1690 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1691 INTVAL (size) - used, align);
1692 }
1693 else
1694 #endif /* PUSH_ROUNDING */
1695 {
1696 /* Otherwise make space on the stack and copy the data
1697 to the address of that space. */
1698
1699 /* Deduct the bytes already covered by registers from the size we must copy. */
1700 if (partial != 0)
1701 {
1702 if (GET_CODE (size) == CONST_INT)
1703 size = GEN_INT (INTVAL (size) - used);
1704 else
1705 size = expand_binop (GET_MODE (size), sub_optab, size,
1706 GEN_INT (used), NULL_RTX, 0,
1707 OPTAB_LIB_WIDEN);
1708 }
1709
1710 /* Get the address of the stack space.
1711 In this case, we do not deal with EXTRA separately.
1712 A single stack adjust will do. */
1713 if (! args_addr)
1714 {
1715 temp = push_block (size, extra, where_pad == downward);
1716 extra = 0;
1717 }
1718 else if (GET_CODE (args_so_far) == CONST_INT)
1719 temp = memory_address (BLKmode,
1720 plus_constant (args_addr,
1721 skip + INTVAL (args_so_far)));
1722 else
1723 temp = memory_address (BLKmode,
1724 plus_constant (gen_rtx (PLUS, Pmode,
1725 args_addr, args_so_far),
1726 skip));
1727
1728 /* TEMP is the address of the block. Copy the data there. */
1729 if (GET_CODE (size) == CONST_INT
1730 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1731 < MOVE_RATIO))
1732 {
1733 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1734 INTVAL (size), align);
1735 goto ret;
1736 }
1737 /* Try the most limited insn first, because there's no point
1738 including more than one in the machine description unless
1739 the more limited one has some advantage. */
1740 #ifdef HAVE_movstrqi
1741 if (HAVE_movstrqi
1742 && GET_CODE (size) == CONST_INT
1743 && ((unsigned) INTVAL (size)
1744 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1745 {
1746 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1747 xinner, size, GEN_INT (align)));
1748 goto ret;
1749 }
1750 #endif
1751 #ifdef HAVE_movstrhi
1752 if (HAVE_movstrhi
1753 && GET_CODE (size) == CONST_INT
1754 && ((unsigned) INTVAL (size)
1755 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1756 {
1757 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1758 xinner, size, GEN_INT (align)));
1759 goto ret;
1760 }
1761 #endif
1762 #ifdef HAVE_movstrsi
1763 if (HAVE_movstrsi)
1764 {
1765 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1766 xinner, size, GEN_INT (align)));
1767 goto ret;
1768 }
1769 #endif
1770 #ifdef HAVE_movstrdi
1771 if (HAVE_movstrdi)
1772 {
1773 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1774 xinner, size, GEN_INT (align)));
1775 goto ret;
1776 }
1777 #endif
1778
1779 #ifndef ACCUMULATE_OUTGOING_ARGS
1780 /* If the source is referenced relative to the stack pointer,
1781 copy it to another register to stabilize it. We do not need
1782 to do this if we know that we won't be changing sp. */
1783
1784 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1785 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1786 temp = copy_to_reg (temp);
1787 #endif
1788
1789 /* Make inhibit_defer_pop nonzero around the library call
1790 to force it to pop the bcopy-arguments right away. */
1791 NO_DEFER_POP;
1792 #ifdef TARGET_MEM_FUNCTIONS
1793 emit_library_call (memcpy_libfunc, 0,
1794 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1795 size, Pmode);
1796 #else
1797 emit_library_call (bcopy_libfunc, 0,
1798 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1799 size, Pmode);
1800 #endif
1801 OK_DEFER_POP;
1802 }
1803 }
1804 else if (partial > 0)
1805 {
1806 /* Scalar partly in registers. */
1807
1808 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1809 int i;
1810 int not_stack;
1811 /* # words of start of argument
1812 that we must make space for but need not store. */
1813 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1814 int args_offset = INTVAL (args_so_far);
1815 int skip;
1816
1817 /* Push padding now if padding above and stack grows down,
1818 or if padding below and stack grows up.
1819 But if space already allocated, this has already been done. */
1820 if (extra && args_addr == 0
1821 && where_pad != none && where_pad != stack_direction)
1822 anti_adjust_stack (GEN_INT (extra));
1823
1824 /* If we make space by pushing it, we might as well push
1825 the real data. Otherwise, we can leave OFFSET nonzero
1826 and leave the space uninitialized. */
1827 if (args_addr == 0)
1828 offset = 0;
1829
1830 /* Now NOT_STACK gets the number of words that we don't need to
1831 allocate on the stack. */
1832 not_stack = partial - offset;
1833
1834 /* If the partial register-part of the arg counts in its stack size,
1835 skip the part of stack space corresponding to the registers.
1836 Otherwise, start copying to the beginning of the stack space,
1837 by setting SKIP to 0. */
1838 #ifndef REG_PARM_STACK_SPACE
1839 skip = 0;
1840 #else
1841 skip = not_stack;
1842 #endif
1843
1844 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1845 x = validize_mem (force_const_mem (mode, x));
1846
1847 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1848 SUBREGs of such registers are not allowed. */
1849 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1850 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1851 x = copy_to_reg (x);
1852
1853 /* Loop over all the words allocated on the stack for this arg. */
1854 /* We can do it by words, because any scalar bigger than a word
1855 has a size that is a multiple of a word. */
1856 #ifndef PUSH_ARGS_REVERSED
1857 for (i = not_stack; i < size; i++)
1858 #else
1859 for (i = size - 1; i >= not_stack; i--)
1860 #endif
1861 if (i >= not_stack + offset)
1862 emit_push_insn (operand_subword_force (x, i, mode),
1863 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
1864 0, args_addr,
1865 GEN_INT (args_offset + ((i - not_stack + skip)
1866 * UNITS_PER_WORD)));
1867 }
1868 else
1869 {
1870 rtx addr;
1871
1872 /* Push padding now if padding above and stack grows down,
1873 or if padding below and stack grows up.
1874 But if space already allocated, this has already been done. */
1875 if (extra && args_addr == 0
1876 && where_pad != none && where_pad != stack_direction)
1877 anti_adjust_stack (GEN_INT (extra));
1878
1879 #ifdef PUSH_ROUNDING
1880 if (args_addr == 0)
1881 addr = gen_push_operand ();
1882 else
1883 #endif
1884 if (GET_CODE (args_so_far) == CONST_INT)
1885 addr
1886 = memory_address (mode,
1887 plus_constant (args_addr, INTVAL (args_so_far)));
1888 else
1889 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
1890 args_so_far));
1891
1892 emit_move_insn (gen_rtx (MEM, mode, addr), x);
1893 }
1894
1895 ret:
1896 /* If part should go in registers, copy that part
1897 into the appropriate registers. Do this now, at the end,
1898 since mem-to-mem copies above may do function calls. */
1899 if (partial > 0)
1900 move_block_to_reg (REGNO (reg), x, partial, mode);
1901
1902 if (extra && args_addr == 0 && where_pad == stack_direction)
1903 anti_adjust_stack (GEN_INT (extra));
1904 }
1905 \f
1906 /* Output a library call to function FUN (a SYMBOL_REF rtx)
1907 (emitting the queue unless NO_QUEUE is nonzero),
1908 for a value of mode OUTMODE,
1909 with NARGS different arguments, passed as alternating rtx values
1910 and machine_modes to convert them to.
1911 The rtx values should have been passed through protect_from_queue already.
1912
1913 NO_QUEUE will be true if and only if the library call is a `const' call
1914 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
1915 to the variable is_const in expand_call.
1916
1917 NO_QUEUE must be true for const calls, because if it isn't, then
1918 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
1919 and will be lost if the libcall sequence is optimized away.
1920
1921 NO_QUEUE must be false for non-const calls, because if it isn't, the
1922 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
1923 optimized. For instance, the instruction scheduler may incorrectly
1924 move memory references across the non-const call. */
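
/* Illustrative only: a call with three (value, mode) pairs, as used
   elsewhere in this file for the memcpy libcall, looks like

       emit_library_call (memcpy_libfunc, 0,
                          VOIDmode, 3, dest, Pmode, src, Pmode,
                          size, Pmode);

   where DEST, SRC and SIZE are placeholder rtx values that have
   already been passed through protect_from_queue.  */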
1925
1926 void
1927 emit_library_call (va_alist)
1928 va_dcl
1929 {
1930 va_list p;
1931 struct args_size args_size;
1932 register int argnum;
1933 enum machine_mode outmode;
1934 int nargs;
1935 rtx fun;
1936 rtx orgfun;
1937 int inc;
1938 int count;
1939 rtx argblock = 0;
1940 CUMULATIVE_ARGS args_so_far;
1941 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
1942 struct args_size offset; struct args_size size; };
1943 struct arg *argvec;
1944 int old_inhibit_defer_pop = inhibit_defer_pop;
1945 int no_queue = 0;
1946 rtx use_insns;
1947
1948 va_start (p);
1949 orgfun = fun = va_arg (p, rtx);
1950 no_queue = va_arg (p, int);
1951 outmode = va_arg (p, enum machine_mode);
1952 nargs = va_arg (p, int);
1953
1954 /* Copy all the libcall-arguments out of the varargs data
1955 and into a vector ARGVEC.
1956
1957 Compute how to pass each argument. We only support a very small subset
1958 of the full argument passing conventions to limit complexity here since
1959 library functions shouldn't have many args. */
1960
1961 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
1962
1963 INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);
1964
1965 args_size.constant = 0;
1966 args_size.var = 0;
1967
1968 for (count = 0; count < nargs; count++)
1969 {
1970 rtx val = va_arg (p, rtx);
1971 enum machine_mode mode = va_arg (p, enum machine_mode);
1972
1973 /* We cannot convert the arg value to the mode the library wants here;
1974 must do it earlier where we know the signedness of the arg. */
1975 if (mode == BLKmode
1976 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
1977 abort ();
1978
1979 /* On some machines, there's no way to pass a float to a library fcn.
1980 Pass it as a double instead. */
1981 #ifdef LIBGCC_NEEDS_DOUBLE
1982 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
1983 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
1984 #endif
1985
1986 /* There's no need to call protect_from_queue, because
1987 either emit_move_insn or emit_push_insn will do that. */
1988
1989 /* Make sure it is a reasonable operand for a move or push insn. */
1990 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
1991 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
1992 val = force_operand (val, NULL_RTX);
1993
1994 argvec[count].value = val;
1995 argvec[count].mode = mode;
1996
1997 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1998 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
1999 abort ();
2000 #endif
2001
2002 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2003 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2004 abort ();
2005 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2006 argvec[count].partial
2007 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2008 #else
2009 argvec[count].partial = 0;
2010 #endif
2011
2012 locate_and_pad_parm (mode, NULL_TREE,
2013 argvec[count].reg && argvec[count].partial == 0,
2014 NULL_TREE, &args_size, &argvec[count].offset,
2015 &argvec[count].size);
2016
2017 if (argvec[count].size.var)
2018 abort ();
2019
2020 #ifndef REG_PARM_STACK_SPACE
2021 if (argvec[count].partial)
2022 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2023 #endif
2024
2025 if (argvec[count].reg == 0 || argvec[count].partial != 0
2026 #ifdef REG_PARM_STACK_SPACE
2027 || 1
2028 #endif
2029 )
2030 args_size.constant += argvec[count].size.constant;
2031
2032 #ifdef ACCUMULATE_OUTGOING_ARGS
2033 /* If this arg is actually passed on the stack, it might be
2034 clobbering something we already put there (this library call might
2035 be inside the evaluation of an argument to a function whose call
2036 requires the stack). This will only occur when the library call
2037 has sufficient args to run out of argument registers. Abort in
2038 this case; if this ever occurs, code must be added to save and
2039 restore the arg slot. */
2040
2041 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2042 abort ();
2043 #endif
2044
2045 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2046 }
2047 va_end (p);
2048
2049 /* If this machine requires an external definition for library
2050 functions, write one out. */
2051 assemble_external_libcall (fun);
2052
2053 #ifdef STACK_BOUNDARY
2054 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2055 / STACK_BYTES) * STACK_BYTES);
2056 #endif
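
  /* Worked example (illustrative): with STACK_BYTES == 8, a constant
     argument-block size of 13 rounds up to ((13 + 7) / 8) * 8 == 16.  */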
2057
2058 #ifdef REG_PARM_STACK_SPACE
2059 args_size.constant = MAX (args_size.constant,
2060 REG_PARM_STACK_SPACE ((tree) 0));
2061 #endif
2062
2063 #ifdef ACCUMULATE_OUTGOING_ARGS
2064 if (args_size.constant > current_function_outgoing_args_size)
2065 current_function_outgoing_args_size = args_size.constant;
2066 args_size.constant = 0;
2067 #endif
2068
2069 #ifndef PUSH_ROUNDING
2070 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2071 #endif
2072
2073 #ifdef PUSH_ARGS_REVERSED
2074 inc = -1;
2075 argnum = nargs - 1;
2076 #else
2077 inc = 1;
2078 argnum = 0;
2079 #endif
2080
2081 /* Push the args that need to be pushed. */
2082
2083 for (count = 0; count < nargs; count++, argnum += inc)
2084 {
2085 register enum machine_mode mode = argvec[argnum].mode;
2086 register rtx val = argvec[argnum].value;
2087 rtx reg = argvec[argnum].reg;
2088 int partial = argvec[argnum].partial;
2089
2090 if (! (reg != 0 && partial == 0))
2091 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2092 			argblock, GEN_INT (argvec[argnum].offset.constant));
2093 NO_DEFER_POP;
2094 }
2095
2096 #ifdef PUSH_ARGS_REVERSED
2097 argnum = nargs - 1;
2098 #else
2099 argnum = 0;
2100 #endif
2101
2102 /* Now load any reg parms into their regs. */
2103
2104 for (count = 0; count < nargs; count++, argnum += inc)
2105 {
2106 register enum machine_mode mode = argvec[argnum].mode;
2107 register rtx val = argvec[argnum].value;
2108 rtx reg = argvec[argnum].reg;
2109 int partial = argvec[argnum].partial;
2110
2111 if (reg != 0 && partial == 0)
2112 emit_move_insn (reg, val);
2113 NO_DEFER_POP;
2114 }
2115
2116 /* For version 1.37, try deleting this entirely. */
2117 if (! no_queue)
2118 emit_queue ();
2119
2120 /* Any regs containing parms remain in use through the call. */
2121 start_sequence ();
2122 for (count = 0; count < nargs; count++)
2123 if (argvec[count].reg != 0)
2124 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2125
2126 use_insns = get_insns ();
2127 end_sequence ();
2128
2129 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2130
2131 /* Don't allow popping to be deferred, since then
2132 cse'ing of library calls could delete a call and leave the pop. */
2133 NO_DEFER_POP;
2134
2135 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2136 will set inhibit_defer_pop to that value. */
2137
2138 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2139 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2140 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2141 old_inhibit_defer_pop + 1, use_insns, no_queue);
2142
2143 /* Now restore inhibit_defer_pop to its actual original value. */
2144 OK_DEFER_POP;
2145 }
2146 \f
2147 /* Expand an assignment that stores the value of FROM into TO.
2148 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2149 (This may contain a QUEUED rtx.)
2150 Otherwise, the returned value is not meaningful.
2151
2152 SUGGEST_REG is no longer actually used.
2153 It used to mean, copy the value through a register
2154 and return that register, if that is possible.
2155 But now we do this if WANT_VALUE.
2156
2157 If the value stored is a constant, we return the constant. */
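
/* Illustrative only: for a chained assignment such as `a = b = c',
   the outer assignment expands the inner `b = c' with WANT_VALUE
   nonzero, so its result rtx can be stored into A without
   re-evaluating the right-hand side.  */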
2158
2159 rtx
2160 expand_assignment (to, from, want_value, suggest_reg)
2161 tree to, from;
2162 int want_value;
2163 int suggest_reg;
2164 {
2165 register rtx to_rtx = 0;
2166 rtx result;
2167
2168 /* Don't crash if the lhs of the assignment was erroneous. */
2169
2170 if (TREE_CODE (to) == ERROR_MARK)
2171 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2172
2173 /* Assignment of a structure component needs special treatment
2174 if the structure component's rtx is not simply a MEM.
2175 Assignment of an array element at a constant index
2176 has the same problem. */
2177
2178 if (TREE_CODE (to) == COMPONENT_REF
2179 || TREE_CODE (to) == BIT_FIELD_REF
2180 || (TREE_CODE (to) == ARRAY_REF
2181 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2182 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2183 {
2184 enum machine_mode mode1;
2185 int bitsize;
2186 int bitpos;
2187 tree offset;
2188 int unsignedp;
2189 int volatilep = 0;
2190 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2191 &mode1, &unsignedp, &volatilep);
2192
2193 /* If we are going to use store_bit_field and extract_bit_field,
2194 make sure to_rtx will be safe for multiple use. */
2195
2196 if (mode1 == VOIDmode && want_value)
2197 tem = stabilize_reference (tem);
2198
2199 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2200 if (offset != 0)
2201 {
2202 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2203
2204 if (GET_CODE (to_rtx) != MEM)
2205 abort ();
2206 to_rtx = change_address (to_rtx, VOIDmode,
2207 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2208 force_reg (Pmode, offset_rtx)));
2209 }
2210 if (volatilep)
2211 {
2212 if (GET_CODE (to_rtx) == MEM)
2213 MEM_VOLATILE_P (to_rtx) = 1;
2214 #if 0 /* This was turned off because, when a field is volatile
2215 in an object which is not volatile, the object may be in a register,
2216    and then we would abort here. */
2217 else
2218 abort ();
2219 #endif
2220 }
2221
2222 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2223 (want_value
2224 /* Spurious cast makes HPUX compiler happy. */
2225 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2226 : VOIDmode),
2227 unsignedp,
2228 /* Required alignment of containing datum. */
2229 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2230 int_size_in_bytes (TREE_TYPE (tem)));
2231 preserve_temp_slots (result);
2232 free_temp_slots ();
2233
2234 return result;
2235 }
2236
2237 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2238 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2239
2240 if (to_rtx == 0)
2241 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2242
2243 /* In case we are returning the contents of an object which overlaps
2244 the place the value is being stored, use a safe function when copying
2245 a value through a pointer into a structure value return block. */
2246 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2247 && current_function_returns_struct
2248 && !current_function_returns_pcc_struct)
2249 {
2250 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2251 rtx size = expr_size (from);
2252
2253 #ifdef TARGET_MEM_FUNCTIONS
2254 emit_library_call (memcpy_libfunc, 0,
2255 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2256 XEXP (from_rtx, 0), Pmode,
2257 size, Pmode);
2258 #else
2259 emit_library_call (bcopy_libfunc, 0,
2260 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2261 XEXP (to_rtx, 0), Pmode,
2262 size, Pmode);
2263 #endif
2264
2265 preserve_temp_slots (to_rtx);
2266 free_temp_slots ();
2267 return to_rtx;
2268 }
2269
2270 /* Compute FROM and store the value in the rtx we got. */
2271
2272 result = store_expr (from, to_rtx, want_value);
2273 preserve_temp_slots (result);
2274 free_temp_slots ();
2275 return result;
2276 }
2277
2278 /* Generate code for computing expression EXP,
2279 and storing the value into TARGET.
2280 Returns TARGET or an equivalent value.
2281 TARGET may contain a QUEUED rtx.
2282
2283 If SUGGEST_REG is nonzero, copy the value through a register
2284 and return that register, if that is possible.
2285
2286 If the value stored is a constant, we return the constant. */
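
/* Illustrative only: expand_assignment finishes with
   store_expr (from, to_rtx, want_value); when SUGGEST_REG is nonzero
   and TARGET is a non-BLKmode MEM, the value is copied through a
   register so the rtx we return stays valid after later stores.  */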
2287
2288 rtx
2289 store_expr (exp, target, suggest_reg)
2290 register tree exp;
2291 register rtx target;
2292 int suggest_reg;
2293 {
2294 register rtx temp;
2295 int dont_return_target = 0;
2296
2297 if (TREE_CODE (exp) == COMPOUND_EXPR)
2298 {
2299 /* Perform first part of compound expression, then assign from second
2300 part. */
2301 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2302 emit_queue ();
2303 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2304 }
2305 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2306 {
2307 /* For conditional expression, get safe form of the target. Then
2308 test the condition, doing the appropriate assignment on either
2309 side. This avoids the creation of unnecessary temporaries.
2310 For non-BLKmode, it is more efficient not to do this. */
2311
2312 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2313
2314 emit_queue ();
2315 target = protect_from_queue (target, 1);
2316
2317 NO_DEFER_POP;
2318 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2319 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2320 emit_queue ();
2321 emit_jump_insn (gen_jump (lab2));
2322 emit_barrier ();
2323 emit_label (lab1);
2324 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2325 emit_queue ();
2326 emit_label (lab2);
2327 OK_DEFER_POP;
2328 return target;
2329 }
2330 else if (suggest_reg && GET_CODE (target) == MEM
2331 && GET_MODE (target) != BLKmode)
2332 /* If target is in memory and caller wants value in a register instead,
2333 arrange that. Pass TARGET as target for expand_expr so that,
2334 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2335 We know expand_expr will not use the target in that case. */
2336 {
2337 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2338 GET_MODE (target), 0);
2339 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2340 temp = copy_to_reg (temp);
2341 dont_return_target = 1;
2342 }
2343 else if (queued_subexp_p (target))
2344 /* If target contains a postincrement, it is not safe
2345 to use as the returned value. It would access the wrong
2346 place by the time the queued increment gets output.
2347 So copy the value through a temporary and use that temp
2348 as the result. */
2349 {
2350 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2351 {
2352 /* Expand EXP into a new pseudo. */
2353 temp = gen_reg_rtx (GET_MODE (target));
2354 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2355 }
2356 else
2357 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2358 dont_return_target = 1;
2359 }
2360 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2361     /* If this is a scalar in a register that is stored in a wider mode
2362 than the declared mode, compute the result into its declared mode
2363 and then convert to the wider mode. Our value is the computed
2364 expression. */
2365 {
2366 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2367 convert_move (SUBREG_REG (target), temp,
2368 SUBREG_PROMOTED_UNSIGNED_P (target));
2369 return temp;
2370 }
2371 else
2372 {
2373 temp = expand_expr (exp, target, GET_MODE (target), 0);
2374 /* DO return TARGET if it's a specified hardware register.
2375 expand_return relies on this. */
2376 if (!(target && GET_CODE (target) == REG
2377 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2378 && CONSTANT_P (temp))
2379 dont_return_target = 1;
2380 }
2381
2382 /* If value was not generated in the target, store it there.
2383      Convert the value to TARGET's type first if necessary.  */
2384
2385 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2386 {
2387 target = protect_from_queue (target, 1);
2388 if (GET_MODE (temp) != GET_MODE (target)
2389 && GET_MODE (temp) != VOIDmode)
2390 {
2391 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2392 if (dont_return_target)
2393 {
2394 /* In this case, we will return TEMP,
2395 so make sure it has the proper mode.
2396 But don't forget to store the value into TARGET. */
2397 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2398 emit_move_insn (target, temp);
2399 }
2400 else
2401 convert_move (target, temp, unsignedp);
2402 }
2403
2404 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2405 {
2406 /* Handle copying a string constant into an array.
2407 The string constant may be shorter than the array.
2408 So copy just the string's actual length, and clear the rest. */
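	  /* Worked example (illustrative): for `char buf[10] = "hi";'
	     the STRING_CST is 3 bytes long counting the terminating
	     null, so 3 bytes are copied and the remaining 7 bytes of
	     BUF are cleared by the code below.  */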
2409 rtx size;
2410
2411 /* Get the size of the data type of the string,
2412 which is actually the size of the target. */
2413 size = expr_size (exp);
2414 if (GET_CODE (size) == CONST_INT
2415 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2416 emit_block_move (target, temp, size,
2417 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2418 else
2419 {
2420 /* Compute the size of the data to copy from the string. */
2421 tree copy_size
2422 = fold (build (MIN_EXPR, sizetype,
2423 size_binop (CEIL_DIV_EXPR,
2424 TYPE_SIZE (TREE_TYPE (exp)),
2425 size_int (BITS_PER_UNIT)),
2426 convert (sizetype,
2427 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2428 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2429 VOIDmode, 0);
2430 rtx label = 0;
2431
2432 /* Copy that much. */
2433 emit_block_move (target, temp, copy_size_rtx,
2434 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2435
2436 /* Figure out how much is left in TARGET
2437 that we have to clear. */
2438 if (GET_CODE (copy_size_rtx) == CONST_INT)
2439 {
2440 temp = plus_constant (XEXP (target, 0),
2441 TREE_STRING_LENGTH (exp));
2442 size = plus_constant (size,
2443 - TREE_STRING_LENGTH (exp));
2444 }
2445 else
2446 {
2447 enum machine_mode size_mode = Pmode;
2448
2449 temp = force_reg (Pmode, XEXP (target, 0));
2450 temp = expand_binop (size_mode, add_optab, temp,
2451 copy_size_rtx, NULL_RTX, 0,
2452 OPTAB_LIB_WIDEN);
2453
2454 size = expand_binop (size_mode, sub_optab, size,
2455 copy_size_rtx, NULL_RTX, 0,
2456 OPTAB_LIB_WIDEN);
2457
2458 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2459 GET_MODE (size), 0, 0);
2460 label = gen_label_rtx ();
2461 emit_jump_insn (gen_blt (label));
2462 }
2463
2464 if (size != const0_rtx)
2465 {
2466 #ifdef TARGET_MEM_FUNCTIONS
2467 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2468 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2469 #else
2470 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2471 temp, Pmode, size, Pmode);
2472 #endif
2473 }
2474 if (label)
2475 emit_label (label);
2476 }
2477 }
2478 else if (GET_MODE (temp) == BLKmode)
2479 emit_block_move (target, temp, expr_size (exp),
2480 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2481 else
2482 emit_move_insn (target, temp);
2483 }
2484 if (dont_return_target)
2485 return temp;
2486 return target;
2487 }
2488 \f
2489 /* Store the value of constructor EXP into the rtx TARGET.
2490 TARGET is either a REG or a MEM. */
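
/* Illustrative only: for `struct s { int a, b; } x = { 1, 2 };' each
   constructor element is stored through store_field at the bit position
   recorded in its FIELD_DECL; had the initializer named fewer fields
   than the structure has, the whole of TARGET would be cleared first.  */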
2491
2492 static void
2493 store_constructor (exp, target)
2494 tree exp;
2495 rtx target;
2496 {
2497 tree type = TREE_TYPE (exp);
2498
2499 /* We know our target cannot conflict, since safe_from_p has been called. */
2500 #if 0
2501 /* Don't try copying piece by piece into a hard register
2502 since that is vulnerable to being clobbered by EXP.
2503 Instead, construct in a pseudo register and then copy it all. */
2504 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2505 {
2506 rtx temp = gen_reg_rtx (GET_MODE (target));
2507 store_constructor (exp, temp);
2508 emit_move_insn (target, temp);
2509 return;
2510 }
2511 #endif
2512
2513 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2514 {
2515 register tree elt;
2516
2517 /* Inform later passes that the whole union value is dead. */
2518 if (TREE_CODE (type) == UNION_TYPE)
2519 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2520
2521 /* If we are building a static constructor into a register,
2522 set the initial value as zero so we can fold the value into
2523 a constant. */
2524 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2525 emit_move_insn (target, const0_rtx);
2526
2527 /* If the constructor has fewer fields than the structure,
2528 clear the whole structure first. */
2529 else if (list_length (CONSTRUCTOR_ELTS (exp))
2530 != list_length (TYPE_FIELDS (type)))
2531 clear_storage (target, int_size_in_bytes (type));
2532 else
2533 /* Inform later passes that the old value is dead. */
2534 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2535
2536 /* Store each element of the constructor into
2537 the corresponding field of TARGET. */
2538
2539 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2540 {
2541 register tree field = TREE_PURPOSE (elt);
2542 register enum machine_mode mode;
2543 int bitsize;
2544 int bitpos;
2545 int unsignedp;
2546
2547 /* Just ignore missing fields.
2548 We cleared the whole structure, above,
2549 if any fields are missing. */
2550 if (field == 0)
2551 continue;
2552
2553 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2554 unsignedp = TREE_UNSIGNED (field);
2555 mode = DECL_MODE (field);
2556 if (DECL_BIT_FIELD (field))
2557 mode = VOIDmode;
2558
2559 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2560 /* ??? This case remains to be written. */
2561 abort ();
2562
2563 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2564
2565 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2566 /* The alignment of TARGET is
2567 at least what its type requires. */
2568 VOIDmode, 0,
2569 TYPE_ALIGN (type) / BITS_PER_UNIT,
2570 int_size_in_bytes (type));
2571 }
2572 }
2573 else if (TREE_CODE (type) == ARRAY_TYPE)
2574 {
2575 register tree elt;
2576 register int i;
2577 tree domain = TYPE_DOMAIN (type);
2578 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2579 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2580 tree elttype = TREE_TYPE (type);
2581
2582 /* If the constructor has fewer fields than the structure,
2583        clear the whole structure first.  Similarly if this is a
2584        static constructor of a non-BLKmode object.  */
2585
2586 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2587 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2588 	clear_storage (target, int_size_in_bytes (type));
2589 else
2590 /* Inform later passes that the old value is dead. */
2591 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2592
2593 /* Store each element of the constructor into
2594 the corresponding element of TARGET, determined
2595 by counting the elements. */
2596 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2597 elt;
2598 elt = TREE_CHAIN (elt), i++)
2599 {
2600 register enum machine_mode mode;
2601 int bitsize;
2602 int bitpos;
2603 int unsignedp;
2604
2605 mode = TYPE_MODE (elttype);
2606 bitsize = GET_MODE_BITSIZE (mode);
2607 unsignedp = TREE_UNSIGNED (elttype);
2608
2609 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2610
2611 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2612 /* The alignment of TARGET is
2613 at least what its type requires. */
2614 VOIDmode, 0,
2615 TYPE_ALIGN (type) / BITS_PER_UNIT,
2616 int_size_in_bytes (type));
2617 }
2618 }
2619
2620 else
2621 abort ();
2622 }
2623
2624 /* Store the value of EXP (an expression tree)
2625 into a subfield of TARGET which has mode MODE and occupies
2626 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2627 If MODE is VOIDmode, it means that we are storing into a bit-field.
2628
2629 If VALUE_MODE is VOIDmode, return nothing in particular.
2630 UNSIGNEDP is not used in this case.
2631
2632 Otherwise, return an rtx for the value stored. This rtx
2633 has mode VALUE_MODE if that is convenient to do.
2634 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2635
2636 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2637 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
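
/* Illustrative only: storing into `struct { unsigned f : 3; } x;'
   reaches this function with MODE == VOIDmode and BITSIZE == 3, so the
   value goes through store_bit_field; a word-aligned `int' member would
   instead take the addressing path at the end of the function.  */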
2638
2639 static rtx
2640 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2641 unsignedp, align, total_size)
2642 rtx target;
2643 int bitsize, bitpos;
2644 enum machine_mode mode;
2645 tree exp;
2646 enum machine_mode value_mode;
2647 int unsignedp;
2648 int align;
2649 int total_size;
2650 {
2651 HOST_WIDE_INT width_mask = 0;
2652
2653 if (bitsize < HOST_BITS_PER_WIDE_INT)
2654 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
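  /* Worked example (illustrative): BITSIZE == 3 gives
     WIDTH_MASK == 7, i.e. the low three bits.  */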
2655
2656 /* If we are storing into an unaligned field of an aligned union that is
2657 in a register, we may have the mode of TARGET being an integer mode but
2658 MODE == BLKmode. In that case, get an aligned object whose size and
2659 alignment are the same as TARGET and store TARGET into it (we can avoid
2660 the store if the field being stored is the entire width of TARGET). Then
2661 call ourselves recursively to store the field into a BLKmode version of
2662 that object. Finally, load from the object into TARGET. This is not
2663 very efficient in general, but should only be slightly more expensive
2664 than the otherwise-required unaligned accesses. Perhaps this can be
2665 cleaned up later. */
2666
2667 if (mode == BLKmode
2668 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2669 {
2670 rtx object = assign_stack_temp (GET_MODE (target),
2671 GET_MODE_SIZE (GET_MODE (target)), 0);
2672 rtx blk_object = copy_rtx (object);
2673
2674 PUT_MODE (blk_object, BLKmode);
2675
2676 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2677 emit_move_insn (object, target);
2678
2679 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2680 align, total_size);
2681
2682 emit_move_insn (target, object);
2683
2684 return target;
2685 }
2686
2687 /* If the structure is in a register or if the component
2688 is a bit field, we cannot use addressing to access it.
2689 Use bit-field techniques or SUBREG to store in it. */
2690
2691 if (mode == VOIDmode
2692 || (mode != BLKmode && ! direct_store[(int) mode])
2693 || GET_CODE (target) == REG
2694 || GET_CODE (target) == SUBREG)
2695 {
2696 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2697 /* Store the value in the bitfield. */
2698 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2699 if (value_mode != VOIDmode)
2700 {
2701 /* The caller wants an rtx for the value. */
2702 /* If possible, avoid refetching from the bitfield itself. */
2703 if (width_mask != 0
2704 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2705 {
2706 tree count;
2707 enum machine_mode tmode;
2708
2709 if (unsignedp)
2710 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2711 tmode = GET_MODE (temp);
2712 if (tmode == VOIDmode)
2713 tmode = value_mode;
2714 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2715 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2716 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2717 }
2718 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2719 NULL_RTX, value_mode, 0, align,
2720 total_size);
2721 }
2722 return const0_rtx;
2723 }
2724 else
2725 {
2726 rtx addr = XEXP (target, 0);
2727 rtx to_rtx;
2728
2729 /* If a value is wanted, it must be the lhs;
2730 so make the address stable for multiple use. */
2731
2732 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2733 && ! CONSTANT_ADDRESS_P (addr)
2734 /* A frame-pointer reference is already stable. */
2735 && ! (GET_CODE (addr) == PLUS
2736 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2737 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2738 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2739 addr = copy_to_reg (addr);
2740
2741 /* Now build a reference to just the desired component. */
2742
2743 to_rtx = change_address (target, mode,
2744 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2745 MEM_IN_STRUCT_P (to_rtx) = 1;
2746
2747 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2748 }
2749 }
2750 \f
2751 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2752 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2753 ARRAY_REFs at constant positions and find the ultimate containing object,
2754 which we return.
2755
2756 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2757 bit position, and *PUNSIGNEDP to the signedness of the field.
2758 If the position of the field is variable, we store a tree
2759 giving the variable offset (in units) in *POFFSET.
2760 This offset is in addition to the bit position.
2761 If the position is not variable, we store 0 in *POFFSET.
2762
2763 If any of the extraction expressions is volatile,
2764 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2765
2766 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2767 is a mode that can be used to access the field. In that case, *PBITSIZE
2768 is redundant.
2769
2770 If the field describes a variable-sized object, *PMODE is set to
2771 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2772 this case, but the address of the object can be found. */
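
/* Illustrative only: for `s.f' where F is a 4-bit field at bit
   offset 8, this returns the tree for S with *PBITSIZE == 4,
   *PBITPOS == 8 and *POFFSET == 0; since no integer mode is 4 bits
   wide, *PMODE is left as VOIDmode.  */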
2773
2774 tree
2775 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2776 tree exp;
2777 int *pbitsize;
2778 int *pbitpos;
2779 tree *poffset;
2780 enum machine_mode *pmode;
2781 int *punsignedp;
2782 int *pvolatilep;
2783 {
2784 tree size_tree = 0;
2785 enum machine_mode mode = VOIDmode;
2786 tree offset = 0;
2787
2788 if (TREE_CODE (exp) == COMPONENT_REF)
2789 {
2790 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2791 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2792 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2793 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2794 }
2795 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2796 {
2797 size_tree = TREE_OPERAND (exp, 1);
2798 *punsignedp = TREE_UNSIGNED (exp);
2799 }
2800 else
2801 {
2802 mode = TYPE_MODE (TREE_TYPE (exp));
2803 *pbitsize = GET_MODE_BITSIZE (mode);
2804 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2805 }
2806
2807 if (size_tree)
2808 {
2809 if (TREE_CODE (size_tree) != INTEGER_CST)
2810 mode = BLKmode, *pbitsize = -1;
2811 else
2812 *pbitsize = TREE_INT_CST_LOW (size_tree);
2813 }
2814
2815 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2816 and find the ultimate containing object. */
2817
2818 *pbitpos = 0;
2819
2820 while (1)
2821 {
2822 if (TREE_CODE (exp) == INDIRECT_REF && flag_volatile)
2823 *pvolatilep = 1;
2824
2825 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2826 {
2827 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2828 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2829 : TREE_OPERAND (exp, 2));
2830
2831 if (TREE_CODE (pos) == PLUS_EXPR)
2832 {
2833 tree constant, var;
2834 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2835 {
2836 constant = TREE_OPERAND (pos, 0);
2837 var = TREE_OPERAND (pos, 1);
2838 }
2839 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2840 {
2841 constant = TREE_OPERAND (pos, 1);
2842 var = TREE_OPERAND (pos, 0);
2843 }
2844 else
2845 abort ();
2846 *pbitpos += TREE_INT_CST_LOW (constant);
2847 if (offset)
2848 offset = size_binop (PLUS_EXPR, offset,
2849 size_binop (FLOOR_DIV_EXPR, var,
2850 size_int (BITS_PER_UNIT)));
2851 else
2852 offset = size_binop (FLOOR_DIV_EXPR, var,
2853 size_int (BITS_PER_UNIT));
2854 }
2855 else if (TREE_CODE (pos) == INTEGER_CST)
2856 *pbitpos += TREE_INT_CST_LOW (pos);
2857 else
2858 {
2859 /* Assume here that the offset is a multiple of a unit.
2860 If not, there should be an explicitly added constant. */
2861 if (offset)
2862 offset = size_binop (PLUS_EXPR, offset,
2863 size_binop (FLOOR_DIV_EXPR, pos,
2864 size_int (BITS_PER_UNIT)));
2865 else
2866 offset = size_binop (FLOOR_DIV_EXPR, pos,
2867 size_int (BITS_PER_UNIT));
2868 }
2869 }
2870
2871 else if (TREE_CODE (exp) == ARRAY_REF
2872 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2873 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2874 {
2875 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2876 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2877 }
2878 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2879 && ! ((TREE_CODE (exp) == NOP_EXPR
2880 || TREE_CODE (exp) == CONVERT_EXPR)
2881 && (TYPE_MODE (TREE_TYPE (exp))
2882 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2883 break;
2884
2885 /* If any reference in the chain is volatile, the effect is volatile. */
2886 if (TREE_THIS_VOLATILE (exp))
2887 *pvolatilep = 1;
2888 exp = TREE_OPERAND (exp, 0);
2889 }
2890
2891 /* If this was a bit-field, see if there is a mode that allows direct
2892 access in case EXP is in memory. */
2893 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2894 {
2895 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2896 if (mode == BLKmode)
2897 mode = VOIDmode;
2898 }
2899
2900 *pmode = mode;
2901 *poffset = offset;
2902 #if 0
2903 /* We aren't finished fixing the callers to really handle nonzero offset. */
2904 if (offset != 0)
2905 abort ();
2906 #endif
2907
2908 return exp;
2909 }
2910 \f
2911 /* Given an rtx VALUE that may contain additions and multiplications,
2912 return an equivalent value that just refers to a register or memory.
2913 This is done by generating instructions to perform the arithmetic
2914 and returning a pseudo-register containing the value. */
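
/* Illustrative only: given VALUE == (plus:SI (reg:SI 60) (const_int 4)),
   this emits the addition via expand_binop and returns a pseudo register
   holding the sum; a VALUE that is already a REG or MEM is returned
   unchanged.  */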
2915
2916 rtx
2917 force_operand (value, target)
2918 rtx value, target;
2919 {
2920 register optab binoptab = 0;
2921 /* Use a temporary to force order of execution of calls to
2922 `force_operand'. */
2923 rtx tmp;
2924 register rtx op2;
2925 /* Use subtarget as the target for operand 0 of a binary operation. */
2926 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2927
2928 if (GET_CODE (value) == PLUS)
2929 binoptab = add_optab;
2930 else if (GET_CODE (value) == MINUS)
2931 binoptab = sub_optab;
2932 else if (GET_CODE (value) == MULT)
2933 {
2934 op2 = XEXP (value, 1);
2935 if (!CONSTANT_P (op2)
2936 && !(GET_CODE (op2) == REG && op2 != subtarget))
2937 subtarget = 0;
2938 tmp = force_operand (XEXP (value, 0), subtarget);
2939 return expand_mult (GET_MODE (value), tmp,
2940 force_operand (op2, NULL_RTX),
2941 target, 0);
2942 }
2943
2944 if (binoptab)
2945 {
2946 op2 = XEXP (value, 1);
2947 if (!CONSTANT_P (op2)
2948 && !(GET_CODE (op2) == REG && op2 != subtarget))
2949 subtarget = 0;
2950 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2951 {
2952 binoptab = add_optab;
2953 op2 = negate_rtx (GET_MODE (value), op2);
2954 }
2955
2956 /* Check for an addition with OP2 a constant integer and our first
2957 operand a PLUS of a virtual register and something else. In that
2958 case, we want to emit the sum of the virtual register and the
2959 constant first and then add the other value. This allows virtual
2960 register instantiation to simply modify the constant rather than
2961 creating another one around this addition. */
2962 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2963 && GET_CODE (XEXP (value, 0)) == PLUS
2964 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2965 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2966 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2967 {
2968 rtx temp = expand_binop (GET_MODE (value), binoptab,
2969 XEXP (XEXP (value, 0), 0), op2,
2970 subtarget, 0, OPTAB_LIB_WIDEN);
2971 return expand_binop (GET_MODE (value), binoptab, temp,
2972 force_operand (XEXP (XEXP (value, 0), 1), 0),
2973 target, 0, OPTAB_LIB_WIDEN);
2974 }
2975
2976 tmp = force_operand (XEXP (value, 0), subtarget);
2977 return expand_binop (GET_MODE (value), binoptab, tmp,
2978 force_operand (op2, NULL_RTX),
2979 target, 0, OPTAB_LIB_WIDEN);
2980       /* We give UNSIGNEDP = 0 to expand_binop
2981 	 because the only operations we are expanding here are signed ones.  */
2982 }
2983 return value;
2984 }
2985 \f
2986 /* Subroutine of expand_expr:
2987 save the non-copied parts (LIST) of an expr (LHS), and return a list
2988 which can restore these values to their previous values,
2989 should something modify their storage. */
2990
2991 static tree
2992 save_noncopied_parts (lhs, list)
2993 tree lhs;
2994 tree list;
2995 {
2996 tree tail;
2997 tree parts = 0;
2998
2999 for (tail = list; tail; tail = TREE_CHAIN (tail))
3000 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3001 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3002 else
3003 {
3004 tree part = TREE_VALUE (tail);
3005 tree part_type = TREE_TYPE (part);
3006 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3007 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3008 int_size_in_bytes (part_type), 0);
3009 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3010 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3011 parts = tree_cons (to_be_saved,
3012 build (RTL_EXPR, part_type, NULL_TREE,
3013 (tree) target),
3014 parts);
3015 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3016 }
3017 return parts;
3018 }
3019
3020 /* Subroutine of expand_expr:
3021 record the non-copied parts (LIST) of an expr (LHS), and return a list
3022 which specifies the initial values of these parts. */
3023
3024 static tree
3025 init_noncopied_parts (lhs, list)
3026 tree lhs;
3027 tree list;
3028 {
3029 tree tail;
3030 tree parts = 0;
3031
3032 for (tail = list; tail; tail = TREE_CHAIN (tail))
3033 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3034 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3035 else
3036 {
3037 tree part = TREE_VALUE (tail);
3038 tree part_type = TREE_TYPE (part);
3039 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3040 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3041 }
3042 return parts;
3043 }
3044
3045 /* Subroutine of expand_expr: return nonzero iff there is no way that
3046 EXP can reference X, which is being modified. */
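
/* Illustrative only: if X is a MEM and EXP contains an INDIRECT_REF,
   or a CALL_EXPR whose result rtl is not yet known, we return 0,
   since a store through X could overlap whatever EXP references.  */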
3047
3048 static int
3049 safe_from_p (x, exp)
3050 rtx x;
3051 tree exp;
3052 {
3053 rtx exp_rtl = 0;
3054 int i, nops;
3055
3056 if (x == 0)
3057 return 1;
3058
3059   /* If this is a subreg of a hard register, declare it unsafe; otherwise,
3060 find the underlying pseudo. */
3061 if (GET_CODE (x) == SUBREG)
3062 {
3063 x = SUBREG_REG (x);
3064 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3065 return 0;
3066 }
3067
3068 /* If X is a location in the outgoing argument area, it is always safe. */
3069 if (GET_CODE (x) == MEM
3070 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3071 || (GET_CODE (XEXP (x, 0)) == PLUS
3072 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3073 return 1;
3074
3075 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3076 {
3077 case 'd':
3078 exp_rtl = DECL_RTL (exp);
3079 break;
3080
3081 case 'c':
3082 return 1;
3083
3084 case 'x':
3085 if (TREE_CODE (exp) == TREE_LIST)
3086 return ((TREE_VALUE (exp) == 0
3087 || safe_from_p (x, TREE_VALUE (exp)))
3088 && (TREE_CHAIN (exp) == 0
3089 || safe_from_p (x, TREE_CHAIN (exp))));
3090 else
3091 return 0;
3092
3093 case '1':
3094 return safe_from_p (x, TREE_OPERAND (exp, 0));
3095
3096 case '2':
3097 case '<':
3098 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3099 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3100
3101 case 'e':
3102 case 'r':
3103 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3104 the expression. If it is set, we conflict iff we are that rtx or
3105 both are in memory. Otherwise, we check all operands of the
3106 expression recursively. */
3107
3108 switch (TREE_CODE (exp))
3109 {
3110 case ADDR_EXPR:
3111 return staticp (TREE_OPERAND (exp, 0));
3112
3113 case INDIRECT_REF:
3114 if (GET_CODE (x) == MEM)
3115 return 0;
3116 break;
3117
3118 case CALL_EXPR:
3119 exp_rtl = CALL_EXPR_RTL (exp);
3120 if (exp_rtl == 0)
3121 {
3122 /* Assume that the call will clobber all hard registers and
3123 all of memory. */
3124 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3125 || GET_CODE (x) == MEM)
3126 return 0;
3127 }
3128
3129 break;
3130
3131 case RTL_EXPR:
3132 exp_rtl = RTL_EXPR_RTL (exp);
3133 if (exp_rtl == 0)
3134 /* We don't know what this can modify. */
3135 return 0;
3136
3137 break;
3138
3139 case WITH_CLEANUP_EXPR:
3140 exp_rtl = RTL_EXPR_RTL (exp);
3141 break;
3142
3143 case SAVE_EXPR:
3144 exp_rtl = SAVE_EXPR_RTL (exp);
3145 break;
3146
3147 case BIND_EXPR:
3148 /* The only operand we look at is operand 1. The rest aren't
3149 part of the expression. */
3150 return safe_from_p (x, TREE_OPERAND (exp, 1));
3151
3152 case METHOD_CALL_EXPR:
3153 /* This takes a rtx argument, but shouldn't appear here. */
3154 abort ();
3155 }
3156
3157 /* If we have an rtx, we do not need to scan our operands. */
3158 if (exp_rtl)
3159 break;
3160
3161 nops = tree_code_length[(int) TREE_CODE (exp)];
3162 for (i = 0; i < nops; i++)
3163 if (TREE_OPERAND (exp, i) != 0
3164 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3165 return 0;
3166 }
3167
3168 /* If we have an rtl, find any enclosed object. Then see if we conflict
3169 with it. */
3170 if (exp_rtl)
3171 {
3172 if (GET_CODE (exp_rtl) == SUBREG)
3173 {
3174 exp_rtl = SUBREG_REG (exp_rtl);
3175 if (GET_CODE (exp_rtl) == REG
3176 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3177 return 0;
3178 }
3179
3180 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3181 are memory and EXP is not readonly. */
3182 return ! (rtx_equal_p (x, exp_rtl)
3183 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3184 && ! TREE_READONLY (exp)));
3185 }
3186
3187 /* If we reach here, it is safe. */
3188 return 1;
3189 }
3190
3191 /* Subroutine of expand_expr: return nonzero iff EXP is an
3192 expression whose type is statically determinable. */
3193
3194 static int
3195 fixed_type_p (exp)
3196 tree exp;
3197 {
3198 if (TREE_CODE (exp) == PARM_DECL
3199 || TREE_CODE (exp) == VAR_DECL
3200 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3201 || TREE_CODE (exp) == COMPONENT_REF
3202 || TREE_CODE (exp) == ARRAY_REF)
3203 return 1;
3204 return 0;
3205 }
3206 \f
3207 /* expand_expr: generate code for computing expression EXP.
3208 An rtx for the computed value is returned. The value is never null.
3209 In the case of a void EXP, const0_rtx is returned.
3210
3211 The value may be stored in TARGET if TARGET is nonzero.
3212 TARGET is just a suggestion; callers must assume that
3213 the rtx returned may not be the same as TARGET.
3214
3215 If TARGET is CONST0_RTX, it means that the value will be ignored.
3216
3217 If TMODE is not VOIDmode, it suggests generating the
3218 result in mode TMODE. But this is done only when convenient.
3219    Otherwise, TMODE is ignored and the value is generated in its natural mode.
3220 TMODE is just a suggestion; callers must assume that
3221 the rtx returned may not have mode TMODE.
3222
3223 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3224 with a constant address even if that address is not normally legitimate.
3225 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3226
3227 If MODIFIER is EXPAND_SUM then when EXP is an addition
3228 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3229 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3230 products as above, or REG or MEM, or constant.
3231 Ordinarily in such cases we would output mul or add instructions
3232 and then return a pseudo reg containing the sum.
3233
3234 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3235 it also marks a label as absolutely required (it can't be dead).
3236 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3237 This is used for outputting expressions used in initializers. */
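
/* Illustrative only: with MODIFIER == EXPAND_SUM, expanding the address
   arithmetic `a + i*4' may yield
       (plus:SI (reg:SI 60) (mult:SI (reg:SI 61) (const_int 4)))
   rather than emitting the multiply and add and returning one pseudo.  */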
3238
3239 rtx
3240 expand_expr (exp, target, tmode, modifier)
3241 register tree exp;
3242 rtx target;
3243 enum machine_mode tmode;
3244 enum expand_modifier modifier;
3245 {
3246 register rtx op0, op1, temp;
3247 tree type = TREE_TYPE (exp);
3248 int unsignedp = TREE_UNSIGNED (type);
3249 register enum machine_mode mode = TYPE_MODE (type);
3250 register enum tree_code code = TREE_CODE (exp);
3251 optab this_optab;
3252 /* Use subtarget as the target for operand 0 of a binary operation. */
3253 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3254 rtx original_target = target;
3255 int ignore = target == const0_rtx;
3256 tree context;
3257
3258 /* Don't use hard regs as subtargets, because the combiner
3259 can only handle pseudo regs. */
3260 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3261 subtarget = 0;
3262 /* Avoid subtargets inside loops,
3263 since they hide some invariant expressions. */
3264 if (preserve_subexpressions_p ())
3265 subtarget = 0;
3266
3267 if (ignore) target = 0, original_target = 0;
3268
3269   /* If we will do cse, generate all results into pseudo registers
3270 since 1) that allows cse to find more things
3271 and 2) otherwise cse could produce an insn the machine
3272 cannot support. */
3273
3274 if (! cse_not_expected && mode != BLKmode && target
3275 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3276 target = subtarget;
3277
3278 /* Ensure we reference a volatile object even if value is ignored. */
3279 if (ignore && TREE_THIS_VOLATILE (exp)
3280 && mode != VOIDmode && mode != BLKmode)
3281 {
3282 target = gen_reg_rtx (mode);
3283 temp = expand_expr (exp, target, VOIDmode, modifier);
3284 if (temp != target)
3285 emit_move_insn (target, temp);
3286 return target;
3287 }
3288
3289 switch (code)
3290 {
3291 case LABEL_DECL:
3292 {
3293 tree function = decl_function_context (exp);
3294 /* Handle using a label in a containing function. */
3295 if (function != current_function_decl && function != 0)
3296 {
3297 struct function *p = find_function_data (function);
3298 /* Allocate in the memory associated with the function
3299 that the label is in. */
3300 push_obstacks (p->function_obstack,
3301 p->function_maybepermanent_obstack);
3302
3303 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3304 label_rtx (exp), p->forced_labels);
3305 pop_obstacks ();
3306 }
3307 else if (modifier == EXPAND_INITIALIZER)
3308 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3309 label_rtx (exp), forced_labels);
3310 temp = gen_rtx (MEM, FUNCTION_MODE,
3311 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3312 if (function != current_function_decl && function != 0)
3313 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3314 return temp;
3315 }
3316
3317 case PARM_DECL:
3318 if (DECL_RTL (exp) == 0)
3319 {
3320 error_with_decl (exp, "prior parameter's size depends on `%s'");
3321 return CONST0_RTX (mode);
3322 }
3323
3324 case FUNCTION_DECL:
3325 case VAR_DECL:
3326 case RESULT_DECL:
3327 if (DECL_RTL (exp) == 0)
3328 abort ();
3329 /* Ensure variable marked as used
3330 even if it doesn't go through a parser. */
3331 TREE_USED (exp) = 1;
3332 /* Handle variables inherited from containing functions. */
3333 context = decl_function_context (exp);
3334
3335 /* We treat inline_function_decl as an alias for the current function
3336 because that is the inline function whose vars, types, etc.
3337 are being merged into the current function.
3338 See expand_inline_function. */
3339 if (context != 0 && context != current_function_decl
3340 && context != inline_function_decl
3341 /* If var is static, we don't need a static chain to access it. */
3342 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3343 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3344 {
3345 rtx addr;
3346
3347 /* Mark as non-local and addressable. */
3348 DECL_NONLOCAL (exp) = 1;
3349 mark_addressable (exp);
3350 if (GET_CODE (DECL_RTL (exp)) != MEM)
3351 abort ();
3352 addr = XEXP (DECL_RTL (exp), 0);
3353 if (GET_CODE (addr) == MEM)
3354 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3355 else
3356 addr = fix_lexical_addr (addr, exp);
3357 return change_address (DECL_RTL (exp), mode, addr);
3358 }
3359
3360 /* This is the case of an array whose size is to be determined
3361 from its initializer, while the initializer is still being parsed.
3362 See expand_decl. */
3363 if (GET_CODE (DECL_RTL (exp)) == MEM
3364 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3365 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3366 XEXP (DECL_RTL (exp), 0));
3367 if (GET_CODE (DECL_RTL (exp)) == MEM
3368 && modifier != EXPAND_CONST_ADDRESS
3369 && modifier != EXPAND_SUM
3370 && modifier != EXPAND_INITIALIZER)
3371 {
3372 /* DECL_RTL probably contains a constant address.
3373 On RISC machines where a constant address isn't valid,
3374 make some insns to get that address into a register. */
3375 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3376 || (flag_force_addr
3377 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3378 return change_address (DECL_RTL (exp), VOIDmode,
3379 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3380 }
3381
3382 /* If the mode of DECL_RTL does not match that of the decl, it
3383 must be a promoted value. We return a SUBREG of the wanted mode,
3384 but mark it so that we know that it was already extended. */
3385
3386 if (GET_CODE (DECL_RTL (exp)) == REG
3387 && GET_MODE (DECL_RTL (exp)) != mode)
3388 {
3389 enum machine_mode decl_mode = DECL_MODE (exp);
3390
3391 /* Get the signedness used for this variable. Ensure we get the
3392 same mode we got when the variable was declared. */
3393
3394 PROMOTE_MODE (decl_mode, unsignedp, type);
3395
3396 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3397 abort ();
3398
3399 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3400 SUBREG_PROMOTED_VAR_P (temp) = 1;
3401 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3402 return temp;
3403 }
3404
3405 return DECL_RTL (exp);
3406
3407 case INTEGER_CST:
3408 return immed_double_const (TREE_INT_CST_LOW (exp),
3409 TREE_INT_CST_HIGH (exp),
3410 mode);
3411
3412 case CONST_DECL:
3413 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3414
3415 case REAL_CST:
3416 /* If optimized, generate immediate CONST_DOUBLE
3417 which will be turned into memory by reload if necessary.
3418
3419 We used to force a register so that loop.c could see it. But
3420 this does not allow gen_* patterns to perform optimizations with
3421 the constants. It also produces two insns in cases like "x = 1.0;".
3422 On most machines, floating-point constants are not permitted in
3423 many insns, so we'd end up copying it to a register in any case.
3424
3425 Now, we do the copying in expand_binop, if appropriate. */
3426 return immed_real_const (exp);
3427
3428 case COMPLEX_CST:
3429 case STRING_CST:
3430 if (! TREE_CST_RTL (exp))
3431 output_constant_def (exp);
3432
3433 /* TREE_CST_RTL probably contains a constant address.
3434 On RISC machines where a constant address isn't valid,
3435 make some insns to get that address into a register. */
3436 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3437 && modifier != EXPAND_CONST_ADDRESS
3438 && modifier != EXPAND_INITIALIZER
3439 && modifier != EXPAND_SUM
3440 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3441 return change_address (TREE_CST_RTL (exp), VOIDmode,
3442 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3443 return TREE_CST_RTL (exp);
3444
3445 case SAVE_EXPR:
3446 context = decl_function_context (exp);
3447 /* We treat inline_function_decl as an alias for the current function
3448 because that is the inline function whose vars, types, etc.
3449 are being merged into the current function.
3450 See expand_inline_function. */
3451 if (context == current_function_decl || context == inline_function_decl)
3452 context = 0;
3453
3454 /* If this is non-local, handle it. */
3455 if (context)
3456 {
3457 temp = SAVE_EXPR_RTL (exp);
3458 if (temp && GET_CODE (temp) == REG)
3459 {
3460 put_var_into_stack (exp);
3461 temp = SAVE_EXPR_RTL (exp);
3462 }
3463 if (temp == 0 || GET_CODE (temp) != MEM)
3464 abort ();
3465 return change_address (temp, mode,
3466 fix_lexical_addr (XEXP (temp, 0), exp));
3467 }
3468 if (SAVE_EXPR_RTL (exp) == 0)
3469 {
3470 if (mode == BLKmode)
3471 temp
3472 = assign_stack_temp (mode,
3473 int_size_in_bytes (TREE_TYPE (exp)), 0);
3474 else
3475 {
3476 enum machine_mode var_mode = mode;
3477
3478 if (TREE_CODE (type) == INTEGER_TYPE
3479 || TREE_CODE (type) == ENUMERAL_TYPE
3480 || TREE_CODE (type) == BOOLEAN_TYPE
3481 || TREE_CODE (type) == CHAR_TYPE
3482 || TREE_CODE (type) == REAL_TYPE
3483 || TREE_CODE (type) == POINTER_TYPE
3484 || TREE_CODE (type) == OFFSET_TYPE)
3485 {
3486 PROMOTE_MODE (var_mode, unsignedp, type);
3487 }
3488
3489 temp = gen_reg_rtx (var_mode);
3490 }
3491
3492 SAVE_EXPR_RTL (exp) = temp;
3493 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3494 if (!optimize && GET_CODE (temp) == REG)
3495 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3496 save_expr_regs);
3497 }
3498
3499 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3500 must be a promoted value. We return a SUBREG of the wanted mode,
3501 but mark it so that we know that it was already extended. Note
3502 that `unsignedp' was modified above in this case. */
3503
3504 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3505 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3506 {
3507 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3508 SUBREG_PROMOTED_VAR_P (temp) = 1;
3509 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3510 return temp;
3511 }
3512
3513 return SAVE_EXPR_RTL (exp);
3514
3515 case EXIT_EXPR:
3516 /* Exit the current loop if the body-expression is true. */
3517 {
3518 rtx label = gen_label_rtx ();
3519 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3520 expand_exit_loop (NULL_PTR);
3521 emit_label (label);
3522 }
3523 return const0_rtx;
3524
3525 case LOOP_EXPR:
3526 expand_start_loop (1);
3527 expand_expr_stmt (TREE_OPERAND (exp, 0));
3528 expand_end_loop ();
3529
3530 return const0_rtx;
3531
3532 case BIND_EXPR:
3533 {
3534 tree vars = TREE_OPERAND (exp, 0);
3535 int vars_need_expansion = 0;
3536
3537 /* Need to open a binding contour here because
3538 	 if there are any cleanups they must be contained here.  */
3539 expand_start_bindings (0);
3540
3541 /* Mark the corresponding BLOCK for output in its proper place. */
3542 if (TREE_OPERAND (exp, 2) != 0
3543 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3544 insert_block (TREE_OPERAND (exp, 2));
3545
3546 /* If VARS have not yet been expanded, expand them now. */
3547 while (vars)
3548 {
3549 if (DECL_RTL (vars) == 0)
3550 {
3551 vars_need_expansion = 1;
3552 expand_decl (vars);
3553 }
3554 expand_decl_init (vars);
3555 vars = TREE_CHAIN (vars);
3556 }
3557
3558 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3559
3560 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3561
3562 return temp;
3563 }
3564
3565 case RTL_EXPR:
3566 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3567 abort ();
3568 emit_insns (RTL_EXPR_SEQUENCE (exp));
3569 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3570 return RTL_EXPR_RTL (exp);
3571
3572 case CONSTRUCTOR:
3573 /* All elts simple constants => refer to a constant in memory. But
3574 if this is a non-BLKmode mode, let it store a field at a time
3575 since that should make a CONST_INT or CONST_DOUBLE when we
3576 fold. */
3577 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3578 {
3579 rtx constructor = output_constant_def (exp);
3580 if (modifier != EXPAND_CONST_ADDRESS
3581 && modifier != EXPAND_INITIALIZER
3582 && modifier != EXPAND_SUM
3583 && !memory_address_p (GET_MODE (constructor),
3584 XEXP (constructor, 0)))
3585 constructor = change_address (constructor, VOIDmode,
3586 XEXP (constructor, 0));
3587 return constructor;
3588 }
3589
3590 if (ignore)
3591 {
3592 tree elt;
3593 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3594 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3595 return const0_rtx;
3596 }
3597 else
3598 {
3599 if (target == 0 || ! safe_from_p (target, exp))
3600 {
3601 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3602 target = gen_reg_rtx (mode);
3603 else
3604 {
3605 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3606 if (target)
3607 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3608 target = safe_target;
3609 }
3610 }
3611 store_constructor (exp, target);
3612 return target;
3613 }
3614
3615 case INDIRECT_REF:
3616 {
3617 tree exp1 = TREE_OPERAND (exp, 0);
3618 tree exp2;
3619
3620 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3621 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3622 This code has the same general effect as simply doing
3623 expand_expr on the save expr, except that the expression PTR
3624 is computed for use as a memory address. This means different
3625 code, suitable for indexing, may be generated. */
3626 if (TREE_CODE (exp1) == SAVE_EXPR
3627 && SAVE_EXPR_RTL (exp1) == 0
3628 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3629 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3630 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3631 {
3632 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3633 VOIDmode, EXPAND_SUM);
3634 op0 = memory_address (mode, temp);
3635 op0 = copy_all_regs (op0);
3636 SAVE_EXPR_RTL (exp1) = op0;
3637 }
3638 else
3639 {
3640 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3641 op0 = memory_address (mode, op0);
3642 }
3643
3644 temp = gen_rtx (MEM, mode, op0);
3645 /* If address was computed by addition,
3646 mark this as an element of an aggregate. */
3647 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3648 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3649 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3650 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3651 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3652 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3653 || (TREE_CODE (exp1) == ADDR_EXPR
3654 && (exp2 = TREE_OPERAND (exp1, 0))
3655 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3656 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3657 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3658 MEM_IN_STRUCT_P (temp) = 1;
3659 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3660 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3661 a location is accessed through a pointer to const does not mean
3662 that the value there can never change. */
3663 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3664 #endif
3665 return temp;
3666 }
3667
3668 case ARRAY_REF:
3669 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3670 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3671 {
3672 /* Nonconstant array index or nonconstant element size.
3673 Generate the tree for *(&array+index) and expand that,
3674 except do it in a language-independent way
3675 and don't complain about non-lvalue arrays.
3676 `mark_addressable' should already have been called
3677 for any array for which this case will be reached. */
3678
3679 /* Don't forget the const or volatile flag from the array element. */
3680 tree variant_type = build_type_variant (type,
3681 TREE_READONLY (exp),
3682 TREE_THIS_VOLATILE (exp));
3683 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3684 TREE_OPERAND (exp, 0));
3685 tree index = TREE_OPERAND (exp, 1);
3686 tree elt;
3687
3688 /* Convert the integer argument to a type the same size as a pointer
3689 so the multiply won't overflow spuriously. */
3690 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3691 index = convert (type_for_size (POINTER_SIZE, 0), index);
3692
3693 /* Don't think the address has side effects
3694 just because the array does.
3695 (In some cases the address might have side effects,
3696 and we fail to record that fact here. However, it should not
3697 matter, since expand_expr should not care.) */
3698 TREE_SIDE_EFFECTS (array_adr) = 0;
3699
3700 elt = build1 (INDIRECT_REF, type,
3701 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3702 array_adr,
3703 fold (build (MULT_EXPR,
3704 TYPE_POINTER_TO (variant_type),
3705 index, size_in_bytes (type))))));
3706
3707 /* Volatility, etc., of new expression is same as old expression. */
3708 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3709 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3710 TREE_READONLY (elt) = TREE_READONLY (exp);
3711
3712 return expand_expr (elt, target, tmode, modifier);
3713 }
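/* E.g., `a[i]' with `double a[]' is rewritten here as
   *(&a + i * sizeof (double)), so the index scaling and addition go
   through the ordinary pointer-arithmetic and addressing machinery.
   (Worked example only.)  */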
3714
3715 /* Fold an expression like: "foo"[2].
3716 This is not done in fold so it won't happen inside &. */
3717 {
3718 int i;
3719 tree arg0 = TREE_OPERAND (exp, 0);
3720 tree arg1 = TREE_OPERAND (exp, 1);
3721
3722 if (TREE_CODE (arg0) == STRING_CST
3723 && TREE_CODE (arg1) == INTEGER_CST
3724 && !TREE_INT_CST_HIGH (arg1)
3725 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3726 {
3727 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3728 {
3729 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3730 TREE_TYPE (exp) = integer_type_node;
3731 return expand_expr (exp, target, tmode, modifier);
3732 }
3733 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3734 {
3735 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3736 TREE_TYPE (exp) = integer_type_node;
3737 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3738 }
3739 }
3740 }
3741
3742 /* If this is a constant index into a constant array,
3743 just get the value from the array. Handle both the cases when
3744 we have an explicit constructor and when our operand is a variable
3745 that was declared const. */
3746
3747 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3748 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3749 {
3750 tree index = fold (TREE_OPERAND (exp, 1));
3751 if (TREE_CODE (index) == INTEGER_CST
3752 && TREE_INT_CST_HIGH (index) == 0)
3753 {
3754 int i = TREE_INT_CST_LOW (index);
3755 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3756
3757 while (elem && i--)
3758 elem = TREE_CHAIN (elem);
3759 if (elem)
3760 return expand_expr (fold (TREE_VALUE (elem)), target,
3761 tmode, modifier);
3762 }
3763 }
3764
3765 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3766 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3767 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3768 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3769 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3770 && optimize >= 1
3771 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3772 != ERROR_MARK))
3773 {
3774 tree index = fold (TREE_OPERAND (exp, 1));
3775 if (TREE_CODE (index) == INTEGER_CST
3776 && TREE_INT_CST_HIGH (index) == 0)
3777 {
3778 int i = TREE_INT_CST_LOW (index);
3779 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3780
3781 if (TREE_CODE (init) == CONSTRUCTOR)
3782 {
3783 tree elem = CONSTRUCTOR_ELTS (init);
3784
3785 while (elem && i--)
3786 elem = TREE_CHAIN (elem);
3787 if (elem)
3788 return expand_expr (fold (TREE_VALUE (elem)), target,
3789 tmode, modifier);
3790 }
3791 else if (TREE_CODE (init) == STRING_CST
3792 && i < TREE_STRING_LENGTH (init))
3793 {
3794 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3795 return convert_to_mode (mode, temp, 0);
3796 }
3797 }
3798 }
3799 /* Treat array-ref with constant index as a component-ref. */
3800
3801 case COMPONENT_REF:
3802 case BIT_FIELD_REF:
3803 /* If the operand is a CONSTRUCTOR, we can just extract the
3804 appropriate field if it is present. */
3805 if (code != ARRAY_REF
3806 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3807 {
3808 tree elt;
3809
3810 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3811 elt = TREE_CHAIN (elt))
3812 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3813 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3814 }
3815
3816 {
3817 enum machine_mode mode1;
3818 int bitsize;
3819 int bitpos;
3820 tree offset;
3821 int volatilep = 0;
3822 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3823 &mode1, &unsignedp, &volatilep);
3824
3825 /* In some cases, we will be offsetting OP0's address by a constant.
3826 So get it as a sum, if possible. If we will be using it
3827 directly in an insn, we validate it. */
3828 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3829
3830 /* If this is a constant, put it into a register if it is a
3831 legitimate constant and memory if it isn't. */
3832 if (CONSTANT_P (op0))
3833 {
3834 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3835 if (LEGITIMATE_CONSTANT_P (op0))
3836 op0 = force_reg (mode, op0);
3837 else
3838 op0 = validize_mem (force_const_mem (mode, op0));
3839 }
3840
3841 if (offset != 0)
3842 {
3843 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3844
3845 if (GET_CODE (op0) != MEM)
3846 abort ();
3847 op0 = change_address (op0, VOIDmode,
3848 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3849 force_reg (Pmode, offset_rtx)));
3850 }
3851
3852 /* Don't forget about volatility even if this is a bitfield. */
3853 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3854 {
3855 op0 = copy_rtx (op0);
3856 MEM_VOLATILE_P (op0) = 1;
3857 }
3858
3859 if (mode1 == VOIDmode
3860 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3861 && modifier != EXPAND_CONST_ADDRESS
3862 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3863 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3864 {
3865 /* In cases where an aligned union has an unaligned object
3866 as a field, we might be extracting a BLKmode value from
3867 an integer-mode (e.g., SImode) object. Handle this case
3868 by doing the extract into an object as wide as the field
3869 (which we know to be the width of a basic mode), then
3870 storing into memory, and changing the mode to BLKmode. */
3871 enum machine_mode ext_mode = mode;
3872
3873 if (ext_mode == BLKmode)
3874 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3875
3876 if (ext_mode == BLKmode)
3877 abort ();
3878
3879 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3880 unsignedp, target, ext_mode, ext_mode,
3881 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3882 int_size_in_bytes (TREE_TYPE (tem)));
3883 if (mode == BLKmode)
3884 {
3885 rtx new = assign_stack_temp (ext_mode,
3886 bitsize / BITS_PER_UNIT, 0);
3887
3888 emit_move_insn (new, op0);
3889 op0 = copy_rtx (new);
3890 PUT_MODE (op0, BLKmode);
3891 }
3892
3893 return op0;
3894 }
3895
3896 /* Get a reference to just this component. */
3897 if (modifier == EXPAND_CONST_ADDRESS
3898 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3899 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3900 (bitpos / BITS_PER_UNIT)));
3901 else
3902 op0 = change_address (op0, mode1,
3903 plus_constant (XEXP (op0, 0),
3904 (bitpos / BITS_PER_UNIT)));
3905 MEM_IN_STRUCT_P (op0) = 1;
3906 MEM_VOLATILE_P (op0) |= volatilep;
3907 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3908 return op0;
3909 if (target == 0)
3910 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3911 convert_move (target, op0, unsignedp);
3912 return target;
3913 }
3914
3915 case OFFSET_REF:
3916 {
3917 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3918 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3919 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3920 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3921 MEM_IN_STRUCT_P (temp) = 1;
3922 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3923 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3924 a location is accessed through a pointer to const does not mean
3925 that the value there can never change. */
3926 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3927 #endif
3928 return temp;
3929 }
3930
3931 /* Intended for a reference to a buffer of a file-object in Pascal.
3932 But it's not certain that a special tree code will really be
3933 necessary for these. INDIRECT_REF might work for them. */
3934 case BUFFER_REF:
3935 abort ();
3936
3937 /* IN_EXPR: Inlined Pascal set IN expression.
3938
3939 Algorithm:
3940 rlo = set_low - (set_low%bits_per_word);
3941 the_word = set [ (index - rlo)/bits_per_word ];
3942 bit_index = index % bits_per_word;
3943 bitmask = 1 << bit_index;
3944 return !!(the_word & bitmask); */
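/* The same test as a self-contained C sketch (hypothetical helper,
   shown only to illustrate the algorithm above; `set' is the byte
   array holding the bitmap and 8 stands in for BITS_PER_UNIT):

   int in_set (unsigned char *set, int set_low, int index)
   {
     int rlo = set_low - (set_low % 8);
     unsigned char the_word = set[(index - rlo) / 8];
     int bit_index = index % 8;
     return (the_word >> bit_index) & 1;
   }  */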
3945 case IN_EXPR:
3946 preexpand_calls (exp);
3947 {
3948 tree set = TREE_OPERAND (exp, 0);
3949 tree index = TREE_OPERAND (exp, 1);
3950 tree set_type = TREE_TYPE (set);
3951
3952 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3953 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
3954
3955 rtx index_val;
3956 rtx lo_r;
3957 rtx hi_r;
3958 rtx rlow;
3959 rtx diff, quo, rem, addr, bit, result;
3960 rtx setval, setaddr;
3961 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
3962
3963 if (target == 0)
3964 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
3965
3966 /* If domain is empty, answer is no. */
3967 if (tree_int_cst_lt (set_high_bound, set_low_bound))
3968 return const0_rtx;
3969
3970 index_val = expand_expr (index, 0, VOIDmode, 0);
3971 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
3972 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
3973 setval = expand_expr (set, 0, VOIDmode, 0);
3974 setaddr = XEXP (setval, 0);
3975
3976 /* Compare index against bounds, if they are constant. */
3977 if (GET_CODE (index_val) == CONST_INT
3978 && GET_CODE (lo_r) == CONST_INT)
3979 {
3980 if (INTVAL (index_val) < INTVAL (lo_r))
3981 return const0_rtx;
3982 }
3983
3984 if (GET_CODE (index_val) == CONST_INT
3985 && GET_CODE (hi_r) == CONST_INT)
3986 {
3987 if (INTVAL (hi_r) < INTVAL (index_val))
3988 return const0_rtx;
3989 }
3990
3991 /* If we get here, we have to generate the code for both cases
3992 (in range and out of range). */
3993
3994 op0 = gen_label_rtx ();
3995 op1 = gen_label_rtx ();
3996
3997 if (! (GET_CODE (index_val) == CONST_INT
3998 && GET_CODE (lo_r) == CONST_INT))
3999 {
4000 emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
4001 emit_jump_insn (gen_blt (op1));
4002 }
4003
4004 if (! (GET_CODE (index_val) == CONST_INT
4005 && GET_CODE (hi_r) == CONST_INT))
4006 {
4007 emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
4008 emit_jump_insn (gen_bgt (op1));
4009 }
4010
4011 /* Calculate the element number of bit zero in the first word
4012 of the set. */
4013 if (GET_CODE (lo_r) == CONST_INT)
4014 rlow = gen_rtx (CONST_INT, VOIDmode,
4015 INTVAL (lo_r) & ~ (BITS_PER_UNIT - 1));
4016 else
4017 rlow = expand_binop (index_mode, and_optab,
4018 lo_r, gen_rtx (CONST_INT, VOIDmode,
4019 ~ (BITS_PER_UNIT - 1)),
4020 0, 0, OPTAB_LIB_WIDEN);
4021
4022 diff = expand_binop (index_mode, sub_optab,
4023 index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
4024
4025 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4026 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4027 0, 0);
4028 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4029 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4030 0, 0);
4031 addr = memory_address (byte_mode,
4032 expand_binop (index_mode, add_optab,
4033 diff, setaddr, 0, 0, OPTAB_LIB_WIDEN));
4034 /* Extract the bit we want to examine. */
4035 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4036 gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
4037 result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
4038 1, OPTAB_LIB_WIDEN);
4039 emit_move_insn (target, result);
4040
4041 /* Output the code to handle the out-of-range case. */
4042 emit_jump (op0);
4043 emit_label (op1);
4044 emit_move_insn (target, const0_rtx);
4045 emit_label (op0);
4046 return target;
4047 }
4048
4049 case WITH_CLEANUP_EXPR:
4050 if (RTL_EXPR_RTL (exp) == 0)
4051 {
4052 RTL_EXPR_RTL (exp)
4053 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4054 cleanups_this_call
4055 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4056 /* That's it for this cleanup. */
4057 TREE_OPERAND (exp, 2) = 0;
4058 }
4059 return RTL_EXPR_RTL (exp);
4060
4061 case CALL_EXPR:
4062 /* Check for a built-in function. */
4063 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4064 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4065 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4066 return expand_builtin (exp, target, subtarget, tmode, ignore);
4067 /* If this call was expanded already by preexpand_calls,
4068 just return the result we got. */
4069 if (CALL_EXPR_RTL (exp) != 0)
4070 return CALL_EXPR_RTL (exp);
4071 return expand_call (exp, target, ignore);
4072
4073 case NON_LVALUE_EXPR:
4074 case NOP_EXPR:
4075 case CONVERT_EXPR:
4076 case REFERENCE_EXPR:
4077 if (TREE_CODE (type) == VOID_TYPE || ignore)
4078 {
4079 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4080 return const0_rtx;
4081 }
4082 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4083 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4084 if (TREE_CODE (type) == UNION_TYPE)
4085 {
4086 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4087 if (target == 0)
4088 {
4089 if (mode == BLKmode)
4090 {
4091 if (TYPE_SIZE (type) == 0
4092 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4093 abort ();
4094 target = assign_stack_temp (BLKmode,
4095 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4096 + BITS_PER_UNIT - 1)
4097 / BITS_PER_UNIT, 0);
4098 }
4099 else
4100 target = gen_reg_rtx (mode);
4101 }
4102 if (GET_CODE (target) == MEM)
4103 /* Store data into beginning of memory target. */
4104 store_expr (TREE_OPERAND (exp, 0),
4105 change_address (target, TYPE_MODE (valtype), 0), 0);
4106
4107 else if (GET_CODE (target) == REG)
4108 /* Store this field into a union of the proper type. */
4109 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4110 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4111 VOIDmode, 0, 1,
4112 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4113 else
4114 abort ();
4115
4116 /* Return the entire union. */
4117 return target;
4118 }
4119 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4120 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
4121 return op0;
4122 if (modifier == EXPAND_INITIALIZER)
4123 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4124 if (flag_force_mem && GET_CODE (op0) == MEM)
4125 op0 = copy_to_reg (op0);
4126
4127 if (target == 0)
4128 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4129 else
4130 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4131 return target;
4132
4133 case PLUS_EXPR:
4134 /* We come here from MINUS_EXPR when the second operand is a constant. */
4135 plus_expr:
4136 this_optab = add_optab;
4137
4138 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4139 something else, make sure we add the register to the constant and
4140 then to the other thing. This case can occur during strength
4141 reduction and doing it this way will produce better code if the
4142 frame pointer or argument pointer is eliminated.
4143
4144 fold-const.c will ensure that the constant is always in the inner
4145 PLUS_EXPR, so the only case we need to do anything about is if
4146 sp, ap, or fp is our second argument, in which case we must swap
4147 the innermost first argument and our second argument. */
4148
4149 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4150 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4151 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4152 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4153 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4154 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4155 {
4156 tree t = TREE_OPERAND (exp, 1);
4157
4158 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4159 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4160 }
4161
4162 /* If the result is to be Pmode and we are adding an integer to
4163 something, we might be forming a constant. So try to use
4164 plus_constant. If it produces a sum and we can't accept it,
4165 use force_operand. This allows P = &ARR[const] to generate
4166 efficient code on machines where a SYMBOL_REF is not a valid
4167 address.
4168
4169 If this is an EXPAND_SUM call, always return the sum. */
4170 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4171 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4172 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4173 || mode == Pmode))
4174 {
4175 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4176 EXPAND_SUM);
4177 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4178 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4179 op1 = force_operand (op1, target);
4180 return op1;
4181 }
4182
4183 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4184 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4185 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4186 || mode == Pmode))
4187 {
4188 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4189 EXPAND_SUM);
4190 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4191 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4192 op0 = force_operand (op0, target);
4193 return op0;
4194 }
4195
4196 /* No sense saving up arithmetic to be done
4197 if it's all in the wrong mode to form part of an address.
4198 And force_operand won't know whether to sign-extend or
4199 zero-extend. */
4200 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4201 || mode != Pmode) goto binop;
4202
4203 preexpand_calls (exp);
4204 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4205 subtarget = 0;
4206
4207 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4208 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4209
4210 /* Make sure any term that's a sum with a constant comes last. */
4211 if (GET_CODE (op0) == PLUS
4212 && CONSTANT_P (XEXP (op0, 1)))
4213 {
4214 temp = op0;
4215 op0 = op1;
4216 op1 = temp;
4217 }
4218 /* If adding to a sum including a constant,
4219 associate it to put the constant outside. */
4220 if (GET_CODE (op1) == PLUS
4221 && CONSTANT_P (XEXP (op1, 1)))
4222 {
4223 rtx constant_term = const0_rtx;
4224
4225 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4226 if (temp != 0)
4227 op0 = temp;
4228 /* Ensure that MULT comes first if there is one. */
4229 else if (GET_CODE (op0) == MULT)
4230 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4231 else
4232 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4233
4234 /* Let's also eliminate constants from op0 if possible. */
4235 op0 = eliminate_constant_term (op0, &constant_term);
4236
4237 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4238 their sum should be a constant. Form it into OP1, since the
4239 result we want will then be OP0 + OP1. */
4240
4241 temp = simplify_binary_operation (PLUS, mode, constant_term,
4242 XEXP (op1, 1));
4243 if (temp != 0)
4244 op1 = temp;
4245 else
4246 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4247 }
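/* E.g., expanding (x + 3) + (y + 5) for an address: the constant is
   peeled off each term and re-folded, producing
   (plus (plus x y) (const_int 8)), a shape the addressing machinery
   can use directly.  (Worked example only.)  */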
4248
4249 /* Put a constant term last and put a multiplication first. */
4250 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4251 temp = op1, op1 = op0, op0 = temp;
4252
4253 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4254 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4255
4256 case MINUS_EXPR:
4257 /* Handle difference of two symbolic constants,
4258 for the sake of an initializer. */
4259 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4260 && really_constant_p (TREE_OPERAND (exp, 0))
4261 && really_constant_p (TREE_OPERAND (exp, 1)))
4262 {
4263 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4264 VOIDmode, modifier);
4265 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4266 VOIDmode, modifier);
4267 return gen_rtx (MINUS, mode, op0, op1);
4268 }
4269 /* Convert A - const to A + (-const). */
4270 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4271 {
4272 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4273 fold (build1 (NEGATE_EXPR, type,
4274 TREE_OPERAND (exp, 1))));
4275 goto plus_expr;
4276 }
4277 this_optab = sub_optab;
4278 goto binop;
4279
4280 case MULT_EXPR:
4281 preexpand_calls (exp);
4282 /* If first operand is constant, swap them.
4283 Thus the following special case checks need only
4284 check the second operand. */
4285 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4286 {
4287 register tree t1 = TREE_OPERAND (exp, 0);
4288 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4289 TREE_OPERAND (exp, 1) = t1;
4290 }
4291
4292 /* Attempt to return something suitable for generating an
4293 indexed address, for machines that support that. */
4294
4295 if (modifier == EXPAND_SUM && mode == Pmode
4296 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4297 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4298 {
4299 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4300
4301 /* Apply distributive law if OP0 is x+c. */
4302 if (GET_CODE (op0) == PLUS
4303 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4304 return gen_rtx (PLUS, mode,
4305 gen_rtx (MULT, mode, XEXP (op0, 0),
4306 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4307 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4308 * INTVAL (XEXP (op0, 1))));
4309
4310 if (GET_CODE (op0) != REG)
4311 op0 = force_operand (op0, NULL_RTX);
4312 if (GET_CODE (op0) != REG)
4313 op0 = copy_to_mode_reg (mode, op0);
4314
4315 return gen_rtx (MULT, mode, op0,
4316 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4317 }
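/* E.g., in an address context (x + 4) * 8 distributes to
   (plus (mult x 8) (const_int 32)), which suits base+index*scale+offset
   addressing on machines that have it.  (Worked example only.)  */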
4318
4319 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4320 subtarget = 0;
4321
4322 /* Check for multiplying things that have been extended
4323 from a narrower type. If this machine supports multiplying
4324 in that narrower type with a result in the desired type,
4325 do it that way, and avoid the explicit type-conversion. */
4326 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4327 && TREE_CODE (type) == INTEGER_TYPE
4328 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4329 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4330 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4331 && int_fits_type_p (TREE_OPERAND (exp, 1),
4332 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4333 /* Don't use a widening multiply if a shift will do. */
4334 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4335 > HOST_BITS_PER_WIDE_INT)
4336 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4337 ||
4338 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4339 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4340 ==
4341 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4342 /* If both operands are extended, they must either both
4343 be zero-extended or both be sign-extended. */
4344 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4345 ==
4346 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4347 {
4348 enum machine_mode innermode
4349 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4350 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4351 ? umul_widen_optab : smul_widen_optab);
4352 if (mode == GET_MODE_WIDER_MODE (innermode)
4353 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4354 {
4355 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4356 NULL_RTX, VOIDmode, 0);
4357 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4358 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4359 VOIDmode, 0);
4360 else
4361 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4362 NULL_RTX, VOIDmode, 0);
4363 goto binop2;
4364 }
4365 }
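/* E.g., with 16-bit HImode and 32-bit SImode, `short a, b;' and the
   expression (int) a * (int) b can use smul_widen_optab directly on
   the HImode operands instead of sign-extending both and doing a full
   SImode multiply.  (Illustrative; whether this applies depends on
   the target's insn table.)  */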
4366 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4367 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4368 return expand_mult (mode, op0, op1, target, unsignedp);
4369
4370 case TRUNC_DIV_EXPR:
4371 case FLOOR_DIV_EXPR:
4372 case CEIL_DIV_EXPR:
4373 case ROUND_DIV_EXPR:
4374 case EXACT_DIV_EXPR:
4375 preexpand_calls (exp);
4376 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4377 subtarget = 0;
4378 /* Possible optimization: compute the dividend with EXPAND_SUM;
4379 then, if the divisor is constant, we can optimize the case
4380 where some terms of the dividend have coefficients divisible by it. */
4381 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4382 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4383 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4384
4385 case RDIV_EXPR:
4386 this_optab = flodiv_optab;
4387 goto binop;
4388
4389 case TRUNC_MOD_EXPR:
4390 case FLOOR_MOD_EXPR:
4391 case CEIL_MOD_EXPR:
4392 case ROUND_MOD_EXPR:
4393 preexpand_calls (exp);
4394 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4395 subtarget = 0;
4396 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4397 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4398 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4399
4400 case FIX_ROUND_EXPR:
4401 case FIX_FLOOR_EXPR:
4402 case FIX_CEIL_EXPR:
4403 abort (); /* Not used for C. */
4404
4405 case FIX_TRUNC_EXPR:
4406 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4407 if (target == 0)
4408 target = gen_reg_rtx (mode);
4409 expand_fix (target, op0, unsignedp);
4410 return target;
4411
4412 case FLOAT_EXPR:
4413 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4414 if (target == 0)
4415 target = gen_reg_rtx (mode);
4416 /* expand_float can't figure out what to do if FROM has VOIDmode.
4417 So give it the correct mode. With -O, cse will optimize this. */
4418 if (GET_MODE (op0) == VOIDmode)
4419 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4420 op0);
4421 expand_float (target, op0,
4422 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4423 return target;
4424
4425 case NEGATE_EXPR:
4426 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4427 temp = expand_unop (mode, neg_optab, op0, target, 0);
4428 if (temp == 0)
4429 abort ();
4430 return temp;
4431
4432 case ABS_EXPR:
4433 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4434
4435 /* Handle complex values specially. */
4436 {
4437 enum machine_mode opmode
4438 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4439
4440 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4441 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4442 return expand_complex_abs (opmode, op0, target, unsignedp);
4443 }
4444
4445 /* Unsigned abs is simply the operand. Testing here means we don't
4446 risk generating incorrect code below. */
4447 if (TREE_UNSIGNED (type))
4448 return op0;
4449
4450 /* First try to do it with a special abs instruction. */
4451 temp = expand_unop (mode, abs_optab, op0, target, 0);
4452 if (temp != 0)
4453 return temp;
4454
4455 /* If this machine has expensive jumps, we can do integer absolute
4456 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4457 where W is the width of MODE. */
4458
4459 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4460 {
4461 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4462 size_int (GET_MODE_BITSIZE (mode) - 1),
4463 NULL_RTX, 0);
4464
4465 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4466 OPTAB_LIB_WIDEN);
4467 if (temp != 0)
4468 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4469 OPTAB_LIB_WIDEN);
4470
4471 if (temp != 0)
4472 return temp;
4473 }
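/* The identity above as a stand-alone C sketch, assuming a 32-bit int
   with an arithmetic right shift (hypothetical helper, for
   illustration only):

   int iabs (int x)
   {
     int m = x >> 31;      (m is 0 if x >= 0, else -1)
     return (x ^ m) - m;
   }  */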
4474
4475 /* If that does not win, use conditional jump and negate. */
4476 target = original_target;
4477 temp = gen_label_rtx ();
4478 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4479 || (GET_CODE (target) == REG
4480 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4481 target = gen_reg_rtx (mode);
4482 emit_move_insn (target, op0);
4483 emit_cmp_insn (target,
4484 expand_expr (convert (type, integer_zero_node),
4485 NULL_RTX, VOIDmode, 0),
4486 GE, NULL_RTX, mode, 0, 0);
4487 NO_DEFER_POP;
4488 emit_jump_insn (gen_bge (temp));
4489 op0 = expand_unop (mode, neg_optab, target, target, 0);
4490 if (op0 != target)
4491 emit_move_insn (target, op0);
4492 emit_label (temp);
4493 OK_DEFER_POP;
4494 return target;
4495
4496 case MAX_EXPR:
4497 case MIN_EXPR:
4498 target = original_target;
4499 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4500 || (GET_CODE (target) == REG
4501 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4502 target = gen_reg_rtx (mode);
4503 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4504 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4505
4506 /* First try to do it with a special MIN or MAX instruction.
4507 If that does not win, use a conditional jump to select the proper
4508 value. */
4509 this_optab = (TREE_UNSIGNED (type)
4510 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4511 : (code == MIN_EXPR ? smin_optab : smax_optab));
4512
4513 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4514 OPTAB_WIDEN);
4515 if (temp != 0)
4516 return temp;
4517
4518 if (target != op0)
4519 emit_move_insn (target, op0);
4520 op0 = gen_label_rtx ();
4521 if (code == MAX_EXPR)
4522 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4523 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4524 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4525 else
4526 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4527 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4528 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4529 if (temp == const0_rtx)
4530 emit_move_insn (target, op1);
4531 else if (temp != const_true_rtx)
4532 {
4533 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4534 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4535 else
4536 abort ();
4537 emit_move_insn (target, op1);
4538 }
4539 emit_label (op0);
4540 return target;
4541
4542 /* ??? Can optimize when the operand of this is a bitwise operation,
4543 by using a different bitwise operation. */
4544 case BIT_NOT_EXPR:
4545 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4546 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4547 if (temp == 0)
4548 abort ();
4549 return temp;
4550
4551 case FFS_EXPR:
4552 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4553 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4554 if (temp == 0)
4555 abort ();
4556 return temp;
4557
4558 /* ??? Can optimize bitwise operations with one arg constant.
4559 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4560 and (a bitwise1 b) bitwise2 b (etc)
4561 but that is probably not worthwhile. */
4562
4563 /* BIT_AND_EXPR is for bitwise anding.
4564 TRUTH_AND_EXPR is for anding two boolean values
4565 when we want in all cases to compute both of them.
4566 In general it is fastest to do TRUTH_AND_EXPR by
4567 computing both operands as actual zero-or-1 values
4568 and then bitwise anding. In cases where there cannot
4569 be any side effects, better code would be made by
4570 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4571 but the question is how to recognize those cases. */
4572
4573 case TRUTH_AND_EXPR:
4574 case BIT_AND_EXPR:
4575 this_optab = and_optab;
4576 goto binop;
4577
4578 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4579 case TRUTH_OR_EXPR:
4580 case BIT_IOR_EXPR:
4581 this_optab = ior_optab;
4582 goto binop;
4583
4584 case BIT_XOR_EXPR:
4585 this_optab = xor_optab;
4586 goto binop;
4587
4588 case LSHIFT_EXPR:
4589 case RSHIFT_EXPR:
4590 case LROTATE_EXPR:
4591 case RROTATE_EXPR:
4592 preexpand_calls (exp);
4593 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4594 subtarget = 0;
4595 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4596 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4597 unsignedp);
4598
4599 /* Could determine the answer when only additive constants differ.
4600 Also, the addition of one can be handled by changing the condition. */
4601 case LT_EXPR:
4602 case LE_EXPR:
4603 case GT_EXPR:
4604 case GE_EXPR:
4605 case EQ_EXPR:
4606 case NE_EXPR:
4607 preexpand_calls (exp);
4608 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4609 if (temp != 0)
4610 return temp;
4611 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4612 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4613 && original_target
4614 && GET_CODE (original_target) == REG
4615 && (GET_MODE (original_target)
4616 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4617 {
4618 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4619 if (temp != original_target)
4620 temp = copy_to_reg (temp);
4621 op1 = gen_label_rtx ();
4622 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4623 GET_MODE (temp), unsignedp, 0);
4624 emit_jump_insn (gen_beq (op1));
4625 emit_move_insn (temp, const1_rtx);
4626 emit_label (op1);
4627 return temp;
4628 }
4629 /* If no set-flag instruction, must generate a conditional
4630 store into a temporary variable. Drop through
4631 and handle this like && and ||. */
4632
4633 case TRUTH_ANDIF_EXPR:
4634 case TRUTH_ORIF_EXPR:
4635 if (target == 0 || ! safe_from_p (target, exp)
4636 /* Make sure we don't have a hard reg (such as function's return
4637 value) live across basic blocks, if not optimizing. */
4638 || (!optimize && GET_CODE (target) == REG
4639 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4640 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4641 emit_clr_insn (target);
4642 op1 = gen_label_rtx ();
4643 jumpifnot (exp, op1);
4644 emit_0_to_1_insn (target);
4645 emit_label (op1);
4646 return target;
4647
4648 case TRUTH_NOT_EXPR:
4649 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4650 /* The parser is careful to generate TRUTH_NOT_EXPR
4651 only with operands that are always zero or one. */
4652 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4653 target, 1, OPTAB_LIB_WIDEN);
4654 if (temp == 0)
4655 abort ();
4656 return temp;
4657
4658 case COMPOUND_EXPR:
4659 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4660 emit_queue ();
4661 return expand_expr (TREE_OPERAND (exp, 1),
4662 (ignore ? const0_rtx : target),
4663 VOIDmode, 0);
4664
4665 case COND_EXPR:
4666 {
4667 /* Note that COND_EXPRs whose type is a structure or union
4668 are required to be constructed to contain assignments of
4669 a temporary variable, so that we can evaluate them here
4670 for side effect only. If type is void, we must do likewise. */
4671
4672 /* If an arm of the branch requires a cleanup,
4673 only that cleanup is performed. */
4674
4675 tree singleton = 0;
4676 tree binary_op = 0, unary_op = 0;
4677 tree old_cleanups = cleanups_this_call;
4678 cleanups_this_call = 0;
4679
4680 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4681 convert it to our mode, if necessary. */
4682 if (integer_onep (TREE_OPERAND (exp, 1))
4683 && integer_zerop (TREE_OPERAND (exp, 2))
4684 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4685 {
4686 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4687 if (GET_MODE (op0) == mode)
4688 return op0;
4689 if (target == 0)
4690 target = gen_reg_rtx (mode);
4691 convert_move (target, op0, unsignedp);
4692 return target;
4693 }
4694
4695 /* If we are not to produce a result, we have no target. Otherwise,
4696 if a target was specified use it; it will not be used as an
4697 intermediate target unless it is safe. If no target, use a
4698 temporary. */
4699
4700 if (mode == VOIDmode || ignore)
4701 temp = 0;
4702 else if (original_target
4703 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4704 temp = original_target;
4705 else if (mode == BLKmode)
4706 {
4707 if (TYPE_SIZE (type) == 0
4708 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4709 abort ();
4710 temp = assign_stack_temp (BLKmode,
4711 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4712 + BITS_PER_UNIT - 1)
4713 / BITS_PER_UNIT, 0);
4714 }
4715 else
4716 temp = gen_reg_rtx (mode);
4717
4718 /* Check for X ? A + B : A. If we have this, we can copy
4719 A to the output and conditionally add B. Similarly for unary
4720 operations. Don't do this if X has side-effects because
4721 those side effects might affect A or B and the "?" operation is
4722 a sequence point in ANSI. (We test for side effects later.) */
4723
4724 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4725 && operand_equal_p (TREE_OPERAND (exp, 2),
4726 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4727 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4728 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4729 && operand_equal_p (TREE_OPERAND (exp, 1),
4730 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4731 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4732 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4733 && operand_equal_p (TREE_OPERAND (exp, 2),
4734 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4735 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4736 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4737 && operand_equal_p (TREE_OPERAND (exp, 1),
4738 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4739 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4740
4741 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4742 operation, do this as A + (X != 0). Similarly for other simple
4743 binary operators. */
4744 if (singleton && binary_op
4745 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4746 && (TREE_CODE (binary_op) == PLUS_EXPR
4747 || TREE_CODE (binary_op) == MINUS_EXPR
4748 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4749 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4750 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4751 && integer_onep (TREE_OPERAND (binary_op, 1))
4752 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4753 {
4754 rtx result;
4755 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4756 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4757 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4758 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4759 : and_optab);
4760
4761 /* If we had X ? A : A + 1, do this as A + (X == 0).
4762
4763 We have to invert the truth value here and then put it
4764 back later if do_store_flag fails. We cannot simply copy
4765 TREE_OPERAND (exp, 0) to another variable and modify that
4766 because invert_truthvalue can modify the tree pointed to
4767 by its argument. */
4768 if (singleton == TREE_OPERAND (exp, 1))
4769 TREE_OPERAND (exp, 0)
4770 = invert_truthvalue (TREE_OPERAND (exp, 0));
4771
4772 result = do_store_flag (TREE_OPERAND (exp, 0),
4773 (safe_from_p (temp, singleton)
4774 ? temp : NULL_RTX),
4775 mode, BRANCH_COST <= 1);
4776
4777 if (result)
4778 {
4779 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4780 return expand_binop (mode, boptab, op1, result, temp,
4781 unsignedp, OPTAB_LIB_WIDEN);
4782 }
4783 else if (singleton == TREE_OPERAND (exp, 1))
4784 TREE_OPERAND (exp, 0)
4785 = invert_truthvalue (TREE_OPERAND (exp, 0));
4786 }
4787
4788 NO_DEFER_POP;
4789 op0 = gen_label_rtx ();
4790
4791 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4792 {
4793 if (temp != 0)
4794 {
4795 /* If the target conflicts with the other operand of the
4796 binary op, we can't use it. Also, we can't use the target
4797 if it is a hard register, because evaluating the condition
4798 might clobber it. */
4799 if ((binary_op
4800 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4801 || (GET_CODE (temp) == REG
4802 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4803 temp = gen_reg_rtx (mode);
4804 store_expr (singleton, temp, 0);
4805 }
4806 else
4807 expand_expr (singleton,
4808 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4809 if (cleanups_this_call)
4810 {
4811 sorry ("aggregate value in COND_EXPR");
4812 cleanups_this_call = 0;
4813 }
4814 if (singleton == TREE_OPERAND (exp, 1))
4815 jumpif (TREE_OPERAND (exp, 0), op0);
4816 else
4817 jumpifnot (TREE_OPERAND (exp, 0), op0);
4818
4819 if (binary_op && temp == 0)
4820 /* Just touch the other operand. */
4821 expand_expr (TREE_OPERAND (binary_op, 1),
4822 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4823 else if (binary_op)
4824 store_expr (build (TREE_CODE (binary_op), type,
4825 make_tree (type, temp),
4826 TREE_OPERAND (binary_op, 1)),
4827 temp, 0);
4828 else
4829 store_expr (build1 (TREE_CODE (unary_op), type,
4830 make_tree (type, temp)),
4831 temp, 0);
4832 op1 = op0;
4833 }
4834 #if 0
4835 /* This is now done in jump.c and is better done there because it
4836 produces shorter register lifetimes. */
4837
4838 /* Check for both possibilities, either constants or variables
4839 in registers (but not the same as the target!). If so, can
4840 save branches by assigning one, branching, and assigning the
4841 other. */
4842 else if (temp && GET_MODE (temp) != BLKmode
4843 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4844 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4845 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4846 && DECL_RTL (TREE_OPERAND (exp, 1))
4847 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4848 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4849 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4850 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4851 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4852 && DECL_RTL (TREE_OPERAND (exp, 2))
4853 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4854 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4855 {
4856 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4857 temp = gen_reg_rtx (mode);
4858 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4859 jumpifnot (TREE_OPERAND (exp, 0), op0);
4860 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4861 op1 = op0;
4862 }
4863 #endif
4864 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4865 comparison operator. If we have one of these cases, set the
4866 output to A, branch on A (cse will merge these two references),
4867 then set the output to FOO. */
4868 else if (temp
4869 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4870 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4871 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4872 TREE_OPERAND (exp, 1), 0)
4873 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4874 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4875 {
4876 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4877 temp = gen_reg_rtx (mode);
4878 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4879 jumpif (TREE_OPERAND (exp, 0), op0);
4880 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4881 op1 = op0;
4882 }
4883 else if (temp
4884 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4885 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4886 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4887 TREE_OPERAND (exp, 2), 0)
4888 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4889 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4890 {
4891 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4892 temp = gen_reg_rtx (mode);
4893 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4894 jumpifnot (TREE_OPERAND (exp, 0), op0);
4895 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4896 op1 = op0;
4897 }
4898 else
4899 {
4900 op1 = gen_label_rtx ();
4901 jumpifnot (TREE_OPERAND (exp, 0), op0);
4902 if (temp != 0)
4903 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4904 else
4905 expand_expr (TREE_OPERAND (exp, 1),
4906 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4907 if (cleanups_this_call)
4908 {
4909 sorry ("aggregate value in COND_EXPR");
4910 cleanups_this_call = 0;
4911 }
4912
4913 emit_queue ();
4914 emit_jump_insn (gen_jump (op1));
4915 emit_barrier ();
4916 emit_label (op0);
4917 if (temp != 0)
4918 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4919 else
4920 expand_expr (TREE_OPERAND (exp, 2),
4921 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4922 }
4923
4924 if (cleanups_this_call)
4925 {
4926 sorry ("aggregate value in COND_EXPR");
4927 cleanups_this_call = 0;
4928 }
4929
4930 emit_queue ();
4931 emit_label (op1);
4932 OK_DEFER_POP;
4933 cleanups_this_call = old_cleanups;
4934 return temp;
4935 }
4936
4937 case TARGET_EXPR:
4938 {
4939 /* Something needs to be initialized, but we didn't know
4940 where that thing was when building the tree. For example,
4941 it could be the return value of a function, or a parameter
4942 to a function which is laid out on the stack, or a temporary
4943 variable which must be passed by reference.
4944
4945 We guarantee that the expression will either be constructed
4946 or copied into our original target. */
4947
4948 tree slot = TREE_OPERAND (exp, 0);
4949 tree exp1;
4950
4951 if (TREE_CODE (slot) != VAR_DECL)
4952 abort ();
4953
4954 if (target == 0)
4955 {
4956 if (DECL_RTL (slot) != 0)
4957 {
4958 target = DECL_RTL (slot);
4959 /* We have already expanded the slot, so don't do
4960 it again. (mrs) */
4961 if (TREE_OPERAND (exp, 1) == NULL_TREE)
4962 return target;
4963 }
4964 else
4965 {
4966 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4967 /* All temp slots at this level must not conflict. */
4968 preserve_temp_slots (target);
4969 DECL_RTL (slot) = target;
4970 }
4971
4972 #if 0
4973 /* I bet this needs to be done, and I bet that it needs to
4974 be above, inside the else clause. The reason is
4975 simple: how else is it going to get cleaned up? (mrs)
4976
4977 The reason it probably did not work before, and was
4978 commented out, is that it was re-expanding already
4979 expanded target_exprs (target == 0 and DECL_RTL (slot)
4980 != 0) and also cleaning them up many times. :-( */
4981
4982 /* Since SLOT is not known to the called function
4983 to belong to its stack frame, we must build an explicit
4984 cleanup. This case occurs when we must build up a reference
4985 to pass the reference as an argument. In this case,
4986 it is very likely that such a reference need not be
4987 built here. */
4988
4989 if (TREE_OPERAND (exp, 2) == 0)
4990 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4991 if (TREE_OPERAND (exp, 2))
4992 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4993 cleanups_this_call);
4994 #endif
4995 }
4996 else
4997 {
4998 /* This case does occur, when expanding a parameter which
4999 needs to be constructed on the stack. The target
5000 is the actual stack address that we want to initialize.
5001 The function we call will perform the cleanup in this case. */
5002
5003 DECL_RTL (slot) = target;
5004 }
5005
5006 exp1 = TREE_OPERAND (exp, 1);
5007 /* Mark it as expanded. */
5008 TREE_OPERAND (exp, 1) = NULL_TREE;
5009
5010 return expand_expr (exp1, target, tmode, modifier);
5011 }
5012
5013 case INIT_EXPR:
5014 {
5015 tree lhs = TREE_OPERAND (exp, 0);
5016 tree rhs = TREE_OPERAND (exp, 1);
5017 tree noncopied_parts = 0;
5018 tree lhs_type = TREE_TYPE (lhs);
5019
5020 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5021 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5022 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5023 TYPE_NONCOPIED_PARTS (lhs_type));
5024 while (noncopied_parts != 0)
5025 {
5026 expand_assignment (TREE_VALUE (noncopied_parts),
5027 TREE_PURPOSE (noncopied_parts), 0, 0);
5028 noncopied_parts = TREE_CHAIN (noncopied_parts);
5029 }
5030 return temp;
5031 }
5032
5033 case MODIFY_EXPR:
5034 {
5035 /* If lhs is complex, expand calls in rhs before computing it.
5036 That's so we don't compute a pointer and save it over a call.
5037 If lhs is simple, compute it first so we can give it as a
5038 target if the rhs is just a call. This avoids an extra temp and copy
5039 and that prevents a partial-subsumption which makes bad code.
5040 Actually we could treat component_ref's of vars like vars. */
5041
5042 tree lhs = TREE_OPERAND (exp, 0);
5043 tree rhs = TREE_OPERAND (exp, 1);
5044 tree noncopied_parts = 0;
5045 tree lhs_type = TREE_TYPE (lhs);
5046
5047 temp = 0;
5048
5049 if (TREE_CODE (lhs) != VAR_DECL
5050 && TREE_CODE (lhs) != RESULT_DECL
5051 && TREE_CODE (lhs) != PARM_DECL)
5052 preexpand_calls (exp);
5053
5054 /* Check for |= or &= of a bitfield of size one into another bitfield
5055 of size one. In this case (unless we need the result of the
5056 assignment) we can do this more efficiently with a
5057 test followed by an assignment, if necessary.
5058
5059 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5060 things change so we do, this code should be enhanced to
5061 support it. */
5062 if (ignore
5063 && TREE_CODE (lhs) == COMPONENT_REF
5064 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5065 || TREE_CODE (rhs) == BIT_AND_EXPR)
5066 && TREE_OPERAND (rhs, 0) == lhs
5067 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5068 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5069 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5070 {
5071 rtx label = gen_label_rtx ();
5072
5073 do_jump (TREE_OPERAND (rhs, 1),
5074 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5075 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5076 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5077 (TREE_CODE (rhs) == BIT_IOR_EXPR
5078 ? integer_one_node
5079 : integer_zero_node)),
5080 0, 0);
5081 do_pending_stack_adjust ();
5082 emit_label (label);
5083 return const0_rtx;
5084 }
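/* E.g., with `struct s { unsigned a : 1, b : 1; } x;' the statement
   `x.a |= x.b;' is handled as `if (x.b) x.a = 1;' -- one conditional
   store instead of a read-modify-write of the destination bit.
   (Source-level sketch only.)  */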
5085
5086 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5087 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5088 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5089 TYPE_NONCOPIED_PARTS (lhs_type));
5090
5091 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5092 while (noncopied_parts != 0)
5093 {
5094 expand_assignment (TREE_PURPOSE (noncopied_parts),
5095 TREE_VALUE (noncopied_parts), 0, 0);
5096 noncopied_parts = TREE_CHAIN (noncopied_parts);
5097 }
5098 return temp;
5099 }
5100
5101 case PREINCREMENT_EXPR:
5102 case PREDECREMENT_EXPR:
5103 return expand_increment (exp, 0);
5104
5105 case POSTINCREMENT_EXPR:
5106 case POSTDECREMENT_EXPR:
5107 /* Faster to treat as pre-increment if result is not used. */
5108 return expand_increment (exp, ! ignore);
5109
5110 case ADDR_EXPR:
5111 /* Are we taking the address of a nested function? */
5112 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5113 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5114 {
5115 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5116 op0 = force_operand (op0, target);
5117 }
5118 else
5119 {
5120 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5121 (modifier == EXPAND_INITIALIZER
5122 ? modifier : EXPAND_CONST_ADDRESS));
5123 if (GET_CODE (op0) != MEM)
5124 abort ();
5125
5126 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5127 return XEXP (op0, 0);
5128 op0 = force_operand (XEXP (op0, 0), target);
5129 }
5130 if (flag_force_addr && GET_CODE (op0) != REG)
5131 return force_reg (Pmode, op0);
5132 return op0;
5133
5134 case ENTRY_VALUE_EXPR:
5135 abort ();
5136
5137 /* COMPLEX type for Extended Pascal & Fortran */
5138 case COMPLEX_EXPR:
5139 {
5140 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5141
5142 rtx prev;
5143
5144 /* Get the rtx code of the operands. */
5145 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5146 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5147
5148 if (! target)
5149 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5150
5151 prev = get_last_insn ();
5152
5153 /* Tell flow that the whole of the destination is being set. */
5154 if (GET_CODE (target) == REG)
5155 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5156
5157 /* Move the real (op0) and imaginary (op1) parts to their location. */
5158 emit_move_insn (gen_realpart (mode, target), op0);
5159 emit_move_insn (gen_imagpart (mode, target), op1);
5160
5161 /* Complex construction should appear as a single unit. */
5162 group_insns (prev);
5163
5164 return target;
5165 }
5166
5167 case REALPART_EXPR:
5168 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5169 return gen_realpart (mode, op0);
5170
5171 case IMAGPART_EXPR:
5172 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5173 return gen_imagpart (mode, op0);
5174
5175 case CONJ_EXPR:
5176 {
5177 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5178 rtx imag_t;
5179 rtx prev;
5180
5181 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5182
5183 if (! target)
5184 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5185
5186 prev = get_last_insn ();
5187
5188 /* Tell flow that the whole of the destination is being set. */
5189 if (GET_CODE (target) == REG)
5190 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5191
5192 /* Store the realpart and the negated imagpart to target. */
5193 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5194
5195 imag_t = gen_imagpart (mode, target);
5196 temp = expand_unop (mode, neg_optab,
5197 gen_imagpart (mode, op0), imag_t, 0);
5198 if (temp != imag_t)
5199 emit_move_insn (imag_t, temp);
5200
5201 /* Conjugate should appear as a single unit. */
5202 group_insns (prev);
5203
5204 return target;
5205 }
5206
5207 case ERROR_MARK:
5208 return const0_rtx;
5209
5210 default:
5211 return (*lang_expand_expr) (exp, target, tmode, modifier);
5212 }
5213
5214 /* Here to do an ordinary binary operator, generating an instruction
5215 from the optab already placed in `this_optab'. */
5216 binop:
5217 preexpand_calls (exp);
5218 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5219 subtarget = 0;
5220 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5221 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5222 binop2:
5223 temp = expand_binop (mode, this_optab, op0, op1, target,
5224 unsignedp, OPTAB_LIB_WIDEN);
5225 if (temp == 0)
5226 abort ();
5227 return temp;
5228 }
5229 \f
5230 /* Return the alignment in bits of EXP, a pointer valued expression.
5231 But don't return more than MAX_ALIGN no matter what.
5232 The alignment returned is, by default, the alignment of the thing that
5233 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5234
5235 Otherwise, look at the expression to see if we can do better, i.e., if the
5236 expression is actually pointing at an object whose alignment is tighter. */
5237
5238 static int
5239 get_pointer_alignment (exp, max_align)
5240 tree exp;
5241 unsigned max_align;
5242 {
5243 unsigned align, inner;
5244
5245 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5246 return 0;
5247
5248 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5249 align = MIN (align, max_align);
5250
5251 while (1)
5252 {
5253 switch (TREE_CODE (exp))
5254 {
5255 case NOP_EXPR:
5256 case CONVERT_EXPR:
5257 case NON_LVALUE_EXPR:
5258 exp = TREE_OPERAND (exp, 0);
5259 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5260 return align;
5261 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5262 inner = MIN (inner, max_align);
5263 align = MAX (align, inner);
5264 break;
5265
5266 case PLUS_EXPR:
5267 /* If sum of pointer + int, restrict our maximum alignment to that
5268 imposed by the integer. If not, we can't do any better than
5269 ALIGN. */
5270 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5271 return align;
5272
5273 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5274 & (max_align - 1))
5275 != 0)
5276 max_align >>= 1;
5277
5278 exp = TREE_OPERAND (exp, 0);
5279 break;
5280
5281 case ADDR_EXPR:
5282 /* See what we are pointing at and look at its alignment. */
5283 exp = TREE_OPERAND (exp, 0);
5284 if (TREE_CODE (exp) == FUNCTION_DECL)
5285 align = MAX (align, FUNCTION_BOUNDARY);
5286 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5287 align = MAX (align, DECL_ALIGN (exp));
5288 #ifdef CONSTANT_ALIGNMENT
5289 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5290 align = CONSTANT_ALIGNMENT (exp, align);
5291 #endif
5292 return MIN (align, max_align);
5293
5294 default:
5295 return align;
5296 }
5297 }
5298 }
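/* Example: for `(char *) &i + 1', with `i' a 32-bit-aligned int, the
   PLUS_EXPR case above halves MAX_ALIGN until the one-byte offset (8 bits)
   divides it, so the result is 8 bits even though the ADDR_EXPR case alone
   would justify 32. */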
5299 \f
5300 /* Return the tree node and offset if a given argument corresponds to
5301 a string constant. */
5302
5303 static tree
5304 string_constant (arg, ptr_offset)
5305 tree arg;
5306 tree *ptr_offset;
5307 {
5308 STRIP_NOPS (arg);
5309
5310 if (TREE_CODE (arg) == ADDR_EXPR
5311 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5312 {
5313 *ptr_offset = integer_zero_node;
5314 return TREE_OPERAND (arg, 0);
5315 }
5316 else if (TREE_CODE (arg) == PLUS_EXPR)
5317 {
5318 tree arg0 = TREE_OPERAND (arg, 0);
5319 tree arg1 = TREE_OPERAND (arg, 1);
5320
5321 STRIP_NOPS (arg0);
5322 STRIP_NOPS (arg1);
5323
5324 if (TREE_CODE (arg0) == ADDR_EXPR
5325 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5326 {
5327 *ptr_offset = arg1;
5328 return TREE_OPERAND (arg0, 0);
5329 }
5330 else if (TREE_CODE (arg1) == ADDR_EXPR
5331 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5332 {
5333 *ptr_offset = arg0;
5334 return TREE_OPERAND (arg1, 0);
5335 }
5336 }
5337
5338 return 0;
5339 }
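/* Thus `"hello"' (which reaches here as an ADDR_EXPR of the STRING_CST)
   yields the STRING_CST with *PTR_OFFSET of zero; `"hello" + i' and
   `i + "hello"' yield it with *PTR_OFFSET set to I; anything else
   yields 0. */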
5340
5341 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5342 way, because the string could contain a zero byte in the middle.
5343 TREE_STRING_LENGTH is the size of the character array, not the string.
5344
5345 Unfortunately, string_constant can't access the values of const char
5346 arrays with initializers, so neither can we here. */
5347
5348 static tree
5349 c_strlen (src)
5350 tree src;
5351 {
5352 tree offset_node;
5353 int offset, max;
5354 char *ptr;
5355
5356 src = string_constant (src, &offset_node);
5357 if (src == 0)
5358 return 0;
5359 max = TREE_STRING_LENGTH (src);
5360 ptr = TREE_STRING_POINTER (src);
5361 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5362 {
5363 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5364 compute the offset to the following null if we don't know where to
5365 start searching for it. */
5366 int i;
5367 for (i = 0; i < max; i++)
5368 if (ptr[i] == 0)
5369 return 0;
5370 /* We don't know the starting offset, but we do know that the string
5371 has no internal zero bytes. We can assume that the offset falls
5372 within the bounds of the string; otherwise, the programmer deserves
5373 what he gets. Subtract the offset from the length of the string,
5374 and return that. */
5375 /* This would perhaps not be valid if we were dealing with named
5376 arrays in addition to literal string constants. */
5377 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5378 }
5379
5380 /* We have a known offset into the string. Start searching there for
5381 a null character. */
5382 if (offset_node == 0)
5383 offset = 0;
5384 else
5385 {
5386 /* Did we get a long long offset? If so, punt. */
5387 if (TREE_INT_CST_HIGH (offset_node) != 0)
5388 return 0;
5389 offset = TREE_INT_CST_LOW (offset_node);
5390 }
5391 /* If the offset is known to be out of bounds, warn, and call strlen at
5392 runtime. */
5393 if (offset < 0 || offset > max)
5394 {
5395 warning ("offset outside bounds of constant string");
5396 return 0;
5397 }
5398 /* Use strlen to search for the first zero byte. Since any strings
5399 constructed with build_string will have nulls appended, we win even
5400 if we get handed something like (char[4])"abcd".
5401
5402 Since OFFSET is our starting index into the string, no further
5403 calculation is needed. */
5404 return size_int (strlen (ptr + offset));
5405 }
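/* E.g., `"abcd" + 2' yields 2 via the strlen call at the end.  With a
   variable offset the scan above finds the terminating null, which lies
   inside the array for an ordinary literal, and gives up; only something
   like `(char[4]) "abcd" + i', whose array contains no zero byte at all,
   reaches the MINUS_EXPR branch and yields `4 - i'. */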
5406 \f
5407 /* Expand an expression EXP that calls a built-in function,
5408 with result going to TARGET if that's convenient
5409 (and in mode MODE if that's convenient).
5410 SUBTARGET may be used as the target for computing one of EXP's operands.
5411 IGNORE is nonzero if the value is to be ignored. */
5412
5413 static rtx
5414 expand_builtin (exp, target, subtarget, mode, ignore)
5415 tree exp;
5416 rtx target;
5417 rtx subtarget;
5418 enum machine_mode mode;
5419 int ignore;
5420 {
5421 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5422 tree arglist = TREE_OPERAND (exp, 1);
5423 rtx op0;
5424 rtx lab1, insns;
5425 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5426 optab builtin_optab;
5427
5428 switch (DECL_FUNCTION_CODE (fndecl))
5429 {
5430 case BUILT_IN_ABS:
5431 case BUILT_IN_LABS:
5432 case BUILT_IN_FABS:
5433 /* build_function_call changes these into ABS_EXPR. */
5434 abort ();
5435
5436 case BUILT_IN_SIN:
5437 case BUILT_IN_COS:
5438 case BUILT_IN_FSQRT:
5439 /* If not optimizing, call the library function. */
5440 if (! optimize)
5441 break;
5442
5443 if (arglist == 0
5444 /* Arg could be wrong type if user redeclared this fcn wrong. */
5445 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5446 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5447
5448 /* Stabilize and compute the argument. */
5449 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5450 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5451 {
5452 exp = copy_node (exp);
5453 arglist = copy_node (arglist);
5454 TREE_OPERAND (exp, 1) = arglist;
5455 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5456 }
5457 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5458
5459 /* Make a suitable register to place result in. */
5460 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5461
5462 emit_queue ();
5463 start_sequence ();
5464
5465 switch (DECL_FUNCTION_CODE (fndecl))
5466 {
5467 case BUILT_IN_SIN:
5468 builtin_optab = sin_optab; break;
5469 case BUILT_IN_COS:
5470 builtin_optab = cos_optab; break;
5471 case BUILT_IN_FSQRT:
5472 builtin_optab = sqrt_optab; break;
5473 default:
5474 abort ();
5475 }
5476
5477 /* Compute into TARGET.
5478 Set TARGET to wherever the result comes back. */
5479 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5480 builtin_optab, op0, target, 0);
5481
5482 /* If we were unable to expand via the builtin, stop the
5483 sequence (without outputting the insns) and break, causing
5484 a call to the library function. */
5485 if (target == 0)
5486 {
5487 end_sequence ();
5488 break;
5489 }
5490
5491 /* Check the results by default. But if flag_fast_math is turned on,
5492 then assume sqrt will always be called with valid arguments. */
5493
5494 if (! flag_fast_math)
5495 {
5496 /* Don't define the builtin FP instructions
5497 if your machine is not IEEE. */
5498 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5499 abort ();
5500
5501 lab1 = gen_label_rtx ();
5502
5503 /* Test the result; if it is NaN, set errno=EDOM because
5504 the argument was not in the domain. */
5505 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5506 emit_jump_insn (gen_beq (lab1));
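          /* An IEEE NaN compares unequal even to itself, so only a NaN
             result falls through the beq above into the errno-setting
             code; every valid result branches around it to LAB1. */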
5507
5508 #if TARGET_EDOM
5509 {
5510 #ifdef GEN_ERRNO_RTX
5511 rtx errno_rtx = GEN_ERRNO_RTX;
5512 #else
5513 rtx errno_rtx
5514 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5515 #endif
5516
5517 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5518 }
5519 #else
5520 /* We can't set errno=EDOM directly; let the library call do it.
5521 Pop the arguments right away in case the call gets deleted. */
5522 NO_DEFER_POP;
5523 expand_call (exp, target, 0);
5524 OK_DEFER_POP;
5525 #endif
5526
5527 emit_label (lab1);
5528 }
5529
5530 /* Output the entire sequence. */
5531 insns = get_insns ();
5532 end_sequence ();
5533 emit_insns (insns);
5534
5535 return target;
5536
5537 case BUILT_IN_SAVEREGS:
5538 /* Don't do __builtin_saveregs more than once in a function.
5539 Save the result of the first call and reuse it. */
5540 if (saveregs_value != 0)
5541 return saveregs_value;
5542 {
5543 /* When this function is called, it means that registers must be
5544 saved on entry to this function. So we migrate the
5545 call to the first insn of this function. */
5546 rtx temp;
5547 rtx seq;
5548 rtx valreg, saved_valreg;
5549
5550 /* Now really call the function. `expand_call' does not call
5551 expand_builtin, so there is no danger of infinite recursion here. */
5552 start_sequence ();
5553
5554 #ifdef EXPAND_BUILTIN_SAVEREGS
5555 /* Do whatever the machine needs done in this case. */
5556 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5557 #else
5558 /* The register where the function returns its value
5559 is likely to have something else in it, such as an argument.
5560 So preserve that register around the call. */
5561 if (value_mode != VOIDmode)
5562 {
5563 valreg = hard_libcall_value (value_mode);
5564 saved_valreg = gen_reg_rtx (value_mode);
5565 emit_move_insn (saved_valreg, valreg);
5566 }
5567
5568 /* Generate the call, putting the value in a pseudo. */
5569 temp = expand_call (exp, target, ignore);
5570
5571 if (value_mode != VOIDmode)
5572 emit_move_insn (valreg, saved_valreg);
5573 #endif
5574
5575 seq = get_insns ();
5576 end_sequence ();
5577
5578 saveregs_value = temp;
5579
5580 /* This won't work inside a SEQUENCE--it really has to be
5581 at the start of the function. */
5582 if (in_sequence_p ())
5583 {
5584 /* Better to do this than to crash. */
5585 error ("`va_start' used within `({...})'");
5586 return temp;
5587 }
5588
5589 /* Put the sequence after the NOTE that starts the function. */
5590 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5591 return temp;
5592 }
5593
5594 /* __builtin_args_info (N) returns word N of the arg space info
5595 for the current function. The number and meanings of words
5596 are controlled by the definition of CUMULATIVE_ARGS. */
5597 case BUILT_IN_ARGS_INFO:
5598 {
5599 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5600 int i;
5601 int *word_ptr = (int *) &current_function_args_info;
5602 tree type, elts, result;
5603
5604 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5605 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5606 __FILE__, __LINE__);
5607
5608 if (arglist != 0)
5609 {
5610 tree arg = TREE_VALUE (arglist);
5611 if (TREE_CODE (arg) != INTEGER_CST)
5612 error ("argument of __builtin_args_info must be constant");
5613 else
5614 {
5615 int wordnum = TREE_INT_CST_LOW (arg);
5616
5617 if (wordnum < 0 || wordnum >= nwords)
5618 error ("argument of __builtin_args_info out of range");
5619 else
5620 return GEN_INT (word_ptr[wordnum]);
5621 }
5622 }
5623 else
5624 error ("missing argument in __builtin_args_info");
5625
5626 return const0_rtx;
5627
5628 #if 0
5629 for (i = 0; i < nwords; i++)
5630 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5631
5632 type = build_array_type (integer_type_node,
5633 build_index_type (build_int_2 (nwords, 0)));
5634 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5635 TREE_CONSTANT (result) = 1;
5636 TREE_STATIC (result) = 1;
5637 result = build (INDIRECT_REF, build_pointer_type (type), result);
5638 TREE_CONSTANT (result) = 1;
5639 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5640 #endif
5641 }
5642
5643 /* Return the address of the first anonymous stack arg. */
5644 case BUILT_IN_NEXT_ARG:
5645 {
5646 tree fntype = TREE_TYPE (current_function_decl);
5647 if (!(TYPE_ARG_TYPES (fntype) != 0
5648 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5649 != void_type_node)))
5650 {
5651 error ("`va_start' used in function with fixed args");
5652 return const0_rtx;
5653 }
5654 }
5655
5656 return expand_binop (Pmode, add_optab,
5657 current_function_internal_arg_pointer,
5658 current_function_arg_offset_rtx,
5659 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5660
5661 case BUILT_IN_CLASSIFY_TYPE:
5662 if (arglist != 0)
5663 {
5664 tree type = TREE_TYPE (TREE_VALUE (arglist));
5665 enum tree_code code = TREE_CODE (type);
5666 if (code == VOID_TYPE)
5667 return GEN_INT (void_type_class);
5668 if (code == INTEGER_TYPE)
5669 return GEN_INT (integer_type_class);
5670 if (code == CHAR_TYPE)
5671 return GEN_INT (char_type_class);
5672 if (code == ENUMERAL_TYPE)
5673 return GEN_INT (enumeral_type_class);
5674 if (code == BOOLEAN_TYPE)
5675 return GEN_INT (boolean_type_class);
5676 if (code == POINTER_TYPE)
5677 return GEN_INT (pointer_type_class);
5678 if (code == REFERENCE_TYPE)
5679 return GEN_INT (reference_type_class);
5680 if (code == OFFSET_TYPE)
5681 return GEN_INT (offset_type_class);
5682 if (code == REAL_TYPE)
5683 return GEN_INT (real_type_class);
5684 if (code == COMPLEX_TYPE)
5685 return GEN_INT (complex_type_class);
5686 if (code == FUNCTION_TYPE)
5687 return GEN_INT (function_type_class);
5688 if (code == METHOD_TYPE)
5689 return GEN_INT (method_type_class);
5690 if (code == RECORD_TYPE)
5691 return GEN_INT (record_type_class);
5692 if (code == UNION_TYPE)
5693 return GEN_INT (union_type_class);
5694 if (code == ARRAY_TYPE)
5695 return GEN_INT (array_type_class);
5696 if (code == STRING_TYPE)
5697 return GEN_INT (string_type_class);
5698 if (code == SET_TYPE)
5699 return GEN_INT (set_type_class);
5700 if (code == FILE_TYPE)
5701 return GEN_INT (file_type_class);
5702 if (code == LANG_TYPE)
5703 return GEN_INT (lang_type_class);
5704 }
5705 return GEN_INT (no_type_class);
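      /* E.g., `__builtin_classify_type (1.5)' yields real_type_class and
         `__builtin_classify_type ((int *) 0)' yields pointer_type_class;
         the catch-all no_type_class means no argument was supplied. */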
5706
5707 case BUILT_IN_CONSTANT_P:
5708 if (arglist == 0)
5709 return const0_rtx;
5710 else
5711 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5712 ? const1_rtx : const0_rtx);
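      /* Only a literal constant (TREE_CODE class 'c') counts here:
         `__builtin_constant_p (3)' is 1, while `__builtin_constant_p (x)'
         is 0 even if X is a const variable. */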
5713
5714 case BUILT_IN_FRAME_ADDRESS:
5715 /* The argument must be a nonnegative integer constant.
5716 It counts the number of frames to scan up the stack.
5717 The value is the address of that frame. */
5718 case BUILT_IN_RETURN_ADDRESS:
5719 /* The argument must be a nonnegative integer constant.
5720 It counts the number of frames to scan up the stack.
5721 The value is the return address saved in that frame. */
5722 if (arglist == 0)
5723 /* Warning about missing arg was already issued. */
5724 return const0_rtx;
5725 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5726 {
5727 error ("invalid arg to __builtin_return_address");
5728 return const0_rtx;
5729 }
5730 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5731 {
5732 error ("invalid arg to __builtin_return_address");
5733 return const0_rtx;
5734 }
5735 else
5736 {
5737 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5738 rtx tem = frame_pointer_rtx;
5739 int i;
5740
5741 /* Scan back COUNT frames to the specified frame. */
5742 for (i = 0; i < count; i++)
5743 {
5744 /* Assume the dynamic chain pointer is in the word that
5745 the frame address points to, unless otherwise specified. */
5746 #ifdef DYNAMIC_CHAIN_ADDRESS
5747 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5748 #endif
5749 tem = memory_address (Pmode, tem);
5750 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5751 }
5752
5753 /* For __builtin_frame_address, return what we've got. */
5754 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5755 return tem;
5756
5757 /* For __builtin_return_address,
5758 get the return address from that frame. */
5759 #ifdef RETURN_ADDR_RTX
5760 return RETURN_ADDR_RTX (count, tem);
5761 #else
5762 tem = memory_address (Pmode,
5763 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5764 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5765 #endif
5766 }
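      /* Each pass through the loop above replaces TEM by, roughly,
         `*(void **) DYNAMIC_CHAIN_ADDRESS (TEM)', following one link in
         the chain of saved frame pointers; a COUNT of 0 therefore names
         the current frame. */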
5767
5768 case BUILT_IN_ALLOCA:
5769 if (arglist == 0
5770 /* Arg could be non-integer if user redeclared this fcn wrong. */
5771 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5772 return const0_rtx;
5773 current_function_calls_alloca = 1;
5774 /* Compute the argument. */
5775 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5776
5777 /* Allocate the desired space. */
5778 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5779
5780 /* Record the new stack level for nonlocal gotos. */
5781 if (nonlocal_goto_handler_slot != 0)
5782 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5783 return target;
5784
5785 case BUILT_IN_FFS:
5786 /* If not optimizing, call the library function. */
5787 if (!optimize)
5788 break;
5789
5790 if (arglist == 0
5791 /* Arg could be non-integer if user redeclared this fcn wrong. */
5792 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5793 return const0_rtx;
5794
5795 /* Compute the argument. */
5796 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5797 /* Compute ffs, into TARGET if possible.
5798 Set TARGET to wherever the result comes back. */
5799 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5800 ffs_optab, op0, target, 1);
5801 if (target == 0)
5802 abort ();
5803 return target;
5804
5805 case BUILT_IN_STRLEN:
5806 /* If not optimizing, call the library function. */
5807 if (!optimize)
5808 break;
5809
5810 if (arglist == 0
5811 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5812 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5813 return const0_rtx;
5814 else
5815 {
5816 tree src = TREE_VALUE (arglist);
5817 tree len = c_strlen (src);
5818
5819 int align
5820 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5821
5822 rtx result, src_rtx, char_rtx;
5823 enum machine_mode insn_mode = value_mode, char_mode;
5824 enum insn_code icode;
5825
5826 /* If the length is known, just return it. */
5827 if (len != 0)
5828 return expand_expr (len, target, mode, 0);
5829
5830 /* If SRC is not a pointer type, don't do this operation inline. */
5831 if (align == 0)
5832 break;
5833
5834 /* Call a function if we can't compute strlen in the right mode. */
5835
5836 while (insn_mode != VOIDmode)
5837 {
5838 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5839 if (icode != CODE_FOR_nothing)
5840 break;
5841
5842 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5843 }
5844 if (insn_mode == VOIDmode)
5845 break;
5846
5847 /* Make a place to write the result of the instruction. */
5848 result = target;
5849 if (! (result != 0
5850 && GET_CODE (result) == REG
5851 && GET_MODE (result) == insn_mode
5852 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5853 result = gen_reg_rtx (insn_mode);
5854
5855 /* Make sure the operands are acceptable to the predicates. */
5856
5857 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5858 result = gen_reg_rtx (insn_mode);
5859
5860 src_rtx = memory_address (BLKmode,
5861 expand_expr (src, NULL_RTX, Pmode,
5862 EXPAND_NORMAL));
5863 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5864 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5865
5866 char_rtx = const0_rtx;
5867 char_mode = insn_operand_mode[(int)icode][2];
5868 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5869 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5870
5871 emit_insn (GEN_FCN (icode) (result,
5872 gen_rtx (MEM, BLKmode, src_rtx),
5873 char_rtx, GEN_INT (align)));
5874
5875 /* Return the value in the proper mode for this function. */
5876 if (GET_MODE (result) == value_mode)
5877 return result;
5878 else if (target != 0)
5879 {
5880 convert_move (target, result, 0);
5881 return target;
5882 }
5883 else
5884 return convert_to_mode (value_mode, result, 0);
5885 }
5886
5887 case BUILT_IN_STRCPY:
5888 /* If not optimizing, call the library function. */
5889 if (!optimize)
5890 break;
5891
5892 if (arglist == 0
5893 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5894 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5895 || TREE_CHAIN (arglist) == 0
5896 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5897 return const0_rtx;
5898 else
5899 {
5900 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5901
5902 if (len == 0)
5903 break;
5904
5905 len = size_binop (PLUS_EXPR, len, integer_one_node);
5906
5907 chainon (arglist, build_tree_list (NULL_TREE, len));
5908 }
5909
5910 /* Falls through to the memcpy case. */
5911 case BUILT_IN_MEMCPY:
5912 /* If not optimizing, call the library function. */
5913 if (!optimize)
5914 break;
5915
5916 if (arglist == 0
5917 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5918 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5919 || TREE_CHAIN (arglist) == 0
5920 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5921 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5922 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5923 return const0_rtx;
5924 else
5925 {
5926 tree dest = TREE_VALUE (arglist);
5927 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5928 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5929
5930 int src_align
5931 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5932 int dest_align
5933 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5934 rtx dest_rtx;
5935
5936 /* If either SRC or DEST is not a pointer type, don't do
5937 this operation in-line. */
5938 if (src_align == 0 || dest_align == 0)
5939 {
5940 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5941 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5942 break;
5943 }
5944
5945 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5946
5947 /* Copy word part most expediently. */
5948 emit_block_move (gen_rtx (MEM, BLKmode,
5949 memory_address (BLKmode, dest_rtx)),
5950 gen_rtx (MEM, BLKmode,
5951 memory_address (BLKmode,
5952 expand_expr (src, NULL_RTX,
5953 Pmode,
5954 EXPAND_NORMAL))),
5955 expand_expr (len, NULL_RTX, VOIDmode, 0),
5956 MIN (src_align, dest_align));
5957 return dest_rtx;
5958 }
5959
5960 /* These comparison functions need an instruction that returns an actual
5961 index. An ordinary compare that just sets the condition codes
5962 is not enough. */
5963 #ifdef HAVE_cmpstrsi
5964 case BUILT_IN_STRCMP:
5965 /* If not optimizing, call the library function. */
5966 if (!optimize)
5967 break;
5968
5969 if (arglist == 0
5970 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5971 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5972 || TREE_CHAIN (arglist) == 0
5973 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5974 return const0_rtx;
5975 else if (!HAVE_cmpstrsi)
5976 break;
5977 {
5978 tree arg1 = TREE_VALUE (arglist);
5979 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5980 tree offset;
5981 tree len, len2;
5982
5983 len = c_strlen (arg1);
5984 if (len)
5985 len = size_binop (PLUS_EXPR, integer_one_node, len);
5986 len2 = c_strlen (arg2);
5987 if (len2)
5988 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5989
5990 /* If we don't have a constant length for the first, use the length
5991 of the second, if we know it. We don't require a constant for
5992 this case; some cost analysis could be done if both are available
5993 but neither is constant. For now, assume they're equally cheap.
5994
5995 If both strings have constant lengths, use the smaller. This
5996 could arise if optimization results in strcpy being called with
5997 two fixed strings, or if the code was machine-generated. We should
5998 add some code to the `memcmp' handler below to deal with such
5999 situations, someday. */
6000 if (!len || TREE_CODE (len) != INTEGER_CST)
6001 {
6002 if (len2)
6003 len = len2;
6004 else if (len == 0)
6005 break;
6006 }
6007 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6008 {
6009 if (tree_int_cst_lt (len2, len))
6010 len = len2;
6011 }
6012
6013 chainon (arglist, build_tree_list (NULL_TREE, len));
6014 }
6015
6016 /* Falls through to the memcmp case. */
6017 case BUILT_IN_MEMCMP:
6018 /* If not optimizing, call the library function. */
6019 if (!optimize)
6020 break;
6021
6022 if (arglist == 0
6023 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6024 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6025 || TREE_CHAIN (arglist) == 0
6026 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6027 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6028 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6029 return const0_rtx;
6030 else if (!HAVE_cmpstrsi)
6031 break;
6032 {
6033 tree arg1 = TREE_VALUE (arglist);
6034 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6035 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6036 rtx result;
6037
6038 int arg1_align
6039 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6040 int arg2_align
6041 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6042 enum machine_mode insn_mode
6043 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6044
6045 /* If either arg is not a pointer type, call the library function. */
6046 if (arg1_align == 0 || arg2_align == 0)
6047 {
6048 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6049 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6050 break;
6051 }
6052
6053 /* Make a place to write the result of the instruction. */
6054 result = target;
6055 if (! (result != 0
6056 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6057 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6058 result = gen_reg_rtx (insn_mode);
6059
6060 emit_insn (gen_cmpstrsi (result,
6061 gen_rtx (MEM, BLKmode,
6062 expand_expr (arg1, NULL_RTX, Pmode,
6063 EXPAND_NORMAL)),
6064 gen_rtx (MEM, BLKmode,
6065 expand_expr (arg2, NULL_RTX, Pmode,
6066 EXPAND_NORMAL)),
6067 expand_expr (len, NULL_RTX, VOIDmode, 0),
6068 GEN_INT (MIN (arg1_align, arg2_align))));
6069
6070 /* Return the value in the proper mode for this function. */
6071 mode = TYPE_MODE (TREE_TYPE (exp));
6072 if (GET_MODE (result) == mode)
6073 return result;
6074 else if (target != 0)
6075 {
6076 convert_move (target, result, 0);
6077 return target;
6078 }
6079 else
6080 return convert_to_mode (mode, result, 0);
6081 }
6082 #else
6083 case BUILT_IN_STRCMP:
6084 case BUILT_IN_MEMCMP:
6085 break;
6086 #endif
6087
6088 default: /* just do library call, if unknown builtin */
6089 error ("built-in function %s not currently supported",
6090 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6091 }
6092
6093 /* The switch statement above can drop through to cause the function
6094 to be called normally. */
6095
6096 return expand_call (exp, target, ignore);
6097 }
6098 \f
6099 /* Expand code for a post- or pre- increment or decrement
6100 and return the RTX for the result.
6101 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6102
6103 static rtx
6104 expand_increment (exp, post)
6105 register tree exp;
6106 int post;
6107 {
6108 register rtx op0, op1;
6109 register rtx temp, value;
6110 register tree incremented = TREE_OPERAND (exp, 0);
6111 optab this_optab = add_optab;
6112 int icode;
6113 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6114 int op0_is_copy = 0;
6115
6116 /* Stabilize any component ref that might need to be
6117 evaluated more than once below. */
6118 if (TREE_CODE (incremented) == BIT_FIELD_REF
6119 || (TREE_CODE (incremented) == COMPONENT_REF
6120 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6121 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6122 incremented = stabilize_reference (incremented);
6123
6124 /* Compute the operands as RTX.
6125 Note whether OP0 is the actual lvalue or a copy of it:
6126 I believe it is a copy iff it is a register or subreg
6127 and insns were generated in computing it. */
6128
6129 temp = get_last_insn ();
6130 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6131
6132 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6133 in place but instead must do sign- or zero-extension during assignment,
6134 so we copy it into a new register and let the code below use it as
6135 a copy.
6136
6137 Note that we can safely modify this SUBREG since it is known not to be
6138 shared (it was made by the expand_expr call above). */
6139
6140 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6141 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6142
6143 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6144 && temp != get_last_insn ());
6145 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6146
6147 /* Decide whether incrementing or decrementing. */
6148 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6149 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6150 this_optab = sub_optab;
6151
6152 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6153 then we cannot just increment OP0. We must
6154 therefore contrive to increment the original value.
6155 Then we can return OP0 since it is a copy of the old value. */
6156 if (op0_is_copy)
6157 {
6158 /* This is the easiest way to increment the value wherever it is.
6159 Problems with multiple evaluation of INCREMENTED
6160 are prevented because either (1) it is a component_ref,
6161 in which case it was stabilized above, or (2) it is an array_ref
6162 with constant index in an array in a register, which is
6163 safe to reevaluate. */
6164 tree newexp = build ((this_optab == add_optab
6165 ? PLUS_EXPR : MINUS_EXPR),
6166 TREE_TYPE (exp),
6167 incremented,
6168 TREE_OPERAND (exp, 1));
6169 temp = expand_assignment (incremented, newexp, ! post, 0);
6170 return post ? op0 : temp;
6171 }
6172
6173 /* Convert decrement by a constant into a negative increment. */
6174 if (this_optab == sub_optab
6175 && GET_CODE (op1) == CONST_INT)
6176 {
6177 op1 = GEN_INT (- INTVAL (op1));
6178 this_optab = add_optab;
6179 }
6180
6181 if (post)
6182 {
6183 /* We have a true reference to the value in OP0.
6184 If there is an insn to add or subtract in this mode, queue it. */
6185
6186 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6187 op0 = stabilize (op0);
6188 #endif
6189
6190 icode = (int) this_optab->handlers[(int) mode].insn_code;
6191 if (icode != (int) CODE_FOR_nothing
6192 /* Make sure that OP0 is valid for operands 0 and 1
6193 of the insn we want to queue. */
6194 && (*insn_operand_predicate[icode][0]) (op0, mode)
6195 && (*insn_operand_predicate[icode][1]) (op0, mode))
6196 {
6197 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6198 op1 = force_reg (mode, op1);
6199
6200 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6201 }
6202 }
6203
6204 /* Preincrement, or we can't increment with one simple insn. */
6205 if (post)
6206 /* Save a copy of the value before inc or dec, to return it later. */
6207 temp = value = copy_to_reg (op0);
6208 else
6209 /* Arrange to return the incremented value. */
6210 /* Copy the rtx because expand_binop will protect from the queue,
6211 and the results of that would be invalid for us to return
6212 if our caller does emit_queue before using our result. */
6213 temp = copy_rtx (value = op0);
6214
6215 /* Increment however we can. */
6216 op1 = expand_binop (mode, this_optab, value, op1, op0,
6217 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6218 /* Make sure the value is stored into OP0. */
6219 if (op1 != op0)
6220 emit_move_insn (op0, op1);
6221
6222 return temp;
6223 }
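/* E.g., for `f (i++)' on a machine with no add insn that the queue can
   use, the POST path copies I into TEMP, adds 1 into I itself, and
   returns TEMP (the old value) for the call; `++i' instead returns the
   updated value and skips the copy. */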
6224 \f
6225 /* Expand all function calls contained within EXP, innermost ones first.
6226 But don't look within expressions that have sequence points.
6227 For each CALL_EXPR, record the rtx for its value
6228 in the CALL_EXPR_RTL field. */
6229
6230 static void
6231 preexpand_calls (exp)
6232 tree exp;
6233 {
6234 register int nops, i;
6235 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6236
6237 if (! do_preexpand_calls)
6238 return;
6239
6240 /* Only expressions and references can contain calls. */
6241
6242 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6243 return;
6244
6245 switch (TREE_CODE (exp))
6246 {
6247 case CALL_EXPR:
6248 /* Do nothing if already expanded. */
6249 if (CALL_EXPR_RTL (exp) != 0)
6250 return;
6251
6252 /* Do nothing to built-in functions. */
6253 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6254 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6255 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6256 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6257 return;
6258
6259 case COMPOUND_EXPR:
6260 case COND_EXPR:
6261 case TRUTH_ANDIF_EXPR:
6262 case TRUTH_ORIF_EXPR:
6263 /* If we find one of these, then we can be sure
6264 the adjust will be done for it (since it makes jumps).
6265 Do it now, so that if this is inside an argument
6266 of a function, we don't get the stack adjustment
6267 after some other args have already been pushed. */
6268 do_pending_stack_adjust ();
6269 return;
6270
6271 case BLOCK:
6272 case RTL_EXPR:
6273 case WITH_CLEANUP_EXPR:
6274 return;
6275
6276 case SAVE_EXPR:
6277 if (SAVE_EXPR_RTL (exp) != 0)
6278 return;
6279 }
6280
6281 nops = tree_code_length[(int) TREE_CODE (exp)];
6282 for (i = 0; i < nops; i++)
6283 if (TREE_OPERAND (exp, i) != 0)
6284 {
6285 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6286 if (type == 'e' || type == '<' || type == '1' || type == '2'
6287 || type == 'r')
6288 preexpand_calls (TREE_OPERAND (exp, i));
6289 }
6290 }
6291 \f
6292 /* At the start of a function, record that we have no previously-pushed
6293 arguments waiting to be popped. */
6294
6295 void
6296 init_pending_stack_adjust ()
6297 {
6298 pending_stack_adjust = 0;
6299 }
6300
6301 /* When exiting from function, if safe, clear out any pending stack adjust
6302 so the adjustment won't get done. */
6303
6304 void
6305 clear_pending_stack_adjust ()
6306 {
6307 #ifdef EXIT_IGNORE_STACK
6308 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6309 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6310 && ! flag_inline_functions)
6311 pending_stack_adjust = 0;
6312 #endif
6313 }
6314
6315 /* Pop any previously-pushed arguments that have not been popped yet. */
6316
6317 void
6318 do_pending_stack_adjust ()
6319 {
6320 if (inhibit_defer_pop == 0)
6321 {
6322 if (pending_stack_adjust != 0)
6323 adjust_stack (GEN_INT (pending_stack_adjust));
6324 pending_stack_adjust = 0;
6325 }
6326 }
6327
6328 /* Expand all cleanups up to OLD_CLEANUPS.
6329 Needed here, and also for language-dependent calls. */
6330
6331 void
6332 expand_cleanups_to (old_cleanups)
6333 tree old_cleanups;
6334 {
6335 while (cleanups_this_call != old_cleanups)
6336 {
6337 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6338 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6339 }
6340 }
6341 \f
6342 /* Expand conditional expressions. */
6343
6344 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6345 LABEL is an rtx of code CODE_LABEL, in this function and all the
6346 functions here. */
6347
6348 void
6349 jumpifnot (exp, label)
6350 tree exp;
6351 rtx label;
6352 {
6353 do_jump (exp, label, NULL_RTX);
6354 }
6355
6356 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6357
6358 void
6359 jumpif (exp, label)
6360 tree exp;
6361 rtx label;
6362 {
6363 do_jump (exp, NULL_RTX, label);
6364 }
6365
6366 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6367 the result is zero, or IF_TRUE_LABEL if the result is one.
6368 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6369 meaning fall through in that case.
6370
6371 do_jump always does any pending stack adjust except when it does not
6372 actually perform a jump. An example where there is no jump
6373 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6374
6375 This function is responsible for optimizing cases such as
6376 &&, || and comparison operators in EXP. */
6377
6378 void
6379 do_jump (exp, if_false_label, if_true_label)
6380 tree exp;
6381 rtx if_false_label, if_true_label;
6382 {
6383 register enum tree_code code = TREE_CODE (exp);
6384 /* Some cases need to create a label to jump to
6385 in order to properly fall through.
6386 These cases set DROP_THROUGH_LABEL nonzero. */
6387 rtx drop_through_label = 0;
6388 rtx temp;
6389 rtx comparison = 0;
6390 int i;
6391 tree type;
6392
6393 emit_queue ();
6394
6395 switch (code)
6396 {
6397 case ERROR_MARK:
6398 break;
6399
6400 case INTEGER_CST:
6401 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6402 if (temp)
6403 emit_jump (temp);
6404 break;
6405
6406 #if 0
6407 /* This is not true with #pragma weak */
6408 case ADDR_EXPR:
6409 /* The address of something can never be zero. */
6410 if (if_true_label)
6411 emit_jump (if_true_label);
6412 break;
6413 #endif
6414
6415 case NOP_EXPR:
6416 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6417 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6418 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6419 goto normal;
6420 case CONVERT_EXPR:
6421 /* If we are narrowing the operand, we have to do the compare in the
6422 narrower mode. */
6423 if ((TYPE_PRECISION (TREE_TYPE (exp))
6424 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6425 goto normal;
6426 case NON_LVALUE_EXPR:
6427 case REFERENCE_EXPR:
6428 case ABS_EXPR:
6429 case NEGATE_EXPR:
6430 case LROTATE_EXPR:
6431 case RROTATE_EXPR:
6432 /* These cannot change zero->non-zero or vice versa. */
6433 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6434 break;
6435
6436 #if 0
6437 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
6438 a test and can be longer if the test is eliminated. */
6439 case PLUS_EXPR:
6440 /* Reduce to minus. */
6441 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6442 TREE_OPERAND (exp, 0),
6443 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6444 TREE_OPERAND (exp, 1))));
6445 /* Process as MINUS. */
6446 #endif
6447
6448 case MINUS_EXPR:
6449 /* Non-zero iff operands of minus differ. */
6450 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6451 TREE_OPERAND (exp, 0),
6452 TREE_OPERAND (exp, 1)),
6453 NE, NE);
6454 break;
6455
6456 case BIT_AND_EXPR:
6457 /* If we are AND'ing with a small constant, do this comparison in the
6458 smallest type that fits. If the machine doesn't have comparisons
6459 that small, it will be converted back to the wider comparison.
6460 This helps if we are testing the sign bit of a narrower object.
6461 combine can't do this for us because it can't know whether a
6462 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
6463
6464 if (! SLOW_BYTE_ACCESS
6465 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6466 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6467 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6468 && (type = type_for_size (i + 1, 1)) != 0
6469 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6470 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6471 != CODE_FOR_nothing))
6472 {
6473 do_jump (convert (type, exp), if_false_label, if_true_label);
6474 break;
6475 }
6476 goto normal;
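      /* E.g., `if (x & 0x80)' gives I == 7, so when the machine has a
         compare that narrow the jump is done on the expression converted
         to the 8-bit type from type_for_size, i.e. in QImode. */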
6477
6478 case TRUTH_NOT_EXPR:
6479 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6480 break;
6481
6482 case TRUTH_ANDIF_EXPR:
6483 if (if_false_label == 0)
6484 if_false_label = drop_through_label = gen_label_rtx ();
6485 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6486 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6487 break;
6488
6489 case TRUTH_ORIF_EXPR:
6490 if (if_true_label == 0)
6491 if_true_label = drop_through_label = gen_label_rtx ();
6492 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6493 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6494 break;
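      /* Together these two cases give short-circuit evaluation: for
         `a && b' a false A jumps straight to the false label, and for
         `a || b' a true A jumps straight to the true label, without
         evaluating B in either case. */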
6495
6496 case COMPOUND_EXPR:
6497 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6498 free_temp_slots ();
6499 emit_queue ();
6500 do_pending_stack_adjust ();
6501 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6502 break;
6503
6504 case COMPONENT_REF:
6505 case BIT_FIELD_REF:
6506 case ARRAY_REF:
6507 {
6508 int bitsize, bitpos, unsignedp;
6509 enum machine_mode mode;
6510 tree type;
6511 tree offset;
6512 int volatilep = 0;
6513
6514 /* Get description of this reference. We don't actually care
6515 about the underlying object here. */
6516 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6517 &mode, &unsignedp, &volatilep);
6518
6519 type = type_for_size (bitsize, unsignedp);
6520 if (! SLOW_BYTE_ACCESS
6521 && type != 0 && bitsize >= 0
6522 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6523 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6524 != CODE_FOR_nothing))
6525 {
6526 do_jump (convert (type, exp), if_false_label, if_true_label);
6527 break;
6528 }
6529 goto normal;
6530 }
6531
6532 case COND_EXPR:
6533 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6534 if (integer_onep (TREE_OPERAND (exp, 1))
6535 && integer_zerop (TREE_OPERAND (exp, 2)))
6536 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6537
6538 else if (integer_zerop (TREE_OPERAND (exp, 1))
6539 && integer_onep (TREE_OPERAND (exp, 2)))
6540 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6541
6542 else
6543 {
6544 register rtx label1 = gen_label_rtx ();
6545 drop_through_label = gen_label_rtx ();
6546 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6547 /* Now the THEN-expression. */
6548 do_jump (TREE_OPERAND (exp, 1),
6549 if_false_label ? if_false_label : drop_through_label,
6550 if_true_label ? if_true_label : drop_through_label);
6551 /* In case the do_jump just above never jumps. */
6552 do_pending_stack_adjust ();
6553 emit_label (label1);
6554 /* Now the ELSE-expression. */
6555 do_jump (TREE_OPERAND (exp, 2),
6556 if_false_label ? if_false_label : drop_through_label,
6557 if_true_label ? if_true_label : drop_through_label);
6558 }
6559 break;
6560
6561 case EQ_EXPR:
6562 if (integer_zerop (TREE_OPERAND (exp, 1)))
6563 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6564 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6565 == MODE_INT)
6566 &&
6567 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6568 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6569 else
6570 comparison = compare (exp, EQ, EQ);
6571 break;
6572
6573 case NE_EXPR:
6574 if (integer_zerop (TREE_OPERAND (exp, 1)))
6575 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6576 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6577 == MODE_INT)
6578 &&
6579 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6580 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6581 else
6582 comparison = compare (exp, NE, NE);
6583 break;
6584
6585 case LT_EXPR:
6586 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6587 == MODE_INT)
6588 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6589 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6590 else
6591 comparison = compare (exp, LT, LTU);
6592 break;
6593
6594 case LE_EXPR:
6595 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6596 == MODE_INT)
6597 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6598 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6599 else
6600 comparison = compare (exp, LE, LEU);
6601 break;
6602
6603 case GT_EXPR:
6604 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6605 == MODE_INT)
6606 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6607 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6608 else
6609 comparison = compare (exp, GT, GTU);
6610 break;
6611
6612 case GE_EXPR:
6613 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6614 == MODE_INT)
6615 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6616 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6617 else
6618 comparison = compare (exp, GE, GEU);
6619 break;
6620
6621 default:
6622 normal:
6623 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6624 #if 0
6625 /* This is not needed any more and causes poor code since it causes
6626 comparisons and tests from non-SI objects to have different code
6627 sequences. */
6628 /* Copy to register to avoid generating bad insns by cse
6629 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6630 if (!cse_not_expected && GET_CODE (temp) == MEM)
6631 temp = copy_to_reg (temp);
6632 #endif
6633 do_pending_stack_adjust ();
6634 if (GET_CODE (temp) == CONST_INT)
6635 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6636 else if (GET_CODE (temp) == LABEL_REF)
6637 comparison = const_true_rtx;
6638 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6639 && !can_compare_p (GET_MODE (temp)))
6640 /* Note swapping the labels gives us not-equal. */
6641 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6642 else if (GET_MODE (temp) != VOIDmode)
6643 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6644 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6645 GET_MODE (temp), NULL_RTX, 0);
6646 else
6647 abort ();
6648 }
6649
6650 /* Do any postincrements in the expression that was tested. */
6651 emit_queue ();
6652
6653 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6654 straight into a conditional jump instruction as the jump condition.
6655 Otherwise, all the work has been done already. */
6656
6657 if (comparison == const_true_rtx)
6658 {
6659 if (if_true_label)
6660 emit_jump (if_true_label);
6661 }
6662 else if (comparison == const0_rtx)
6663 {
6664 if (if_false_label)
6665 emit_jump (if_false_label);
6666 }
6667 else if (comparison)
6668 do_jump_for_compare (comparison, if_false_label, if_true_label);
6669
6670 free_temp_slots ();
6671
6672 if (drop_through_label)
6673 {
6674 /* If do_jump produces code that might be jumped around,
6675 do any stack adjusts from that code, before the place
6676 where control merges in. */
6677 do_pending_stack_adjust ();
6678 emit_label (drop_through_label);
6679 }
6680 }
6681 \f
6682 /* Given a comparison expression EXP for values too wide to be compared
6683 with one insn, test the comparison and jump to the appropriate label.
6684 The code of EXP is ignored; we always test GT if SWAP is 0,
6685 and LT if SWAP is 1. */
6686
6687 static void
6688 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6689 tree exp;
6690 int swap;
6691 rtx if_false_label, if_true_label;
6692 {
6693 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6694 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6695 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6696 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6697 rtx drop_through_label = 0;
6698 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6699 int i;
6700
6701 if (! if_true_label || ! if_false_label)
6702 drop_through_label = gen_label_rtx ();
6703 if (! if_true_label)
6704 if_true_label = drop_through_label;
6705 if (! if_false_label)
6706 if_false_label = drop_through_label;
6707
6708 /* Compare a word at a time, high order first. */
6709 for (i = 0; i < nwords; i++)
6710 {
6711 rtx comp;
6712 rtx op0_word, op1_word;
6713
6714 if (WORDS_BIG_ENDIAN)
6715 {
6716 op0_word = operand_subword_force (op0, i, mode);
6717 op1_word = operand_subword_force (op1, i, mode);
6718 }
6719 else
6720 {
6721 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6722 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6723 }
6724
6725 /* All but high-order word must be compared as unsigned. */
6726 comp = compare_from_rtx (op0_word, op1_word,
6727 (unsignedp || i > 0) ? GTU : GT,
6728 unsignedp, word_mode, NULL_RTX, 0);
6729 if (comp == const_true_rtx)
6730 emit_jump (if_true_label);
6731 else if (comp != const0_rtx)
6732 do_jump_for_compare (comp, NULL_RTX, if_true_label);
6733
6734 /* Consider lower words only if these are equal. */
6735 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6736 NULL_RTX, 0);
6737 if (comp == const_true_rtx)
6738 emit_jump (if_false_label);
6739 else if (comp != const0_rtx)
6740 do_jump_for_compare (comp, NULL_RTX, if_false_label);
6741 }
6742
6743 if (if_false_label)
6744 emit_jump (if_false_label);
6745 if (drop_through_label)
6746 emit_label (drop_through_label);
6747 }
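/* Example: a signed DImode `a < b' on a 32-bit host arrives with SWAP set,
   so OP0 is B and OP1 is A.  The high words are compared signed: B's high
   word greater means true, unequal means false; only when the high words
   match are the low words compared, unsigned. */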
6748
6749 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6750 with one insn, test the comparison and jump to the appropriate label. */
6751
6752 static void
6753 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6754 tree exp;
6755 rtx if_false_label, if_true_label;
6756 {
6757 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6758 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6759 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6760 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6761 int i;
6762 rtx drop_through_label = 0;
6763
6764 if (! if_false_label)
6765 drop_through_label = if_false_label = gen_label_rtx ();
6766
6767 for (i = 0; i < nwords; i++)
6768 {
6769 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6770 operand_subword_force (op1, i, mode),
6771 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
6772 word_mode, NULL_RTX, 0);
6773 if (comp == const_true_rtx)
6774 emit_jump (if_false_label);
6775 else if (comp != const0_rtx)
6776 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6777 }
6778
6779 if (if_true_label)
6780 emit_jump (if_true_label);
6781 if (drop_through_label)
6782 emit_label (drop_through_label);
6783 }
6784 \f
6785 /* Jump according to whether OP0 is 0.
6786 We assume that OP0 has an integer mode that is too wide
6787 for the available compare insns. */
6788
6789 static void
6790 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6791 rtx op0;
6792 rtx if_false_label, if_true_label;
6793 {
6794 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6795 int i;
6796 rtx drop_through_label = 0;
6797
6798 if (! if_false_label)
6799 drop_through_label = if_false_label = gen_label_rtx ();
6800
6801 for (i = 0; i < nwords; i++)
6802 {
6803 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6804 GET_MODE (op0)),
6805 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
6806 if (comp == const_true_rtx)
6807 emit_jump (if_false_label);
6808 else if (comp != const0_rtx)
6809 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6810 }
6811
6812 if (if_true_label)
6813 emit_jump (if_true_label);
6814 if (drop_through_label)
6815 emit_label (drop_through_label);
6816 }
6817
6818 /* Given a comparison expression in rtl form, output conditional branches to
6819 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6820
6821 static void
6822 do_jump_for_compare (comparison, if_false_label, if_true_label)
6823 rtx comparison, if_false_label, if_true_label;
6824 {
6825 if (if_true_label)
6826 {
6827 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6828 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6829 else
6830 abort ();
6831
6832 if (if_false_label)
6833 emit_jump (if_false_label);
6834 }
6835 else if (if_false_label)
6836 {
6837 rtx insn;
6838 rtx prev = PREV_INSN (get_last_insn ());
6839 rtx branch = 0;
6840
6841 /* Output the branch with the opposite condition. Then try to invert
6842 what is generated. If more than one insn is a branch, or if the
6843 branch is not the last insn written, abort. If we can't invert
6844 the branch, make a true label, redirect this jump to that,
6845 emit a jump to the false label and define the true label. */
6846
6847 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6848 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6849 else
6850 abort ();
6851
6852 /* Here we get the insn before what was just emitted.
6853 On some machines, emitting the branch can discard
6854 the previous compare insn and emit a replacement. */
6855 if (prev == 0)
6856 /* If there's only one preceding insn... */
6857 insn = get_insns ();
6858 else
6859 insn = NEXT_INSN (prev);
6860
6861 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6862 if (GET_CODE (insn) == JUMP_INSN)
6863 {
6864 if (branch)
6865 abort ();
6866 branch = insn;
6867 }
6868
6869 if (branch != get_last_insn ())
6870 abort ();
6871
6872 if (! invert_jump (branch, if_false_label))
6873 {
6874 if_true_label = gen_label_rtx ();
6875 redirect_jump (branch, if_true_label);
6876 emit_jump (if_false_label);
6877 emit_label (if_true_label);
6878 }
6879 }
6880 }
6881 \f
6882 /* Generate code for a comparison expression EXP
6883 (including code to compute the values to be compared)
6884 and set (CC0) according to the result.
6885 SIGNED_CODE should be the rtx operation for this comparison for
6886 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6887
6888 We force a stack adjustment unless there are currently
6889 things pushed on the stack that aren't yet used. */
6890
6891 static rtx
6892 compare (exp, signed_code, unsigned_code)
6893 register tree exp;
6894 enum rtx_code signed_code, unsigned_code;
6895 {
6896 register rtx op0
6897 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6898 register rtx op1
6899 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6900 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6901 register enum machine_mode mode = TYPE_MODE (type);
6902 int unsignedp = TREE_UNSIGNED (type);
6903 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6904
6905 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6906 ((mode == BLKmode)
6907 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
6908 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6909 }
6910
6911 /* Like compare but expects the values to compare as two rtx's.
6912 The decision as to signed or unsigned comparison must be made by the caller.
6913
6914 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6915 compared.
6916
6917 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6918 size of MODE should be used. */
6919
6920 rtx
6921 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6922 register rtx op0, op1;
6923 enum rtx_code code;
6924 int unsignedp;
6925 enum machine_mode mode;
6926 rtx size;
6927 int align;
6928 {
6929 /* If one operand is constant, make it the second one. */
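/* [Editor's note -- example, not in the original source: if OP0 is
   the constant, "5 < x" becomes "x > 5"; swap_condition below keeps
   the test's meaning while the operands trade places.] */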
6930
6931 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6932 {
6933 rtx tem = op0;
6934 op0 = op1;
6935 op1 = tem;
6936 code = swap_condition (code);
6937 }
6938
6939 if (flag_force_mem)
6940 {
6941 op0 = force_not_mem (op0);
6942 op1 = force_not_mem (op1);
6943 }
6944
6945 do_pending_stack_adjust ();
6946
6947 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6948 return simplify_relational_operation (code, mode, op0, op1);
6949
6950 #if 0
6951 /* There's no need to do this now that combine.c can eliminate lots of
6952 sign extensions. This can be less efficient in certain cases on other
6953 machines. */
6954
6955 /* If this is a signed equality comparison, we can do it as an
6956 unsigned comparison since zero-extension is cheaper than sign
6957 extension and comparisons with zero are done as unsigned. This is
6958 the case even on machines that can do fast sign extension, since
6959 zero-extension is easier to combine with other operations than
6960 sign-extension is. If we are comparing against a constant, we must
6961 convert it to what it would look like unsigned. */
6962 if ((code == EQ || code == NE) && ! unsignedp
6963 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
6964 {
6965 if (GET_CODE (op1) == CONST_INT
6966 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6967 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6968 unsignedp = 1;
6969 }
6970 #endif
6971
6972 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6973
6974 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6975 }
6976 \f
6977 /* Generate code to calculate EXP using a store-flag instruction
6978 and return an rtx for the result. EXP is either a comparison
6979 or a TRUTH_NOT_EXPR whose operand is a comparison.
6980
6981 If TARGET is nonzero, store the result there if convenient.
6982
6983 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6984 cheap.
6985
6986 Return zero if there is no suitable set-flag instruction
6987 available on this machine.
6988
6989 Once expand_expr has been called on the arguments of the comparison,
6990 we are committed to doing the store flag, since it is not safe to
6991 re-evaluate the expression. We emit the store-flag insn by calling
6992 emit_store_flag, but only expand the arguments if we have a reason
6993 to believe that emit_store_flag will be successful. If we think that
6994 it will, but it isn't, we have to simulate the store-flag with a
6995 set/jump/set sequence. */
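/* [Editor's note -- illustrative sketch, not in the original source.
   The set/jump/set fallback emitted at the end of do_store_flag for
   "target = (a < b)" is equivalent to:

   target = 1;
   if (a < b) goto label;
   target = 0;
   label:;                                                            */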
6996
6997 static rtx
6998 do_store_flag (exp, target, mode, only_cheap)
6999 tree exp;
7000 rtx target;
7001 enum machine_mode mode;
7002 int only_cheap;
7003 {
7004 enum rtx_code code;
7005 tree arg0, arg1, type;
7006 tree tem;
7007 enum machine_mode operand_mode;
7008 int invert = 0;
7009 int unsignedp;
7010 rtx op0, op1;
7011 enum insn_code icode;
7012 rtx subtarget = target;
7013 rtx result, label, pattern, jump_pat;
7014
7015 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7016 result at the end. We can't simply invert the test since it would
7017 have already been inverted if it were valid. This case occurs for
7018 some floating-point comparisons. */
7019
7020 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7021 invert = 1, exp = TREE_OPERAND (exp, 0);
7022
7023 arg0 = TREE_OPERAND (exp, 0);
7024 arg1 = TREE_OPERAND (exp, 1);
7025 type = TREE_TYPE (arg0);
7026 operand_mode = TYPE_MODE (type);
7027 unsignedp = TREE_UNSIGNED (type);
7028
7029 /* We won't bother with BLKmode store-flag operations because it would mean
7030 passing a lot of information to emit_store_flag. */
7031 if (operand_mode == BLKmode)
7032 return 0;
7033
7034 STRIP_NOPS (arg0);
7035 STRIP_NOPS (arg1);
7036
7037 /* Get the rtx comparison code to use. We know that EXP is a comparison
7038 operation of some type. Some comparisons against 1 and -1 can be
7039 converted to comparisons with zero. Do so here so that the tests
7040 below will be aware that we have a comparison with zero. These
7041 tests will not catch constants in the first operand, but constants
7042 are rarely passed as the first operand. */
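/* [Editor's note -- illustrative examples, not in the original source.
   For signed operands: "x < 1" becomes "x <= 0", "x <= -1" becomes
   "x < 0", "x > -1" becomes "x >= 0", and "x >= 1" becomes "x > 0",
   matching the four conversions in the switch below.] */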
7043
7044 switch (TREE_CODE (exp))
7045 {
7046 case EQ_EXPR:
7047 code = EQ;
7048 break;
7049 case NE_EXPR:
7050 code = NE;
7051 break;
7052 case LT_EXPR:
7053 if (integer_onep (arg1))
7054 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7055 else
7056 code = unsignedp ? LTU : LT;
7057 break;
7058 case LE_EXPR:
7059 if (integer_all_onesp (arg1))
7060 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7061 else
7062 code = unsignedp ? LEU : LE;
7063 break;
7064 case GT_EXPR:
7065 if (integer_all_onesp (arg1))
7066 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7067 else
7068 code = unsignedp ? GTU : GT;
7069 break;
7070 case GE_EXPR:
7071 if (integer_onep (arg1))
7072 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7073 else
7074 code = unsignedp ? GEU : GE;
7075 break;
7076 default:
7077 abort ();
7078 }
7079
7080 /* Put a constant second. */
7081 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7082 {
7083 tem = arg0; arg0 = arg1; arg1 = tem;
7084 code = swap_condition (code);
7085 }
7086
7087 /* If this is an equality or inequality test of a single bit, we can
7088 do this by shifting the bit being tested to the low-order bit and
7089 masking the result with the constant 1. If the condition was EQ,
7090 we xor it with 1. This does not require an scc insn and is faster
7091 than an scc insn even if we have it. */
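/* [Editor's note -- worked example, not in the original source.
   "(x & 8) != 0" becomes "(x >> 3) & 1", and "(x & 8) == 0" becomes
   "((x >> 3) & 1) ^ 1". When the tested bit is the type's sign bit,
   the logical shift already leaves only bit 0, so the masking step
   is skipped, as the TYPE_PRECISION check below shows.] */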
7092
7093 if ((code == NE || code == EQ)
7094 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7095 && integer_pow2p (TREE_OPERAND (arg0, 1))
7096 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7097 {
7098 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7099 NULL_RTX, VOIDmode, 0)));
7100
7101 if (subtarget == 0 || GET_CODE (subtarget) != REG
7102 || GET_MODE (subtarget) != operand_mode
7103 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7104 subtarget = 0;
7105
7106 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7107
7108 if (bitnum != 0)
7109 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7110 size_int (bitnum), target, 1);
7111
7112 if (GET_MODE (op0) != mode)
7113 op0 = convert_to_mode (mode, op0, 1);
7114
7115 if (bitnum != TYPE_PRECISION (type) - 1)
7116 op0 = expand_and (op0, const1_rtx, target);
7117
7118 if ((code == EQ && ! invert) || (code == NE && invert))
7119 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7120 OPTAB_LIB_WIDEN);
7121
7122 return op0;
7123 }
7124
7125 /* Now see if we are likely to be able to do this. Return if not. */
7126 if (! can_compare_p (operand_mode))
7127 return 0;
7128 icode = setcc_gen_code[(int) code];
7129 if (icode == CODE_FOR_nothing
7130 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7131 {
7132 /* We can only do this if it is one of the special cases that
7133 can be handled without an scc insn. */
7134 if ((code == LT && integer_zerop (arg1))
7135 || (! only_cheap && code == GE && integer_zerop (arg1)))
7136 ;
7137 else if (BRANCH_COST >= 0
7138 && ! only_cheap && (code == NE || code == EQ)
7139 && TREE_CODE (type) != REAL_TYPE
7140 && ((abs_optab->handlers[(int) operand_mode].insn_code
7141 != CODE_FOR_nothing)
7142 || (ffs_optab->handlers[(int) operand_mode].insn_code
7143 != CODE_FOR_nothing)))
7144 ;
7145 else
7146 return 0;
7147 }
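/* [Editor's note -- not in the original source: these are the cases
   emit_store_flag can synthesize without an scc pattern, roughly
   "x < 0" (or "x >= 0") as a logical shift of the sign bit into the
   low-order bit, and "x == 0" / "x != 0" via abs or ffs followed by
   a sign-bit shift; the exact recipes live in emit_store_flag.] */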
7148
7149 preexpand_calls (exp);
7150 if (subtarget == 0 || GET_CODE (subtarget) != REG
7151 || GET_MODE (subtarget) != operand_mode
7152 || ! safe_from_p (subtarget, arg1))
7153 subtarget = 0;
7154
7155 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7156 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7157
7158 if (target == 0)
7159 target = gen_reg_rtx (mode);
7160
7161 result = emit_store_flag (target, code, op0, op1, operand_mode,
7162 unsignedp, 1);
7163
7164 if (result)
7165 {
7166 if (invert)
7167 result = expand_binop (mode, xor_optab, result, const1_rtx,
7168 result, 0, OPTAB_LIB_WIDEN);
7169 return result;
7170 }
7171
7172 /* If this failed, we have to do this with set/compare/jump/set code. */
7173 if (target == 0 || GET_CODE (target) != REG
7174 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7175 target = gen_reg_rtx (GET_MODE (target));
7176
7177 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7178 result = compare_from_rtx (op0, op1, code, unsignedp,
7179 operand_mode, NULL_RTX, 0);
7180 if (GET_CODE (result) == CONST_INT)
7181 return (((result == const0_rtx && ! invert)
7182 || (result != const0_rtx && invert))
7183 ? const0_rtx : const1_rtx);
7184
7185 label = gen_label_rtx ();
7186 if (bcc_gen_fctn[(int) code] == 0)
7187 abort ();
7188
7189 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7190 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7191 emit_label (label);
7192
7193 return target;
7194 }
7195 \f
7196 /* Generate a tablejump instruction (used for switch statements). */
7197
7198 #ifdef HAVE_tablejump
7199
7200 /* INDEX is the value being switched on, with the lowest value
7201 in the table already subtracted.
7202 MODE is its expected mode (needed if INDEX is constant).
7203 RANGE is the length of the jump table.
7204 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7205
7206 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7207 index value is out of range. */
7208
7209 void
7210 do_tablejump (index, mode, range, table_label, default_label)
7211 rtx index, range, table_label, default_label;
7212 enum machine_mode mode;
7213 {
7214 register rtx temp, vector;
7215
7216 /* Do an unsigned comparison (in the proper mode) between the index
7217 expression and the value which represents the length of the range.
7218 Since we just finished subtracting the lower bound of the range
7219 from the index expression, this comparison allows us to simultaneously
7220 check that the original index expression value is both greater than
7221 or equal to the minimum value of the range and less than or equal to
7222 the maximum value of the range. */
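/* [Editor's note -- worked example, not in the original source.
   With case labels LOW .. HIGH, INDEX arrives as orig_index - LOW and
   RANGE is HIGH - LOW. The single unsigned test
   "(unsigned) (orig_index - LOW) > HIGH - LOW" is true when
   orig_index < LOW, because the subtraction wraps around to a large
   unsigned value, and also when orig_index > HIGH, so one branch
   covers both out-of-range directions.] */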
7223
7224 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
7225 emit_jump_insn (gen_bltu (default_label));
7226
7227 /* If index is in range, it must fit in Pmode.
7228 Convert to Pmode so we can index with it. */
7229 if (mode != Pmode)
7230 index = convert_to_mode (Pmode, index, 1);
7231
7232 /* If flag_force_addr were to affect this address
7233 it could interfere with the tricky assumptions made
7234 about addresses that contain label-refs,
7235 which may be valid only very near the tablejump itself. */
7236 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7237 GET_MODE_SIZE, because this indicates how large the dispatch-table
7238 entries are. The other uses should all be Pmode, because they are
7239 addresses. This code could fail if they are not the same size. */
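/* [Editor's note -- clarifying sketch, not in the original source.
   The address built below is
   table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE),
   i.e. the address of the INDEX'th entry of the dispatch table.] */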
7240 index = memory_address_noforce
7241 (CASE_VECTOR_MODE,
7242 gen_rtx (PLUS, Pmode,
7243 gen_rtx (MULT, Pmode, index,
7244 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7245 gen_rtx (LABEL_REF, Pmode, table_label)));
7246 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7247 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7248 RTX_UNCHANGING_P (vector) = 1;
7249 convert_move (temp, vector, 0);
7250
7251 emit_jump_insn (gen_tablejump (temp, table_label));
7252
7253 #ifndef CASE_VECTOR_PC_RELATIVE
7254 /* If we are generating PIC code or if the table is PC-relative, the
7255 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7256 if (! flag_pic)
7257 emit_barrier ();
7258 #endif
7259 }
7260
7261 #endif /* HAVE_tablejump */