1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include "config.h"
22 #include "rtl.h"
23 #include "tree.h"
24 #include "flags.h"
25 #include "function.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "expr.h"
29 #include "insn-config.h"
30 #include "recog.h"
31 #include "output.h"
32 #include "gvarargs.h"
33 #include "typeclass.h"
34
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
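/* For example, CEIL (10, 4) is 3: three 4-byte units are needed to
   cover 10 bytes.  It is used below as
   CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD) to count the words
   occupied by a value.  */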
36
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
39
40 #ifdef STACK_GROWS_DOWNWARD
41 #ifdef PUSH_ROUNDING
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
43 #endif
44 #endif
45
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
49 #else
50 #define STACK_PUSH_CODE PRE_INC
51 #endif
52 #endif
53
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
56
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
63 int cse_not_expected;
64
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls = 1;
69
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust;
73
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop;
79
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call;
83
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs
86 returned. */
87 static rtx saveregs_value;
88
89 rtx store_expr ();
90 static void store_constructor ();
91 static rtx store_field ();
92 static rtx expand_builtin ();
93 static rtx compare ();
94 static rtx do_store_flag ();
95 static void preexpand_calls ();
96 static rtx expand_increment ();
97 static void init_queue ();
98
99 void do_pending_stack_adjust ();
100 static void do_jump_for_compare ();
101 static void do_jump_by_parts_equality ();
102 static void do_jump_by_parts_equality_rtx ();
103 static void do_jump_by_parts_greater ();
104
105 /* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
108
109 static char direct_load[NUM_MACHINE_MODES];
110 static char direct_store[NUM_MACHINE_MODES];
111
112 /* MOVE_RATIO is the number of move instructions that is better than
113 a block move. */
114
115 #ifndef MOVE_RATIO
116 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi)
117 #define MOVE_RATIO 2
118 #else
119 /* A value of around 6 would minimize code size; infinity would minimize
120 execution time. */
121 #define MOVE_RATIO 15
122 #endif
123 #endif
124
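/* Sketch of how MOVE_RATIO is consulted (see emit_block_move below):
   a constant-size block copy is expanded into individual moves only when

       move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO

   so a MOVE_RATIO of 2 expands inline only copies that take a single
   move insn; anything larger tries a movstr pattern or a library call.  */
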
125 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
126
127 #ifndef SLOW_UNALIGNED_ACCESS
128 #define SLOW_UNALIGNED_ACCESS 0
129 #endif
130 \f
131 /* This is run once per compilation to set up which modes can be used
132 directly in memory. */
133
134 void
135 init_expr_once ()
136 {
137 rtx insn, pat;
138 enum machine_mode mode;
139 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
140
141 start_sequence ();
142 insn = emit_insn (gen_rtx (SET, 0, 0));
143 pat = PATTERN (insn);
144
145 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
146 mode = (enum machine_mode) ((int) mode + 1))
147 {
148 int regno;
149 rtx reg;
150 int num_clobbers;
151
152 direct_load[(int) mode] = direct_store[(int) mode] = 0;
153 PUT_MODE (mem, mode);
154
155 /* Find a register that can be used in this mode, if any. */
156 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
157 if (HARD_REGNO_MODE_OK (regno, mode))
158 break;
159
160 if (regno == FIRST_PSEUDO_REGISTER)
161 continue;
162
163 reg = gen_rtx (REG, mode, regno);
164
165 SET_SRC (pat) = mem;
166 SET_DEST (pat) = reg;
167 direct_load[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;
168
169 SET_SRC (pat) = reg;
170 SET_DEST (pat) = mem;
171 direct_store[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;
172 }
173
174 end_sequence ();
175 }
176
177 /* This is run at the start of compiling a function. */
178
179 void
180 init_expr ()
181 {
182 init_queue ();
183
184 pending_stack_adjust = 0;
185 inhibit_defer_pop = 0;
186 cleanups_this_call = 0;
187 saveregs_value = 0;
188 forced_labels = 0;
189 }
190
191 /* Save all variables describing the current status into the structure *P.
192 This is used before starting a nested function. */
193
194 void
195 save_expr_status (p)
196 struct function *p;
197 {
198 /* Instead of saving the postincrement queue, empty it. */
199 emit_queue ();
200
201 p->pending_stack_adjust = pending_stack_adjust;
202 p->inhibit_defer_pop = inhibit_defer_pop;
203 p->cleanups_this_call = cleanups_this_call;
204 p->saveregs_value = saveregs_value;
205 p->forced_labels = forced_labels;
206
207 pending_stack_adjust = 0;
208 inhibit_defer_pop = 0;
209 cleanups_this_call = 0;
210 saveregs_value = 0;
211 forced_labels = 0;
212 }
213
214 /* Restore all variables describing the current status from the structure *P.
215 This is used after a nested function. */
216
217 void
218 restore_expr_status (p)
219 struct function *p;
220 {
221 pending_stack_adjust = p->pending_stack_adjust;
222 inhibit_defer_pop = p->inhibit_defer_pop;
223 cleanups_this_call = p->cleanups_this_call;
224 saveregs_value = p->saveregs_value;
225 forced_labels = p->forced_labels;
226 }
227 \f
228 /* Manage the queue of increment instructions to be output
229 for POSTINCREMENT_EXPR expressions, etc. */
230
231 static rtx pending_chain;
232
233 /* Queue up to increment (or change) VAR later. BODY says how:
234 BODY should be the same thing you would pass to emit_insn
235 to increment right away. It will go to emit_insn later on.
236
237 The value is a QUEUED expression to be used in place of VAR
238 where you want to guarantee the pre-incrementation value of VAR. */
239
240 static rtx
241 enqueue_insn (var, body)
242 rtx var, body;
243 {
244 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
245 var, NULL_RTX, NULL_RTX, body, pending_chain);
246 return pending_chain;
247 }
248
249 /* Use protect_from_queue to convert a QUEUED expression
250 into something that you can put immediately into an instruction.
251 If the queued incrementation has not happened yet,
252 protect_from_queue returns the variable itself.
253 If the incrementation has happened, protect_from_queue returns a temp
254 that contains a copy of the old value of the variable.
255
256 Any time an rtx which might possibly be a QUEUED is to be put
257 into an instruction, it must be passed through protect_from_queue first.
258 QUEUED expressions are not meaningful in instructions.
259
260 Do not pass a value through protect_from_queue and then hold
261 on to it for a while before putting it in an instruction!
262 If the queue is flushed in between, incorrect code will result. */
263
264 rtx
265 protect_from_queue (x, modify)
266 register rtx x;
267 int modify;
268 {
269 register RTX_CODE code = GET_CODE (x);
270
271 #if 0 /* A QUEUED can hang around after the queue is forced out. */
272 /* Shortcut for most common case. */
273 if (pending_chain == 0)
274 return x;
275 #endif
276
277 if (code != QUEUED)
278 {
279 /* A special hack for read access to (MEM (QUEUED ...))
280 to facilitate use of autoincrement.
281 Make a copy of the contents of the memory location
282 rather than a copy of the address, but not
283 if the value is of mode BLKmode. */
284 if (code == MEM && GET_MODE (x) != BLKmode
285 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
286 {
287 register rtx y = XEXP (x, 0);
288 XEXP (x, 0) = QUEUED_VAR (y);
289 if (QUEUED_INSN (y))
290 {
291 register rtx temp = gen_reg_rtx (GET_MODE (x));
292 emit_insn_before (gen_move_insn (temp, x),
293 QUEUED_INSN (y));
294 return temp;
295 }
296 return x;
297 }
298 /* Otherwise, recursively protect the subexpressions of all
299 the kinds of rtx's that can contain a QUEUED. */
300 if (code == MEM)
301 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
302 else if (code == PLUS || code == MULT)
303 {
304 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
305 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
306 }
307 return x;
308 }
309 /* If the increment has not happened, use the variable itself. */
310 if (QUEUED_INSN (x) == 0)
311 return QUEUED_VAR (x);
312 /* If the increment has happened and a pre-increment copy exists,
313 use that copy. */
314 if (QUEUED_COPY (x) != 0)
315 return QUEUED_COPY (x);
316 /* The increment has happened but we haven't set up a pre-increment copy.
317 Set one up now, and use it. */
318 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
319 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
320 QUEUED_INSN (x));
321 return QUEUED_COPY (x);
322 }
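
#if 0
/* Illustrative sketch, not compiled: the intended protocol around the
   increment queue, assuming VAR is a register rtx.  enqueue_insn (above)
   records a deferred increment; protect_from_queue makes an rtx safe to
   use in an insn meanwhile; emit_queue (below) finally emits the
   increment.  */

static void
queue_protocol_sketch (var)
     rtx var;
{
  /* Defer "VAR = VAR + 1" instead of emitting it now.  */
  rtx q = enqueue_insn (var, gen_move_insn (var, plus_constant (var, 1)));

  /* Before Q can appear in an insn, it must pass through
     protect_from_queue, which yields VAR itself (or, once the queue has
     been flushed, a copy of VAR's old value).  */
  emit_move_insn (gen_reg_rtx (GET_MODE (var)), protect_from_queue (q, 0));

  /* Flush the queue; the increment of VAR is emitted here.  */
  emit_queue ();
}
#endif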
323
324 /* Return nonzero if X contains a QUEUED expression:
325 if it contains anything that will be altered by a queued increment.
326 We handle only combinations of MEM, PLUS, MINUS and MULT operators
327 since memory addresses generally contain only those. */
328
329 static int
330 queued_subexp_p (x)
331 rtx x;
332 {
333 register enum rtx_code code = GET_CODE (x);
334 switch (code)
335 {
336 case QUEUED:
337 return 1;
338 case MEM:
339 return queued_subexp_p (XEXP (x, 0));
340 case MULT:
341 case PLUS:
342 case MINUS:
343 return queued_subexp_p (XEXP (x, 0))
344 || queued_subexp_p (XEXP (x, 1));
345 }
346 return 0;
347 }
348
349 /* Perform all the pending incrementations. */
350
351 void
352 emit_queue ()
353 {
354 register rtx p;
355 while (p = pending_chain)
356 {
357 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
358 pending_chain = QUEUED_NEXT (p);
359 }
360 }
361
362 static void
363 init_queue ()
364 {
365 if (pending_chain)
366 abort ();
367 }
368 \f
369 /* Copy data from FROM to TO, where the machine modes are not the same.
370 Both modes may be integer, or both may be floating.
371 UNSIGNEDP should be nonzero if FROM is an unsigned type.
372 This causes zero-extension instead of sign-extension. */
373
374 void
375 convert_move (to, from, unsignedp)
376 register rtx to, from;
377 int unsignedp;
378 {
379 enum machine_mode to_mode = GET_MODE (to);
380 enum machine_mode from_mode = GET_MODE (from);
381 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
382 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
383 enum insn_code code;
384 rtx libcall;
385
386 /* rtx code for making an equivalent value. */
387 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
388
389 to = protect_from_queue (to, 1);
390 from = protect_from_queue (from, 0);
391
392 if (to_real != from_real)
393 abort ();
394
395 if (to_mode == from_mode
396 || (from_mode == VOIDmode && CONSTANT_P (from)))
397 {
398 emit_move_insn (to, from);
399 return;
400 }
401
402 if (to_real)
403 {
404 #ifdef HAVE_extendsfdf2
405 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
406 {
407 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
408 return;
409 }
410 #endif
411 #ifdef HAVE_extendsfxf2
412 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
413 {
414 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
415 return;
416 }
417 #endif
418 #ifdef HAVE_extendsftf2
419 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
420 {
421 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
422 return;
423 }
424 #endif
425 #ifdef HAVE_extenddfxf2
426 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
427 {
428 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
429 return;
430 }
431 #endif
432 #ifdef HAVE_extenddftf2
433 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
434 {
435 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
436 return;
437 }
438 #endif
439 #ifdef HAVE_truncdfsf2
440 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
441 {
442 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
443 return;
444 }
445 #endif
446 #ifdef HAVE_truncxfsf2
447 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
448 {
449 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
450 return;
451 }
452 #endif
453 #ifdef HAVE_trunctfsf2
454 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
455 {
456 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
457 return;
458 }
459 #endif
460 #ifdef HAVE_truncxfdf2
461 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
462 {
463 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
464 return;
465 }
466 #endif
467 #ifdef HAVE_trunctfdf2
468 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
469 {
470 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
471 return;
472 }
473 #endif
474
475 libcall = (rtx) 0;
476 switch (from_mode)
477 {
478 case SFmode:
479 switch (to_mode)
480 {
481 case DFmode:
482 libcall = extendsfdf2_libfunc;
483 break;
484
485 case XFmode:
486 libcall = extendsfxf2_libfunc;
487 break;
488
489 case TFmode:
490 libcall = extendsftf2_libfunc;
491 break;
492 }
493 break;
494
495 case DFmode:
496 switch (to_mode)
497 {
498 case SFmode:
499 libcall = truncdfsf2_libfunc;
500 break;
501
502 case XFmode:
503 libcall = extenddfxf2_libfunc;
504 break;
505
506 case TFmode:
507 libcall = extenddftf2_libfunc;
508 break;
509 }
510 break;
511
512 case XFmode:
513 switch (to_mode)
514 {
515 case SFmode:
516 libcall = truncxfsf2_libfunc;
517 break;
518
519 case DFmode:
520 libcall = truncxfdf2_libfunc;
521 break;
522 }
523 break;
524
525 case TFmode:
526 switch (to_mode)
527 {
528 case SFmode:
529 libcall = trunctfsf2_libfunc;
530 break;
531
532 case DFmode:
533 libcall = trunctfdf2_libfunc;
534 break;
535 }
536 break;
537 }
538
539 if (libcall == (rtx) 0)
540 /* This conversion is not implemented yet. */
541 abort ();
542
543 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
544 emit_move_insn (to, hard_libcall_value (to_mode));
545 return;
546 }
547
548 /* Now both modes are integers. */
549
550 /* Handle expanding beyond a word. */
551 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
552 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
553 {
554 rtx insns;
555 rtx lowpart;
556 rtx fill_value;
557 rtx lowfrom;
558 int i;
559 enum machine_mode lowpart_mode;
560 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
561
562 /* Try converting directly if the insn is supported. */
563 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
564 != CODE_FOR_nothing)
565 {
566 emit_unop_insn (code, to, from, equiv_code);
567 return;
568 }
569 /* Next, try converting via full word. */
570 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
571 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
572 != CODE_FOR_nothing))
573 {
574 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
575 emit_unop_insn (code, to,
576 gen_lowpart (word_mode, to), equiv_code);
577 return;
578 }
579
580 /* No special multiword conversion insn; do it by hand. */
581 start_sequence ();
582
583 /* Get a copy of FROM widened to a word, if necessary. */
584 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
585 lowpart_mode = word_mode;
586 else
587 lowpart_mode = from_mode;
588
589 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
590
591 lowpart = gen_lowpart (lowpart_mode, to);
592 emit_move_insn (lowpart, lowfrom);
593
594 /* Compute the value to put in each remaining word. */
595 if (unsignedp)
596 fill_value = const0_rtx;
597 else
598 {
599 #ifdef HAVE_slt
600 if (HAVE_slt
601 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
602 && STORE_FLAG_VALUE == -1)
603 {
604 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
605 lowpart_mode, 0, 0);
606 fill_value = gen_reg_rtx (word_mode);
607 emit_insn (gen_slt (fill_value));
608 }
609 else
610 #endif
611 {
612 fill_value
613 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
614 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
615 NULL_RTX, 0);
616 fill_value = convert_to_mode (word_mode, fill_value, 1);
617 }
618 }
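
/* For instance, widening an SImode -5 to DImode: LOWFROM holds
   0xfffffffb, the arithmetic right shift by 31 gives all ones, so
   FILL_VALUE is -1 and the high word is filled with the sign bit.
   For unsigned widening FILL_VALUE is simply zero.  */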
619
620 /* Fill the remaining words. */
621 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
622 {
623 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
624 rtx subword = operand_subword (to, index, 1, to_mode);
625
626 if (subword == 0)
627 abort ();
628
629 if (fill_value != subword)
630 emit_move_insn (subword, fill_value);
631 }
632
633 insns = get_insns ();
634 end_sequence ();
635
636 emit_no_conflict_block (insns, to, from, NULL_RTX,
637 gen_rtx (equiv_code, to_mode, from));
638 return;
639 }
640
641 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
642 {
643 convert_move (to, gen_lowpart (word_mode, from), 0);
644 return;
645 }
646
647 /* Handle pointer conversion.  (SPEE 900220)  */
648 if (to_mode == PSImode)
649 {
650 if (from_mode != SImode)
651 from = convert_to_mode (SImode, from, unsignedp);
652
653 #ifdef HAVE_truncsipsi
654 if (HAVE_truncsipsi)
655 {
656 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
657 return;
658 }
659 #endif /* HAVE_truncsipsi */
660 abort ();
661 }
662
663 if (from_mode == PSImode)
664 {
665 if (to_mode != SImode)
666 {
667 from = convert_to_mode (SImode, from, unsignedp);
668 from_mode = SImode;
669 }
670 else
671 {
672 #ifdef HAVE_extendpsisi
673 if (HAVE_extendpsisi)
674 {
675 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
676 return;
677 }
678 #endif /* HAVE_extendpsisi */
679 abort ();
680 }
681 }
682
683 /* Now follow all the conversions between integers
684 no more than a word long. */
685
686 /* For truncation, usually we can just refer to FROM in a narrower mode. */
687 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
688 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
689 GET_MODE_BITSIZE (from_mode))
690 && ((GET_CODE (from) == MEM
691 && ! MEM_VOLATILE_P (from)
692 && direct_load[(int) to_mode]
693 && ! mode_dependent_address_p (XEXP (from, 0)))
694 || GET_CODE (from) == REG
695 || GET_CODE (from) == SUBREG))
696 {
697 emit_move_insn (to, gen_lowpart (to_mode, from));
698 return;
699 }
700
701 /* Handle extension.  */
702 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
703 {
704 /* Convert directly if that works. */
705 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
706 != CODE_FOR_nothing)
707 {
708 emit_unop_insn (code, to, from, equiv_code);
709 return;
710 }
711 else
712 {
713 enum machine_mode intermediate;
714
715 /* Search for a mode to convert via. */
716 for (intermediate = from_mode; intermediate != VOIDmode;
717 intermediate = GET_MODE_WIDER_MODE (intermediate))
718 if ((can_extend_p (to_mode, intermediate, unsignedp)
719 != CODE_FOR_nothing)
720 && (can_extend_p (intermediate, from_mode, unsignedp)
721 != CODE_FOR_nothing))
722 {
723 convert_move (to, convert_to_mode (intermediate, from,
724 unsignedp), unsignedp);
725 return;
726 }
727
728 /* No suitable intermediate mode. */
729 abort ();
730 }
731 }
732
733 /* Support special truncate insns for certain modes. */
734
735 if (from_mode == DImode && to_mode == SImode)
736 {
737 #ifdef HAVE_truncdisi2
738 if (HAVE_truncdisi2)
739 {
740 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
741 return;
742 }
743 #endif
744 convert_move (to, force_reg (from_mode, from), unsignedp);
745 return;
746 }
747
748 if (from_mode == DImode && to_mode == HImode)
749 {
750 #ifdef HAVE_truncdihi2
751 if (HAVE_truncdihi2)
752 {
753 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
754 return;
755 }
756 #endif
757 convert_move (to, force_reg (from_mode, from), unsignedp);
758 return;
759 }
760
761 if (from_mode == DImode && to_mode == QImode)
762 {
763 #ifdef HAVE_truncdiqi2
764 if (HAVE_truncdiqi2)
765 {
766 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
767 return;
768 }
769 #endif
770 convert_move (to, force_reg (from_mode, from), unsignedp);
771 return;
772 }
773
774 if (from_mode == SImode && to_mode == HImode)
775 {
776 #ifdef HAVE_truncsihi2
777 if (HAVE_truncsihi2)
778 {
779 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
780 return;
781 }
782 #endif
783 convert_move (to, force_reg (from_mode, from), unsignedp);
784 return;
785 }
786
787 if (from_mode == SImode && to_mode == QImode)
788 {
789 #ifdef HAVE_truncsiqi2
790 if (HAVE_truncsiqi2)
791 {
792 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
793 return;
794 }
795 #endif
796 convert_move (to, force_reg (from_mode, from), unsignedp);
797 return;
798 }
799
800 if (from_mode == HImode && to_mode == QImode)
801 {
802 #ifdef HAVE_trunchiqi2
803 if (HAVE_trunchiqi2)
804 {
805 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
806 return;
807 }
808 #endif
809 convert_move (to, force_reg (from_mode, from), unsignedp);
810 return;
811 }
812
813 /* Handle truncation of volatile memrefs, and so on;
814 the things that couldn't be truncated directly,
815 and for which there was no special instruction. */
816 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
817 {
818 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
819 emit_move_insn (to, temp);
820 return;
821 }
822
823 /* Mode combination is not recognized. */
824 abort ();
825 }
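
#if 0
/* Illustrative sketch, not compiled: a typical widening use of
   convert_move.  FROM is assumed to be an SImode pseudo; TO receives the
   sign-extended DImode value, via an extendsidi2 pattern if the target
   provides one, otherwise through the multiword code above.  */

static void
convert_move_sketch ()
{
  rtx from = gen_reg_rtx (SImode);
  rtx to = gen_reg_rtx (DImode);

  convert_move (to, from, 0);   /* UNSIGNEDP == 0: sign-extend.  */
}
#endif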
826
827 /* Return an rtx for a value that would result
828 from converting X to mode MODE.
829 Both X and MODE may be floating, or both integer.
830 UNSIGNEDP is nonzero if X is an unsigned value.
831 This can be done by referring to a part of X in place
832 or by copying to a new temporary with conversion.
833
834 This function *must not* call protect_from_queue
835 except when putting X into an insn (in which case convert_move does it). */
836
837 rtx
838 convert_to_mode (mode, x, unsignedp)
839 enum machine_mode mode;
840 rtx x;
841 int unsignedp;
842 {
843 register rtx temp;
844
845 if (mode == GET_MODE (x))
846 return x;
847
848 /* There is one case that we must handle specially: If we are converting
849 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
850 we are to interpret the constant as unsigned, gen_lowpart will do
851 the wrong thing if the constant appears negative. What we want to do is
852 make the high-order word of the constant zero, not all ones. */
853
854 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
855 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
856 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
857 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
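/* Example with a 32-bit HOST_WIDE_INT: converting (const_int -1) to an
   unsigned 64-bit mode must yield 0x00000000ffffffff; gen_lowpart would
   have produced the all-ones constant instead.  */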
858
859 /* We can do this with a gen_lowpart if both desired and current modes
860 are integer, and this is either a constant integer, a register, or a
861 non-volatile MEM. Except for the constant case, we must be narrowing
862 the operand. */
863
864 if (GET_CODE (x) == CONST_INT
865 || (GET_MODE_CLASS (mode) == MODE_INT
866 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
867 && (GET_CODE (x) == CONST_DOUBLE
868 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
869 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
870 && direct_load[(int) mode]
871 || GET_CODE (x) == REG)))))
872 return gen_lowpart (mode, x);
873
874 temp = gen_reg_rtx (mode);
875 convert_move (temp, x, unsignedp);
876 return temp;
877 }
878 \f
879 /* Generate several move instructions to copy LEN bytes
880 from block FROM to block TO. (These are MEM rtx's with BLKmode).
881 The caller must pass FROM and TO
882 through protect_from_queue before calling.
883 ALIGN (in bytes) is maximum alignment we can assume. */
884
885 struct move_by_pieces
886 {
887 rtx to;
888 rtx to_addr;
889 int autinc_to;
890 int explicit_inc_to;
891 rtx from;
892 rtx from_addr;
893 int autinc_from;
894 int explicit_inc_from;
895 int len;
896 int offset;
897 int reverse;
898 };
899
900 static void move_by_pieces_1 ();
901 static int move_by_pieces_ninsns ();
902
903 static void
904 move_by_pieces (to, from, len, align)
905 rtx to, from;
906 int len, align;
907 {
908 struct move_by_pieces data;
909 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
910 int max_size = MOVE_MAX + 1;
911
912 data.offset = 0;
913 data.to_addr = to_addr;
914 data.from_addr = from_addr;
915 data.to = to;
916 data.from = from;
917 data.autinc_to
918 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
919 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
920 data.autinc_from
921 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
922 || GET_CODE (from_addr) == POST_INC
923 || GET_CODE (from_addr) == POST_DEC);
924
925 data.explicit_inc_from = 0;
926 data.explicit_inc_to = 0;
927 data.reverse
928 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
929 if (data.reverse) data.offset = len;
930 data.len = len;
931
932 /* If copying requires more than two move insns,
933 copy addresses to registers (to make displacements shorter)
934 and use post-increment if available. */
935 if (!(data.autinc_from && data.autinc_to)
936 && move_by_pieces_ninsns (len, align) > 2)
937 {
938 #ifdef HAVE_PRE_DECREMENT
939 if (data.reverse && ! data.autinc_from)
940 {
941 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
942 data.autinc_from = 1;
943 data.explicit_inc_from = -1;
944 }
945 #endif
946 #ifdef HAVE_POST_INCREMENT
947 if (! data.autinc_from)
948 {
949 data.from_addr = copy_addr_to_reg (from_addr);
950 data.autinc_from = 1;
951 data.explicit_inc_from = 1;
952 }
953 #endif
954 if (!data.autinc_from && CONSTANT_P (from_addr))
955 data.from_addr = copy_addr_to_reg (from_addr);
956 #ifdef HAVE_PRE_DECREMENT
957 if (data.reverse && ! data.autinc_to)
958 {
959 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
960 data.autinc_to = 1;
961 data.explicit_inc_to = -1;
962 }
963 #endif
964 #ifdef HAVE_POST_INCREMENT
965 if (! data.reverse && ! data.autinc_to)
966 {
967 data.to_addr = copy_addr_to_reg (to_addr);
968 data.autinc_to = 1;
969 data.explicit_inc_to = 1;
970 }
971 #endif
972 if (!data.autinc_to && CONSTANT_P (to_addr))
973 data.to_addr = copy_addr_to_reg (to_addr);
974 }
975
976 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
977 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
978 align = MOVE_MAX;
979
980 /* First move what we can in the largest integer mode, then go to
981 successively smaller modes. */
982
983 while (max_size > 1)
984 {
985 enum machine_mode mode = VOIDmode, tmode;
986 enum insn_code icode;
987
988 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
989 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
990 if (GET_MODE_SIZE (tmode) < max_size)
991 mode = tmode;
992
993 if (mode == VOIDmode)
994 break;
995
996 icode = mov_optab->handlers[(int) mode].insn_code;
997 if (icode != CODE_FOR_nothing
998 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
999 GET_MODE_SIZE (mode)))
1000 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1001
1002 max_size = GET_MODE_SIZE (mode);
1003 }
1004
1005 /* The code above should have handled everything. */
1006 if (data.len != 0)
1007 abort ();
1008 }
1009
1010 /* Return number of insns required to move L bytes by pieces.
1011 ALIGN (in bytes) is maximum alignment we can assume. */
1012
1013 static int
1014 move_by_pieces_ninsns (l, align)
1015 unsigned int l;
1016 int align;
1017 {
1018 register int n_insns = 0;
1019 int max_size = MOVE_MAX + 1;
1020
1021 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1022 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1023 align = MOVE_MAX;
1024
1025 while (max_size > 1)
1026 {
1027 enum machine_mode mode = VOIDmode, tmode;
1028 enum insn_code icode;
1029
1030 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1031 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1032 if (GET_MODE_SIZE (tmode) < max_size)
1033 mode = tmode;
1034
1035 if (mode == VOIDmode)
1036 break;
1037
1038 icode = mov_optab->handlers[(int) mode].insn_code;
1039 if (icode != CODE_FOR_nothing
1040 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1041 GET_MODE_SIZE (mode)))
1042 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1043
1044 max_size = GET_MODE_SIZE (mode);
1045 }
1046
1047 return n_insns;
1048 }
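
/* Worked example, assuming MOVE_MAX is 4 and ALIGN permits word access:
   for L = 10 the loop counts 10/4 = 2 SImode moves (2 bytes remain),
   then 2/2 = 1 HImode move, then no QImode moves, returning 3.
   emit_block_move compares this count against MOVE_RATIO.  */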
1049
1050 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1051 with move instructions for mode MODE. GENFUN is the gen_... function
1052 to make a move insn for that mode. DATA has all the other info. */
1053
1054 static void
1055 move_by_pieces_1 (genfun, mode, data)
1056 rtx (*genfun) ();
1057 enum machine_mode mode;
1058 struct move_by_pieces *data;
1059 {
1060 register int size = GET_MODE_SIZE (mode);
1061 register rtx to1, from1;
1062
1063 while (data->len >= size)
1064 {
1065 if (data->reverse) data->offset -= size;
1066
1067 to1 = (data->autinc_to
1068 ? gen_rtx (MEM, mode, data->to_addr)
1069 : change_address (data->to, mode,
1070 plus_constant (data->to_addr, data->offset)));
1071 from1 =
1072 (data->autinc_from
1073 ? gen_rtx (MEM, mode, data->from_addr)
1074 : change_address (data->from, mode,
1075 plus_constant (data->from_addr, data->offset)));
1076
1077 #ifdef HAVE_PRE_DECREMENT
1078 if (data->explicit_inc_to < 0)
1079 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1080 if (data->explicit_inc_from < 0)
1081 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1082 #endif
1083
1084 emit_insn ((*genfun) (to1, from1));
1085 #ifdef HAVE_POST_INCREMENT
1086 if (data->explicit_inc_to > 0)
1087 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1088 if (data->explicit_inc_from > 0)
1089 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1090 #endif
1091
1092 if (! data->reverse) data->offset += size;
1093
1094 data->len -= size;
1095 }
1096 }
1097 \f
1098 /* Emit code to move a block Y to a block X.
1099 This may be done with string-move instructions,
1100 with multiple scalar move instructions, or with a library call.
1101
1102 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1103 with mode BLKmode.
1104 SIZE is an rtx that says how long they are.
1105 ALIGN is the maximum alignment we can assume they have,
1106 measured in bytes. */
1107
1108 void
1109 emit_block_move (x, y, size, align)
1110 rtx x, y;
1111 rtx size;
1112 int align;
1113 {
1114 if (GET_MODE (x) != BLKmode)
1115 abort ();
1116
1117 if (GET_MODE (y) != BLKmode)
1118 abort ();
1119
1120 x = protect_from_queue (x, 1);
1121 y = protect_from_queue (y, 0);
1122 size = protect_from_queue (size, 0);
1123
1124 if (GET_CODE (x) != MEM)
1125 abort ();
1126 if (GET_CODE (y) != MEM)
1127 abort ();
1128 if (size == 0)
1129 abort ();
1130
1131 if (GET_CODE (size) == CONST_INT
1132 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1133 move_by_pieces (x, y, INTVAL (size), align);
1134 else
1135 {
1136 /* Try the most limited insn first, because there's no point
1137 including more than one in the machine description unless
1138 the more limited one has some advantage. */
1139 #ifdef HAVE_movstrqi
1140 if (HAVE_movstrqi
1141 && GET_CODE (size) == CONST_INT
1142 && ((unsigned) INTVAL (size)
1143 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1144 {
1145 rtx insn = gen_movstrqi (x, y, size, GEN_INT (align));
1146 if (insn)
1147 {
1148 emit_insn (insn);
1149 return;
1150 }
1151 }
1152 #endif
1153 #ifdef HAVE_movstrhi
1154 if (HAVE_movstrhi
1155 && GET_CODE (size) == CONST_INT
1156 && ((unsigned) INTVAL (size)
1157 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1158 {
1159 rtx insn = gen_movstrhi (x, y, size, GEN_INT (align));
1160 if (insn)
1161 {
1162 emit_insn (insn);
1163 return;
1164 }
1165 }
1166 #endif
1167 #ifdef HAVE_movstrsi
1168 if (HAVE_movstrsi)
1169 {
1170 rtx insn = gen_movstrsi (x, y, size, GEN_INT (align));
1171 if (insn)
1172 {
1173 emit_insn (insn);
1174 return;
1175 }
1176 }
1177 #endif
1178 #ifdef HAVE_movstrdi
1179 if (HAVE_movstrdi)
1180 {
1181 rtx insn = gen_movstrdi (x, y, size, GEN_INT (align));
1182 if (insn)
1183 {
1184 emit_insn (insn);
1185 return;
1186 }
1187 }
1188 #endif
1189
1190 #ifdef TARGET_MEM_FUNCTIONS
1191 emit_library_call (memcpy_libfunc, 1,
1192 VOIDmode, 3, XEXP (x, 0), Pmode,
1193 XEXP (y, 0), Pmode,
1194 convert_to_mode (Pmode, size, 1), Pmode);
1195 #else
1196 emit_library_call (bcopy_libfunc, 1,
1197 VOIDmode, 3, XEXP (y, 0), Pmode,
1198 XEXP (x, 0), Pmode,
1199 convert_to_mode (Pmode, size, 1), Pmode);
1200 #endif
1201 }
1202 }
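
#if 0
/* Illustrative sketch, not compiled: copying a 16-byte BLKmode object
   known to be word-aligned.  X and Y are assumed to be BLKmode MEMs.  */
emit_block_move (x, y, GEN_INT (16), UNITS_PER_WORD);
#endif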
1203 \f
1204 /* Copy all or part of a value X into registers starting at REGNO.
1205 The number of registers to be filled is NREGS. */
1206
1207 void
1208 move_block_to_reg (regno, x, nregs, mode)
1209 int regno;
1210 rtx x;
1211 int nregs;
1212 enum machine_mode mode;
1213 {
1214 int i;
1215 rtx pat, last;
1216
1217 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1218 x = validize_mem (force_const_mem (mode, x));
1219
1220 /* See if the machine can do this with a load multiple insn. */
1221 #ifdef HAVE_load_multiple
1222 last = get_last_insn ();
1223 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1224 GEN_INT (nregs));
1225 if (pat)
1226 {
1227 emit_insn (pat);
1228 return;
1229 }
1230 else
1231 delete_insns_since (last);
1232 #endif
1233
1234 for (i = 0; i < nregs; i++)
1235 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1236 operand_subword_force (x, i, mode));
1237 }
1238
1239 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1240 The number of registers to be filled is NREGS. */
1241
1242 void
1243 move_block_from_reg (regno, x, nregs)
1244 int regno;
1245 rtx x;
1246 int nregs;
1247 {
1248 int i;
1249 rtx pat, last;
1250
1251 /* See if the machine can do this with a store multiple insn. */
1252 #ifdef HAVE_store_multiple
1253 last = get_last_insn ();
1254 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1255 GEN_INT (nregs));
1256 if (pat)
1257 {
1258 emit_insn (pat);
1259 return;
1260 }
1261 else
1262 delete_insns_since (last);
1263 #endif
1264
1265 for (i = 0; i < nregs; i++)
1266 {
1267 rtx tem = operand_subword (x, i, 1, BLKmode);
1268
1269 if (tem == 0)
1270 abort ();
1271
1272 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1273 }
1274 }
1275
1276 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1277
1278 void
1279 use_regs (regno, nregs)
1280 int regno;
1281 int nregs;
1282 {
1283 int i;
1284
1285 for (i = 0; i < nregs; i++)
1286 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1287 }
1288 \f
1289 /* Write zeros through the storage of OBJECT.
1290 If OBJECT has BLKmode, SIZE is its length in bytes. */
1291
1292 void
1293 clear_storage (object, size)
1294 rtx object;
1295 int size;
1296 {
1297 if (GET_MODE (object) == BLKmode)
1298 {
1299 #ifdef TARGET_MEM_FUNCTIONS
1300 emit_library_call (memset_libfunc, 1,
1301 VOIDmode, 3,
1302 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1303 GEN_INT (size), Pmode);
1304 #else
1305 emit_library_call (bzero_libfunc, 1,
1306 VOIDmode, 2,
1307 XEXP (object, 0), Pmode,
1308 GEN_INT (size), Pmode);
1309 #endif
1310 }
1311 else
1312 emit_move_insn (object, const0_rtx);
1313 }
1314
1315 /* Generate code to copy Y into X.
1316 Both Y and X must have the same mode, except that
1317 Y can be a constant with VOIDmode.
1318 This mode cannot be BLKmode; use emit_block_move for that.
1319
1320 Return the last instruction emitted. */
1321
1322 rtx
1323 emit_move_insn (x, y)
1324 rtx x, y;
1325 {
1326 enum machine_mode mode = GET_MODE (x);
1327 int i;
1328
1329 x = protect_from_queue (x, 1);
1330 y = protect_from_queue (y, 0);
1331
1332 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1333 abort ();
1334
1335 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1336 y = force_const_mem (mode, y);
1337
1338 /* If X or Y are memory references, verify that their addresses are valid
1339 for the machine. */
1340 if (GET_CODE (x) == MEM
1341 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1342 && ! push_operand (x, GET_MODE (x)))
1343 || (flag_force_addr
1344 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1345 x = change_address (x, VOIDmode, XEXP (x, 0));
1346
1347 if (GET_CODE (y) == MEM
1348 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1349 || (flag_force_addr
1350 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1351 y = change_address (y, VOIDmode, XEXP (y, 0));
1352
1353 if (mode == BLKmode)
1354 abort ();
1355
1356 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1357 return
1358 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1359
1360 /* This will handle any multi-word mode that lacks a move_insn pattern.
1361 However, you will get better code if you define such patterns,
1362 even if they must turn into multiple assembler instructions. */
1363 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1364 {
1365 rtx last_insn = 0;
1366
1367 for (i = 0;
1368 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1369 i++)
1370 {
1371 rtx xpart = operand_subword (x, i, 1, mode);
1372 rtx ypart = operand_subword (y, i, 1, mode);
1373
1374 /* If we can't get a part of Y, put Y into memory if it is a
1375 constant. Otherwise, force it into a register. If we still
1376 can't get a part of Y, abort. */
1377 if (ypart == 0 && CONSTANT_P (y))
1378 {
1379 y = force_const_mem (mode, y);
1380 ypart = operand_subword (y, i, 1, mode);
1381 }
1382 else if (ypart == 0)
1383 ypart = operand_subword_force (y, i, mode);
1384
1385 if (xpart == 0 || ypart == 0)
1386 abort ();
1387
1388 last_insn = emit_move_insn (xpart, ypart);
1389 }
1390 return last_insn;
1391 }
1392 else
1393 abort ();
1394 }
1395 \f
1396 /* Pushing data onto the stack. */
1397
1398 /* Push a block of length SIZE (perhaps variable)
1399 and return an rtx to address the beginning of the block.
1400 Note that it is not possible for the value returned to be a QUEUED.
1401 The value may be virtual_outgoing_args_rtx.
1402
1403 EXTRA is the number of bytes of padding to push in addition to SIZE.
1404 BELOW nonzero means this padding comes at low addresses;
1405 otherwise, the padding comes at high addresses. */
1406
1407 rtx
1408 push_block (size, extra, below)
1409 rtx size;
1410 int extra, below;
1411 {
1412 register rtx temp;
1413 if (CONSTANT_P (size))
1414 anti_adjust_stack (plus_constant (size, extra));
1415 else if (GET_CODE (size) == REG && extra == 0)
1416 anti_adjust_stack (size);
1417 else
1418 {
1419 rtx temp = copy_to_mode_reg (Pmode, size);
1420 if (extra != 0)
1421 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1422 temp, 0, OPTAB_LIB_WIDEN);
1423 anti_adjust_stack (temp);
1424 }
1425
1426 #ifdef STACK_GROWS_DOWNWARD
1427 temp = virtual_outgoing_args_rtx;
1428 if (extra != 0 && below)
1429 temp = plus_constant (temp, extra);
1430 #else
1431 if (GET_CODE (size) == CONST_INT)
1432 temp = plus_constant (virtual_outgoing_args_rtx,
1433 - INTVAL (size) - (below ? 0 : extra));
1434 else if (extra != 0 && !below)
1435 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1436 negate_rtx (Pmode, plus_constant (size, extra)));
1437 else
1438 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1439 negate_rtx (Pmode, size));
1440 #endif
1441
1442 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1443 }
1444
1445 static rtx
1446 gen_push_operand ()
1447 {
1448 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1449 }
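
/* With the default STACK_PUSH_CODE above, a downward-growing stack gives
   (pre_dec (reg sp)), so a move into (mem:M (pre_dec sp)) is a push;
   an upward-growing stack gives (pre_inc sp) instead.  */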
1450
1451 /* Generate code to push X onto the stack, assuming it has mode MODE and
1452 type TYPE.
1453 MODE is redundant except when X is a CONST_INT (since they don't
1454 carry mode info).
1455 SIZE is an rtx for the size of data to be copied (in bytes),
1456 needed only if X is BLKmode.
1457
1458 ALIGN (in bytes) is maximum alignment we can assume.
1459
1460 If PARTIAL is nonzero, then copy that many of the first words
1461 of X into registers starting with REG, and push the rest of X.
1462 The amount of space pushed is decreased by PARTIAL words,
1463 rounded *down* to a multiple of PARM_BOUNDARY.
1464 REG must be a hard register in this case.
1465
1466 EXTRA is the amount in bytes of extra space to leave next to this arg.
1467 This is ignored if an argument block has already been allocated.
1468
1469 On a machine that lacks real push insns, ARGS_ADDR is the address of
1470 the bottom of the argument block for this call. We use indexing off there
1471 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1472 argument block has not been preallocated.
1473
1474 ARGS_SO_FAR is the size of args previously pushed for this call. */
1475
1476 void
1477 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1478 args_addr, args_so_far)
1479 register rtx x;
1480 enum machine_mode mode;
1481 tree type;
1482 rtx size;
1483 int align;
1484 int partial;
1485 rtx reg;
1486 int extra;
1487 rtx args_addr;
1488 rtx args_so_far;
1489 {
1490 rtx xinner;
1491 enum direction stack_direction
1492 #ifdef STACK_GROWS_DOWNWARD
1493 = downward;
1494 #else
1495 = upward;
1496 #endif
1497
1498 /* Decide where to pad the argument: `downward' for below,
1499 `upward' for above, or `none' for don't pad it.
1500 Default is below for small data on big-endian machines; else above. */
1501 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1502
1503 /* Invert direction if stack is post-update. */
1504 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1505 if (where_pad != none)
1506 where_pad = (where_pad == downward ? upward : downward);
1507
1508 xinner = x = protect_from_queue (x, 0);
1509
1510 if (mode == BLKmode)
1511 {
1512 /* Copy a block into the stack, entirely or partially. */
1513
1514 register rtx temp;
1515 int used = partial * UNITS_PER_WORD;
1516 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1517 int skip;
1518
1519 if (size == 0)
1520 abort ();
1521
1522 used -= offset;
1523
1524 /* USED is now the # of bytes we need not copy to the stack
1525 because registers will take care of them. */
1526
1527 if (partial != 0)
1528 xinner = change_address (xinner, BLKmode,
1529 plus_constant (XEXP (xinner, 0), used));
1530
1531 /* If the partial register-part of the arg counts in its stack size,
1532 skip the part of stack space corresponding to the registers.
1533 Otherwise, start copying to the beginning of the stack space,
1534 by setting SKIP to 0. */
1535 #ifndef REG_PARM_STACK_SPACE
1536 skip = 0;
1537 #else
1538 skip = used;
1539 #endif
1540
1541 #ifdef PUSH_ROUNDING
1542 /* Do it with several push insns if that doesn't take lots of insns
1543 and if there is no difficulty with push insns that skip bytes
1544 on the stack for alignment purposes. */
1545 if (args_addr == 0
1546 && GET_CODE (size) == CONST_INT
1547 && skip == 0
1548 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1549 < MOVE_RATIO)
1550 /* Here we avoid the case of a structure whose weak alignment
1551 forces many pushes of a small amount of data,
1552 and such small pushes do rounding that causes trouble. */
1553 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1554 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1555 || PUSH_ROUNDING (align) == align)
1556 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1557 {
1558 /* Push padding now if padding above and stack grows down,
1559 or if padding below and stack grows up.
1560 But if space already allocated, this has already been done. */
1561 if (extra && args_addr == 0
1562 && where_pad != none && where_pad != stack_direction)
1563 anti_adjust_stack (GEN_INT (extra));
1564
1565 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1566 INTVAL (size) - used, align);
1567 }
1568 else
1569 #endif /* PUSH_ROUNDING */
1570 {
1571 /* Otherwise make space on the stack and copy the data
1572 to the address of that space. */
1573
1574 /* Deduct words put into registers from the size we must copy. */
1575 if (partial != 0)
1576 {
1577 if (GET_CODE (size) == CONST_INT)
1578 size = GEN_INT (INTVAL (size) - used);
1579 else
1580 size = expand_binop (GET_MODE (size), sub_optab, size,
1581 GEN_INT (used), NULL_RTX, 0,
1582 OPTAB_LIB_WIDEN);
1583 }
1584
1585 /* Get the address of the stack space.
1586 In this case, we do not deal with EXTRA separately.
1587 A single stack adjust will do. */
1588 if (! args_addr)
1589 {
1590 temp = push_block (size, extra, where_pad == downward);
1591 extra = 0;
1592 }
1593 else if (GET_CODE (args_so_far) == CONST_INT)
1594 temp = memory_address (BLKmode,
1595 plus_constant (args_addr,
1596 skip + INTVAL (args_so_far)));
1597 else
1598 temp = memory_address (BLKmode,
1599 plus_constant (gen_rtx (PLUS, Pmode,
1600 args_addr, args_so_far),
1601 skip));
1602
1603 /* TEMP is the address of the block. Copy the data there. */
1604 if (GET_CODE (size) == CONST_INT
1605 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1606 < MOVE_RATIO))
1607 {
1608 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1609 INTVAL (size), align);
1610 goto ret;
1611 }
1612 /* Try the most limited insn first, because there's no point
1613 including more than one in the machine description unless
1614 the more limited one has some advantage. */
1615 #ifdef HAVE_movstrqi
1616 if (HAVE_movstrqi
1617 && GET_CODE (size) == CONST_INT
1618 && ((unsigned) INTVAL (size)
1619 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1620 {
1621 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1622 xinner, size, GEN_INT (align)));
1623 goto ret;
1624 }
1625 #endif
1626 #ifdef HAVE_movstrhi
1627 if (HAVE_movstrhi
1628 && GET_CODE (size) == CONST_INT
1629 && ((unsigned) INTVAL (size)
1630 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1631 {
1632 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1633 xinner, size, GEN_INT (align)));
1634 goto ret;
1635 }
1636 #endif
1637 #ifdef HAVE_movstrsi
1638 if (HAVE_movstrsi)
1639 {
1640 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1641 xinner, size, GEN_INT (align)));
1642 goto ret;
1643 }
1644 #endif
1645 #ifdef HAVE_movstrdi
1646 if (HAVE_movstrdi)
1647 {
1648 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1649 xinner, size, GEN_INT (align)));
1650 goto ret;
1651 }
1652 #endif
1653
1654 #ifndef ACCUMULATE_OUTGOING_ARGS
1655 /* If the source is referenced relative to the stack pointer,
1656 copy it to another register to stabilize it. We do not need
1657 to do this if we know that we won't be changing sp. */
1658
1659 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1660 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1661 temp = copy_to_reg (temp);
1662 #endif
1663
1664 /* Make inhibit_defer_pop nonzero around the library call
1665 to force it to pop the bcopy-arguments right away. */
1666 NO_DEFER_POP;
1667 #ifdef TARGET_MEM_FUNCTIONS
1668 emit_library_call (memcpy_libfunc, 1,
1669 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1670 size, Pmode);
1671 #else
1672 emit_library_call (bcopy_libfunc, 1,
1673 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1674 size, Pmode);
1675 #endif
1676 OK_DEFER_POP;
1677 }
1678 }
1679 else if (partial > 0)
1680 {
1681 /* Scalar partly in registers. */
1682
1683 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1684 int i;
1685 int not_stack;
1686 /* # words of start of argument
1687 that we must make space for but need not store. */
1688 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1689 int args_offset = INTVAL (args_so_far);
1690 int skip;
1691
1692 /* Push padding now if padding above and stack grows down,
1693 or if padding below and stack grows up.
1694 But if space already allocated, this has already been done. */
1695 if (extra && args_addr == 0
1696 && where_pad != none && where_pad != stack_direction)
1697 anti_adjust_stack (GEN_INT (extra));
1698
1699 /* If we make space by pushing it, we might as well push
1700 the real data. Otherwise, we can leave OFFSET nonzero
1701 and leave the space uninitialized. */
1702 if (args_addr == 0)
1703 offset = 0;
1704
1705 /* Now NOT_STACK gets the number of words that we don't need to
1706 allocate on the stack. */
1707 not_stack = partial - offset;
1708
1709 /* If the partial register-part of the arg counts in its stack size,
1710 skip the part of stack space corresponding to the registers.
1711 Otherwise, start copying to the beginning of the stack space,
1712 by setting SKIP to 0. */
1713 #ifndef REG_PARM_STACK_SPACE
1714 skip = 0;
1715 #else
1716 skip = not_stack;
1717 #endif
1718
1719 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1720 x = validize_mem (force_const_mem (mode, x));
1721
1722 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1723 SUBREGs of such registers are not allowed. */
1724 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1725 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1726 x = copy_to_reg (x);
1727
1728 /* Loop over all the words allocated on the stack for this arg. */
1729 /* We can do it by words, because any scalar bigger than a word
1730 has a size a multiple of a word. */
1731 #ifndef PUSH_ARGS_REVERSED
1732 for (i = not_stack; i < size; i++)
1733 #else
1734 for (i = size - 1; i >= not_stack; i--)
1735 #endif
1736 if (i >= not_stack + offset)
1737 emit_push_insn (operand_subword_force (x, i, mode),
1738 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
1739 0, args_addr,
1740 GEN_INT (args_offset + ((i - not_stack + skip)
1741 * UNITS_PER_WORD)));
1742 }
1743 else
1744 {
1745 rtx addr;
1746
1747 /* Push padding now if padding above and stack grows down,
1748 or if padding below and stack grows up.
1749 But if space already allocated, this has already been done. */
1750 if (extra && args_addr == 0
1751 && where_pad != none && where_pad != stack_direction)
1752 anti_adjust_stack (GEN_INT (extra));
1753
1754 #ifdef PUSH_ROUNDING
1755 if (args_addr == 0)
1756 addr = gen_push_operand ();
1757 else
1758 #endif
1759 if (GET_CODE (args_so_far) == CONST_INT)
1760 addr
1761 = memory_address (mode,
1762 plus_constant (args_addr, INTVAL (args_so_far)));
1763 else
1764 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
1765 args_so_far));
1766
1767 emit_move_insn (gen_rtx (MEM, mode, addr), x);
1768 }
1769
1770 ret:
1771 /* If part should go in registers, copy that part
1772 into the appropriate registers. Do this now, at the end,
1773 since mem-to-mem copies above may do function calls. */
1774 if (partial > 0)
1775 move_block_to_reg (REGNO (reg), x, partial, mode);
1776
1777 if (extra && args_addr == 0 && where_pad == stack_direction)
1778 anti_adjust_stack (GEN_INT (extra));
1779 }
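
#if 0
/* Illustrative sketch, not compiled: pushing a word-sized scalar VAL with
   real push insns (ARGS_ADDR of 0), no partial registers and no padding;
   modeled on the call in emit_library_call below.  */
emit_push_insn (val, SImode, NULL_TREE, NULL_RTX, UNITS_PER_WORD,
                0, NULL_RTX, 0, NULL_RTX, const0_rtx);
#endif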
1780 \f
1781 /* Output a library call to function FUN (a SYMBOL_REF rtx)
1782 (emitting the queue unless NO_QUEUE is nonzero),
1783 for a value of mode OUTMODE,
1784 with NARGS different arguments, passed as alternating rtx values
1785 and machine_modes to convert them to.
1786 The rtx values should have been passed through protect_from_queue already.
1787
1788 NO_QUEUE will be true if and only if the library call is a `const' call
1789 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
1790 to the variable is_const in expand_call. */
1791
1792 void
1793 emit_library_call (va_alist)
1794 va_dcl
1795 {
1796 va_list p;
1797 struct args_size args_size;
1798 register int argnum;
1799 enum machine_mode outmode;
1800 int nargs;
1801 rtx fun;
1802 rtx orgfun;
1803 int inc;
1804 int count;
1805 rtx argblock = 0;
1806 CUMULATIVE_ARGS args_so_far;
1807 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
1808 struct args_size offset; struct args_size size; };
1809 struct arg *argvec;
1810 int old_inhibit_defer_pop = inhibit_defer_pop;
1811 int no_queue = 0;
1812 rtx use_insns;
1813
1814 va_start (p);
1815 orgfun = fun = va_arg (p, rtx);
1816 no_queue = va_arg (p, int);
1817 outmode = va_arg (p, enum machine_mode);
1818 nargs = va_arg (p, int);
1819
1820 /* Copy all the libcall-arguments out of the varargs data
1821 and into a vector ARGVEC.
1822
1823 Compute how to pass each argument. We only support a very small subset
1824 of the full argument passing conventions to limit complexity here since
1825 library functions shouldn't have many args. */
1826
1827 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
1828
1829 INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);
1830
1831 args_size.constant = 0;
1832 args_size.var = 0;
1833
1834 for (count = 0; count < nargs; count++)
1835 {
1836 rtx val = va_arg (p, rtx);
1837 enum machine_mode mode = va_arg (p, enum machine_mode);
1838
1839 /* We cannot convert the arg value to the mode the library wants here;
1840 must do it earlier where we know the signedness of the arg. */
1841 if (mode == BLKmode
1842 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
1843 abort ();
1844
1845 /* On some machines, there's no way to pass a float to a library fcn.
1846 Pass it as a double instead. */
1847 #ifdef LIBGCC_NEEDS_DOUBLE
1848 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
1849 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
1850 #endif
1851
1852 /* There's no need to call protect_from_queue, because
1853 either emit_move_insn or emit_push_insn will do that. */
1854
1855 /* Make sure it is a reasonable operand for a move or push insn. */
1856 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
1857 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
1858 val = force_operand (val, NULL_RTX);
1859
1860 argvec[count].value = val;
1861 argvec[count].mode = mode;
1862
1863 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1864 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
1865 abort ();
1866 #endif
1867
1868 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1869 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
1870 abort ();
1871 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1872 argvec[count].partial
1873 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
1874 #else
1875 argvec[count].partial = 0;
1876 #endif
1877
1878 locate_and_pad_parm (mode, NULL_TREE,
1879 argvec[count].reg && argvec[count].partial == 0,
1880 NULL_TREE, &args_size, &argvec[count].offset,
1881 &argvec[count].size);
1882
1883 if (argvec[count].size.var)
1884 abort ();
1885
1886 #ifndef REG_PARM_STACK_SPACE
1887 if (argvec[count].partial)
1888 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
1889 #endif
1890
1891 if (argvec[count].reg == 0 || argvec[count].partial != 0
1892 #ifdef REG_PARM_STACK_SPACE
1893 || 1
1894 #endif
1895 )
1896 args_size.constant += argvec[count].size.constant;
1897
1898 #ifdef ACCUMULATE_OUTGOING_ARGS
1899 /* If this arg is actually passed on the stack, it might be
1900 clobbering something we already put there (this library call might
1901 be inside the evaluation of an argument to a function whose call
1902 requires the stack). This will only occur when the library call
1903 has sufficient args to run out of argument registers. Abort in
1904 this case; if this ever occurs, code must be added to save and
1905 restore the arg slot. */
1906
1907 if (argvec[count].reg == 0 || argvec[count].partial != 0)
1908 abort ();
1909 #endif
1910
1911 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
1912 }
1913 va_end (p);
1914
1915 /* If this machine requires an external definition for library
1916 functions, write one out. */
1917 assemble_external_libcall (fun);
1918
1919 #ifdef STACK_BOUNDARY
1920 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1921 / STACK_BYTES) * STACK_BYTES);
1922 #endif
1923
1924 #ifdef REG_PARM_STACK_SPACE
1925 args_size.constant = MAX (args_size.constant,
1926 REG_PARM_STACK_SPACE ((tree) 0));
1927 #endif
1928
1929 #ifdef ACCUMULATE_OUTGOING_ARGS
1930 if (args_size.constant > current_function_outgoing_args_size)
1931 current_function_outgoing_args_size = args_size.constant;
1932 args_size.constant = 0;
1933 #endif
1934
1935 #ifndef PUSH_ROUNDING
1936 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
1937 #endif
1938
1939 #ifdef PUSH_ARGS_REVERSED
1940 inc = -1;
1941 argnum = nargs - 1;
1942 #else
1943 inc = 1;
1944 argnum = 0;
1945 #endif
1946
1947 /* Push the args that need to be pushed. */
1948
1949 for (count = 0; count < nargs; count++, argnum += inc)
1950 {
1951 register enum machine_mode mode = argvec[argnum].mode;
1952 register rtx val = argvec[argnum].value;
1953 rtx reg = argvec[argnum].reg;
1954 int partial = argvec[argnum].partial;
1955
1956 if (! (reg != 0 && partial == 0))
1957 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
1958 argblock, GEN_INT (argvec[argnum].offset.constant));
1959 NO_DEFER_POP;
1960 }
1961
1962 #ifdef PUSH_ARGS_REVERSED
1963 argnum = nargs - 1;
1964 #else
1965 argnum = 0;
1966 #endif
1967
1968 /* Now load any reg parms into their regs. */
1969
1970 for (count = 0; count < nargs; count++, argnum += inc)
1971 {
1972 register enum machine_mode mode = argvec[argnum].mode;
1973 register rtx val = argvec[argnum].value;
1974 rtx reg = argvec[argnum].reg;
1975 int partial = argvec[argnum].partial;
1976
1977 if (reg != 0 && partial == 0)
1978 emit_move_insn (reg, val);
1979 NO_DEFER_POP;
1980 }
1981
1982 /* For version 1.37, try deleting this entirely. */
1983 if (! no_queue)
1984 emit_queue ();
1985
1986 /* Any regs containing parms remain in use through the call. */
1987 start_sequence ();
1988 for (count = 0; count < nargs; count++)
1989 if (argvec[count].reg != 0)
1990 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
1991
1992 use_insns = get_insns ();
1993 end_sequence ();
1994
1995 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
1996
1997 /* Don't allow popping to be deferred, since then
1998 cse'ing of library calls could delete a call and leave the pop. */
1999 NO_DEFER_POP;
2000
2001 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2002 will set inhibit_defer_pop to that value. */
2003
2004 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2005 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2006 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2007 old_inhibit_defer_pop + 1, use_insns, no_queue);
2008
2009 /* Now restore inhibit_defer_pop to its actual original value. */
2010 OK_DEFER_POP;
2011 }
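/* For illustration, the memcpy call emitted by expand_assignment
   below invokes this function with NO_QUEUE nonzero, OUTMODE ==
   VOIDmode (the return value is unused) and three Pmode operands:

     emit_library_call (memcpy_libfunc, 1, VOIDmode, 3,
                        dst_addr, Pmode, src_addr, Pmode, size, Pmode);

   DST_ADDR, SRC_ADDR and SIZE stand for illustrative rtx operands.  */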
2012 \f
2013 /* Expand an assignment that stores the value of FROM into TO.
2014 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2015 (This may contain a QUEUED rtx.)
2016 Otherwise, the returned value is not meaningful.
2017
2018 SUGGEST_REG is no longer actually used.
2019 It used to mean, copy the value through a register
2020 and return that register, if that is possible.
2021 But now we do this if WANT_VALUE.
2022
2023 If the value stored is a constant, we return the constant. */
2024
2025 rtx
2026 expand_assignment (to, from, want_value, suggest_reg)
2027 tree to, from;
2028 int want_value;
2029 int suggest_reg;
2030 {
2031 register rtx to_rtx = 0;
2032 rtx result;
2033
2034 /* Don't crash if the lhs of the assignment was erroneous. */
2035
2036 if (TREE_CODE (to) == ERROR_MARK)
2037 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2038
2039 /* Assignment of a structure component needs special treatment
2040 if the structure component's rtx is not simply a MEM.
2041 Assignment of an array element at a constant index
2042 has the same problem. */
2043
2044 if (TREE_CODE (to) == COMPONENT_REF
2045 || TREE_CODE (to) == BIT_FIELD_REF
2046 || (TREE_CODE (to) == ARRAY_REF
2047 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2048 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2049 {
2050 enum machine_mode mode1;
2051 int bitsize;
2052 int bitpos;
2053 tree offset;
2054 int unsignedp;
2055 int volatilep = 0;
2056 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2057 &mode1, &unsignedp, &volatilep);
2058
2059 /* If we are going to use store_bit_field and extract_bit_field,
2060 make sure to_rtx will be safe for multiple use. */
2061
2062 if (mode1 == VOIDmode && want_value)
2063 tem = stabilize_reference (tem);
2064
2065 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2066 if (offset != 0)
2067 {
2068 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2069
2070 if (GET_CODE (to_rtx) != MEM)
2071 abort ();
2072 to_rtx = change_address (to_rtx, VOIDmode,
2073 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2074 force_reg (Pmode, offset_rtx)));
2075 }
2076 if (volatilep)
2077 {
2078 if (GET_CODE (to_rtx) == MEM)
2079 MEM_VOLATILE_P (to_rtx) = 1;
2080 #if 0 /* This was turned off because, when a field is volatile
2081 in an object which is not volatile, the object may be in a register,
2082 and then we would abort here. */
2083 else
2084 abort ();
2085 #endif
2086 }
2087
2088 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2089 (want_value
2090 /* Spurious cast makes HPUX compiler happy. */
2091 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2092 : VOIDmode),
2093 unsignedp,
2094 /* Required alignment of containing datum. */
2095 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2096 int_size_in_bytes (TREE_TYPE (tem)));
2097 preserve_temp_slots (result);
2098 free_temp_slots ();
2099
2100 return result;
2101 }
2102
2103 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2104 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2105
2106 if (to_rtx == 0)
2107 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2108
2109 /* In case we are returning the contents of an object which overlaps
2110 the place the value is being stored, use a safe function when copying
2111 a value through a pointer into a structure value return block. */
2112 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2113 && current_function_returns_struct
2114 && !current_function_returns_pcc_struct)
2115 {
2116 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2117 rtx size = expr_size (from);
2118
2119 #ifdef TARGET_MEM_FUNCTIONS
2120 emit_library_call (memcpy_libfunc, 1,
2121 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2122 XEXP (from_rtx, 0), Pmode,
2123 size, Pmode);
2124 #else
2125 emit_library_call (bcopy_libfunc, 1,
2126 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2127 XEXP (to_rtx, 0), Pmode,
2128 size, Pmode);
2129 #endif
2130
2131 preserve_temp_slots (to_rtx);
2132 free_temp_slots ();
2133 return to_rtx;
2134 }
2135
2136 /* Compute FROM and store the value in the rtx we got. */
2137
2138 result = store_expr (from, to_rtx, want_value);
2139 preserve_temp_slots (result);
2140 free_temp_slots ();
2141 return result;
2142 }
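/* A sketch of the component-ref path above: for an assignment such as
   `x.f = i', where F is a 5-bit field of X, get_inner_reference
   reports BITSIZE = 5 with MODE1 = VOIDmode, so the store goes
   through store_field (and ultimately store_bit_field) rather than a
   plain move.  The declaration is hypothetical; the exact BITPOS
   depends on the target's layout.  */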
2143
2144 /* Generate code for computing expression EXP,
2145 and storing the value into TARGET.
2146 Returns TARGET or an equivalent value.
2147 TARGET may contain a QUEUED rtx.
2148
2149 If SUGGEST_REG is nonzero, copy the value through a register
2150 and return that register, if that is possible.
2151
2152 If the value stored is a constant, we return the constant. */
2153
2154 rtx
2155 store_expr (exp, target, suggest_reg)
2156 register tree exp;
2157 register rtx target;
2158 int suggest_reg;
2159 {
2160 register rtx temp;
2161 int dont_return_target = 0;
2162
2163 if (TREE_CODE (exp) == COMPOUND_EXPR)
2164 {
2165 /* Perform first part of compound expression, then assign from second
2166 part. */
2167 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2168 emit_queue ();
2169 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2170 }
2171 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2172 {
2173 /* For conditional expression, get safe form of the target. Then
2174 test the condition, doing the appropriate assignment on either
2175 side. This avoids the creation of unnecessary temporaries.
2176 For non-BLKmode, it is more efficient not to do this. */
2177
2178 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2179
2180 emit_queue ();
2181 target = protect_from_queue (target, 1);
2182
2183 NO_DEFER_POP;
2184 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2185 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2186 emit_queue ();
2187 emit_jump_insn (gen_jump (lab2));
2188 emit_barrier ();
2189 emit_label (lab1);
2190 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2191 emit_queue ();
2192 emit_label (lab2);
2193 OK_DEFER_POP;
2194 return target;
2195 }
2196 else if (suggest_reg && GET_CODE (target) == MEM
2197 && GET_MODE (target) != BLKmode)
2198 /* If target is in memory and caller wants value in a register instead,
2199 arrange that. Pass TARGET as target for expand_expr so that,
2200 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2201 We know expand_expr will not use the target in that case. */
2202 {
2203 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2204 GET_MODE (target), 0);
2205 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2206 temp = copy_to_reg (temp);
2207 dont_return_target = 1;
2208 }
2209 else if (queued_subexp_p (target))
2210 /* If target contains a postincrement, it is not safe
2211 to use as the returned value. It would access the wrong
2212 place by the time the queued increment gets output.
2213 So copy the value through a temporary and use that temp
2214 as the result. */
2215 {
2216 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2217 {
2218 /* Expand EXP into a new pseudo. */
2219 temp = gen_reg_rtx (GET_MODE (target));
2220 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2221 }
2222 else
2223 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2224 dont_return_target = 1;
2225 }
2226 else
2227 {
2228 temp = expand_expr (exp, target, GET_MODE (target), 0);
2229 /* DO return TARGET if it's a specified hardware register.
2230 expand_return relies on this. */
2231 if (!(target && GET_CODE (target) == REG
2232 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2233 && CONSTANT_P (temp))
2234 dont_return_target = 1;
2235 }
2236
2237 /* If value was not generated in the target, store it there.
2238 Convert the value to TARGET's type first if necessary. */
2239
2240 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2241 {
2242 target = protect_from_queue (target, 1);
2243 if (GET_MODE (temp) != GET_MODE (target)
2244 && GET_MODE (temp) != VOIDmode)
2245 {
2246 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2247 if (dont_return_target)
2248 {
2249 /* In this case, we will return TEMP,
2250 so make sure it has the proper mode.
2251 But don't forget to store the value into TARGET. */
2252 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2253 emit_move_insn (target, temp);
2254 }
2255 else
2256 convert_move (target, temp, unsignedp);
2257 }
2258
2259 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2260 {
2261 /* Handle copying a string constant into an array.
2262 The string constant may be shorter than the array.
2263 So copy just the string's actual length, and clear the rest. */
2264 rtx size;
2265
2266 /* Get the size of the data type of the string,
2267 which is actually the size of the target. */
2268 size = expr_size (exp);
2269 if (GET_CODE (size) == CONST_INT
2270 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2271 emit_block_move (target, temp, size,
2272 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2273 else
2274 {
2275 /* Compute the size of the data to copy from the string. */
2276 tree copy_size
2277 = fold (build (MIN_EXPR, sizetype,
2278 size_binop (CEIL_DIV_EXPR,
2279 TYPE_SIZE (TREE_TYPE (exp)),
2280 size_int (BITS_PER_UNIT)),
2281 convert (sizetype,
2282 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2283 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2284 VOIDmode, 0);
2285 rtx label = 0;
2286
2287 /* Copy that much. */
2288 emit_block_move (target, temp, copy_size_rtx,
2289 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2290
2291 /* Figure out how much is left in TARGET
2292 that we have to clear. */
2293 if (GET_CODE (copy_size_rtx) == CONST_INT)
2294 {
2295 temp = plus_constant (XEXP (target, 0),
2296 TREE_STRING_LENGTH (exp));
2297 size = plus_constant (size,
2298 - TREE_STRING_LENGTH (exp));
2299 }
2300 else
2301 {
2302 enum machine_mode size_mode = Pmode;
2303
2304 temp = force_reg (Pmode, XEXP (target, 0));
2305 temp = expand_binop (size_mode, add_optab, temp,
2306 copy_size_rtx, NULL_RTX, 0,
2307 OPTAB_LIB_WIDEN);
2308
2309 size = expand_binop (size_mode, sub_optab, size,
2310 copy_size_rtx, NULL_RTX, 0,
2311 OPTAB_LIB_WIDEN);
2312
2313 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2314 GET_MODE (size), 0, 0);
2315 label = gen_label_rtx ();
2316 emit_jump_insn (gen_blt (label));
2317 }
2318
2319 if (size != const0_rtx)
2320 {
2321 #ifdef TARGET_MEM_FUNCTIONS
2322 emit_library_call (memset_libfunc, 1, VOIDmode, 3,
2323 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2324 #else
2325 emit_library_call (bzero_libfunc, 1, VOIDmode, 2,
2326 temp, Pmode, size, Pmode);
2327 #endif
2328 }
2329 if (label)
2330 emit_label (label);
2331 }
2332 }
2333 else if (GET_MODE (temp) == BLKmode)
2334 emit_block_move (target, temp, expr_size (exp),
2335 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2336 else
2337 emit_move_insn (target, temp);
2338 }
2339 if (dont_return_target)
2340 return temp;
2341 return target;
2342 }
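/* Example of the STRING_CST path above: initializing a hypothetical
   `char buf[8]' from the shorter constant "hi" copies the string's
   bytes into the array with emit_block_move and then clears the
   array's remaining bytes with the memset (or bzero) library call.  */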
2343 \f
2344 /* Store the value of constructor EXP into the rtx TARGET.
2345 TARGET is either a REG or a MEM. */
2346
2347 static void
2348 store_constructor (exp, target)
2349 tree exp;
2350 rtx target;
2351 {
2352 tree type = TREE_TYPE (exp);
2353
2354 /* We know our target cannot conflict, since safe_from_p has been called. */
2355 #if 0
2356 /* Don't try copying piece by piece into a hard register
2357 since that is vulnerable to being clobbered by EXP.
2358 Instead, construct in a pseudo register and then copy it all. */
2359 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2360 {
2361 rtx temp = gen_reg_rtx (GET_MODE (target));
2362 store_constructor (exp, temp);
2363 emit_move_insn (target, temp);
2364 return;
2365 }
2366 #endif
2367
2368 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2369 {
2370 register tree elt;
2371
2372 /* Inform later passes that the whole union value is dead. */
2373 if (TREE_CODE (type) == UNION_TYPE)
2374 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2375
2376 /* If we are building a static constructor into a register,
2377 set the initial value as zero so we can fold the value into
2378 a constant. */
2379 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2380 emit_move_insn (target, const0_rtx);
2381
2382 /* If the constructor has fewer fields than the structure,
2383 clear the whole structure first. */
2384 else if (list_length (CONSTRUCTOR_ELTS (exp))
2385 != list_length (TYPE_FIELDS (type)))
2386 clear_storage (target, int_size_in_bytes (type));
2387 else
2388 /* Inform later passes that the old value is dead. */
2389 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2390
2391 /* Store each element of the constructor into
2392 the corresponding field of TARGET. */
2393
2394 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2395 {
2396 register tree field = TREE_PURPOSE (elt);
2397 register enum machine_mode mode;
2398 int bitsize;
2399 int bitpos;
2400 int unsignedp;
2401
2402 /* Just ignore missing fields.
2403 We cleared the whole structure, above,
2404 if any fields are missing. */
2405 if (field == 0)
2406 continue;
2407
2408 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2409 unsignedp = TREE_UNSIGNED (field);
2410 mode = DECL_MODE (field);
2411 if (DECL_BIT_FIELD (field))
2412 mode = VOIDmode;
2413
2414 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2415 /* ??? This case remains to be written. */
2416 abort ();
2417
2418 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2419
2420 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2421 /* The alignment of TARGET is
2422 at least what its type requires. */
2423 VOIDmode, 0,
2424 TYPE_ALIGN (type) / BITS_PER_UNIT,
2425 int_size_in_bytes (type));
2426 }
2427 }
2428 else if (TREE_CODE (type) == ARRAY_TYPE)
2429 {
2430 register tree elt;
2431 register int i;
2432 tree domain = TYPE_DOMAIN (type);
2433 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2434 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2435 tree elttype = TREE_TYPE (type);
2436
2437 /* If the constructor has fewer fields than the structure,
2438 clear the whole structure first. Similarly if this is a
2439 static constructor of a non-BLKmode object. */
2440
2441 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2442 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2443 clear_storage (target, maxelt - minelt + 1);
2444 else
2445 /* Inform later passes that the old value is dead. */
2446 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2447
2448 /* Store each element of the constructor into
2449 the corresponding element of TARGET, determined
2450 by counting the elements. */
2451 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2452 elt;
2453 elt = TREE_CHAIN (elt), i++)
2454 {
2455 register enum machine_mode mode;
2456 int bitsize;
2457 int bitpos;
2458 int unsignedp;
2459
2460 mode = TYPE_MODE (elttype);
2461 bitsize = GET_MODE_BITSIZE (mode);
2462 unsignedp = TREE_UNSIGNED (elttype);
2463
2464 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2465
2466 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2467 /* The alignment of TARGET is
2468 at least what its type requires. */
2469 VOIDmode, 0,
2470 TYPE_ALIGN (type) / BITS_PER_UNIT,
2471 int_size_in_bytes (type));
2472 }
2473 }
2474
2475 else
2476 abort ();
2477 }
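/* For instance, with the hypothetical initializer
   `struct { int a, b; } s = { 1 };' the constructor has fewer
   elements than the type has fields, so all of S is cleared with
   clear_storage first and only the field A is then stored.  */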
2478
2479 /* Store the value of EXP (an expression tree)
2480 into a subfield of TARGET which has mode MODE and occupies
2481 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2482 If MODE is VOIDmode, it means that we are storing into a bit-field.
2483
2484 If VALUE_MODE is VOIDmode, return nothing in particular.
2485 UNSIGNEDP is not used in this case.
2486
2487 Otherwise, return an rtx for the value stored. This rtx
2488 has mode VALUE_MODE if that is convenient to do.
2489 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2490
2491 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2492 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2493
2494 static rtx
2495 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2496 unsignedp, align, total_size)
2497 rtx target;
2498 int bitsize, bitpos;
2499 enum machine_mode mode;
2500 tree exp;
2501 enum machine_mode value_mode;
2502 int unsignedp;
2503 int align;
2504 int total_size;
2505 {
2506 HOST_WIDE_INT width_mask = 0;
2507
2508 if (bitsize < HOST_BITS_PER_WIDE_INT)
2509 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2510
2511 /* If we are storing into an unaligned field of an aligned union that is
2512 in a register, we may have the mode of TARGET being an integer mode but
2513 MODE == BLKmode. In that case, get an aligned object whose size and
2514 alignment are the same as TARGET and store TARGET into it (we can avoid
2515 the store if the field being stored is the entire width of TARGET). Then
2516 call ourselves recursively to store the field into a BLKmode version of
2517 that object. Finally, load from the object into TARGET. This is not
2518 very efficient in general, but should only be slightly more expensive
2519 than the otherwise-required unaligned accesses. Perhaps this can be
2520 cleaned up later. */
2521
2522 if (mode == BLKmode
2523 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2524 {
2525 rtx object = assign_stack_temp (GET_MODE (target),
2526 GET_MODE_SIZE (GET_MODE (target)), 0);
2527 rtx blk_object = copy_rtx (object);
2528
2529 PUT_MODE (blk_object, BLKmode);
2530
2531 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2532 emit_move_insn (object, target);
2533
2534 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2535 align, total_size);
2536
2537 emit_move_insn (target, object);
2538
2539 return target;
2540 }
2541
2542 /* If the structure is in a register or if the component
2543 is a bit field, we cannot use addressing to access it.
2544 Use bit-field techniques or SUBREG to store in it. */
2545
2546 if (mode == VOIDmode
2547 || (mode != BLKmode && ! direct_store[(int) mode])
2548 || GET_CODE (target) == REG
2549 || GET_CODE (target) == SUBREG)
2550 {
2551 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2552 /* Store the value in the bitfield. */
2553 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2554 if (value_mode != VOIDmode)
2555 {
2556 /* The caller wants an rtx for the value. */
2557 /* If possible, avoid refetching from the bitfield itself. */
2558 if (width_mask != 0
2559 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2560 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2561 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2562 NULL_RTX, value_mode, 0, align,
2563 total_size);
2564 }
2565 return const0_rtx;
2566 }
2567 else
2568 {
2569 rtx addr = XEXP (target, 0);
2570 rtx to_rtx;
2571
2572 /* If a value is wanted, it must be the lhs;
2573 so make the address stable for multiple use. */
2574
2575 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2576 && ! CONSTANT_ADDRESS_P (addr)
2577 /* A frame-pointer reference is already stable. */
2578 && ! (GET_CODE (addr) == PLUS
2579 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2580 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2581 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2582 addr = copy_to_reg (addr);
2583
2584 /* Now build a reference to just the desired component. */
2585
2586 to_rtx = change_address (target, mode,
2587 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2588 MEM_IN_STRUCT_P (to_rtx) = 1;
2589
2590 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2591 }
2592 }
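/* As a sketch of the bit-field case above: storing into a 5-bit
   field gives WIDTH_MASK = 0x1f, so when the caller wants the stored
   value back it is produced by masking TEMP with expand_and rather
   than by refetching it with extract_bit_field, provided the target
   is not a volatile MEM.  */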
2593 \f
2594 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2595 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2596 ARRAY_REFs at constant positions and find the ultimate containing object,
2597 which we return.
2598
2599 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2600 bit position, and *PUNSIGNEDP to the signedness of the field.
2601 If the position of the field is variable, we store a tree
2602 giving the variable offset (in units) in *POFFSET.
2603 This offset is in addition to the bit position.
2604 If the position is not variable, we store 0 in *POFFSET.
2605
2606 If any of the extraction expressions is volatile,
2607 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2608
2609 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2610 is a mode that can be used to access the field. In that case, *PBITSIZE
2611 is redundant.
2612
2613 If the field describes a variable-sized object, *PMODE is set to
2614 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2615 this case, but the address of the object can be found. */
2616
2617 tree
2618 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2619 tree exp;
2620 int *pbitsize;
2621 int *pbitpos;
2622 tree *poffset;
2623 enum machine_mode *pmode;
2624 int *punsignedp;
2625 int *pvolatilep;
2626 {
2627 tree size_tree = 0;
2628 enum machine_mode mode = VOIDmode;
2629 tree offset = 0;
2630
2631 if (TREE_CODE (exp) == COMPONENT_REF)
2632 {
2633 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2634 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2635 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2636 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2637 }
2638 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2639 {
2640 size_tree = TREE_OPERAND (exp, 1);
2641 *punsignedp = TREE_UNSIGNED (exp);
2642 }
2643 else
2644 {
2645 mode = TYPE_MODE (TREE_TYPE (exp));
2646 *pbitsize = GET_MODE_BITSIZE (mode);
2647 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2648 }
2649
2650 if (size_tree)
2651 {
2652 if (TREE_CODE (size_tree) != INTEGER_CST)
2653 mode = BLKmode, *pbitsize = -1;
2654 else
2655 *pbitsize = TREE_INT_CST_LOW (size_tree);
2656 }
2657
2658 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2659 and find the ultimate containing object. */
2660
2661 *pbitpos = 0;
2662
2663 while (1)
2664 {
2665 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2666 {
2667 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2668 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2669 : TREE_OPERAND (exp, 2));
2670
2671 if (TREE_CODE (pos) == PLUS_EXPR)
2672 {
2673 tree constant, var;
2674 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2675 {
2676 constant = TREE_OPERAND (pos, 0);
2677 var = TREE_OPERAND (pos, 1);
2678 }
2679 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2680 {
2681 constant = TREE_OPERAND (pos, 1);
2682 var = TREE_OPERAND (pos, 0);
2683 }
2684 else
2685 abort ();
2686 *pbitpos += TREE_INT_CST_LOW (constant);
2687 if (offset)
2688 offset = size_binop (PLUS_EXPR, offset,
2689 size_binop (FLOOR_DIV_EXPR, var,
2690 size_int (BITS_PER_UNIT)));
2691 else
2692 offset = size_binop (FLOOR_DIV_EXPR, var,
2693 size_int (BITS_PER_UNIT));
2694 }
2695 else if (TREE_CODE (pos) == INTEGER_CST)
2696 *pbitpos += TREE_INT_CST_LOW (pos);
2697 else
2698 {
2699 /* Assume here that the offset is a multiple of a unit.
2700 If not, there should be an explicitly added constant. */
2701 if (offset)
2702 offset = size_binop (PLUS_EXPR, offset,
2703 size_binop (FLOOR_DIV_EXPR, pos,
2704 size_int (BITS_PER_UNIT)));
2705 else
2706 offset = size_binop (FLOOR_DIV_EXPR, pos,
2707 size_int (BITS_PER_UNIT));
2708 }
2709 }
2710
2711 else if (TREE_CODE (exp) == ARRAY_REF
2712 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2713 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2714 {
2715 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2716 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2717 }
2718 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2719 && ! ((TREE_CODE (exp) == NOP_EXPR
2720 || TREE_CODE (exp) == CONVERT_EXPR)
2721 && (TYPE_MODE (TREE_TYPE (exp))
2722 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2723 break;
2724
2725 /* If any reference in the chain is volatile, the effect is volatile. */
2726 if (TREE_THIS_VOLATILE (exp))
2727 *pvolatilep = 1;
2728 exp = TREE_OPERAND (exp, 0);
2729 }
2730
2731 /* If this was a bit-field, see if there is a mode that allows direct
2732 access in case EXP is in memory. */
2733 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2734 {
2735 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2736 if (mode == BLKmode)
2737 mode = VOIDmode;
2738 }
2739
2740 *pmode = mode;
2741 *poffset = offset;
2742 #if 0
2743 /* We aren't finished fixing the callers to really handle nonzero offset. */
2744 if (offset != 0)
2745 abort ();
2746 #endif
2747
2748 return exp;
2749 }
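/* Example: for the reference `a[3]', where A is a hypothetical array
   of 32-bit ints, the ARRAY_REF arm above computes
   *PBITPOS = 3 * 32 = 96 with *PBITSIZE = 32 and *POFFSET = 0, and A
   itself is returned as the containing object.  */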
2750 \f
2751 /* Given an rtx VALUE that may contain additions and multiplications,
2752 return an equivalent value that just refers to a register or memory.
2753 This is done by generating instructions to perform the arithmetic
2754 and returning a pseudo-register containing the value. */
2755
2756 rtx
2757 force_operand (value, target)
2758 rtx value, target;
2759 {
2760 register optab binoptab = 0;
2761 /* Use a temporary to force order of execution of calls to
2762 `force_operand'. */
2763 rtx tmp;
2764 register rtx op2;
2765 /* Use subtarget as the target for operand 0 of a binary operation. */
2766 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2767
2768 if (GET_CODE (value) == PLUS)
2769 binoptab = add_optab;
2770 else if (GET_CODE (value) == MINUS)
2771 binoptab = sub_optab;
2772 else if (GET_CODE (value) == MULT)
2773 {
2774 op2 = XEXP (value, 1);
2775 if (!CONSTANT_P (op2)
2776 && !(GET_CODE (op2) == REG && op2 != subtarget))
2777 subtarget = 0;
2778 tmp = force_operand (XEXP (value, 0), subtarget);
2779 return expand_mult (GET_MODE (value), tmp,
2780 force_operand (op2, NULL_RTX),
2781 target, 0);
2782 }
2783
2784 if (binoptab)
2785 {
2786 op2 = XEXP (value, 1);
2787 if (!CONSTANT_P (op2)
2788 && !(GET_CODE (op2) == REG && op2 != subtarget))
2789 subtarget = 0;
2790 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2791 {
2792 binoptab = add_optab;
2793 op2 = negate_rtx (GET_MODE (value), op2);
2794 }
2795
2796 /* Check for an addition with OP2 a constant integer and our first
2797 operand a PLUS of a virtual register and something else. In that
2798 case, we want to emit the sum of the virtual register and the
2799 constant first and then add the other value. This allows virtual
2800 register instantiation to simply modify the constant rather than
2801 creating another one around this addition. */
2802 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2803 && GET_CODE (XEXP (value, 0)) == PLUS
2804 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2805 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2806 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2807 {
2808 rtx temp = expand_binop (GET_MODE (value), binoptab,
2809 XEXP (XEXP (value, 0), 0), op2,
2810 subtarget, 0, OPTAB_LIB_WIDEN);
2811 return expand_binop (GET_MODE (value), binoptab, temp,
2812 force_operand (XEXP (XEXP (value, 0), 1), 0),
2813 target, 0, OPTAB_LIB_WIDEN);
2814 }
2815
2816 tmp = force_operand (XEXP (value, 0), subtarget);
2817 return expand_binop (GET_MODE (value), binoptab, tmp,
2818 force_operand (op2, NULL_RTX),
2819 target, 0, OPTAB_LIB_WIDEN);
2820 /* We give UNSIGNEDP = 0 to expand_binop
2821 because the only operations we are expanding here are signed ones. */
2822 }
2823 return value;
2824 }
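/* E.g., given VALUE = (plus:SI (reg:SI 60) (const_int 8)) -- register
   number hypothetical -- the code above emits the addition through
   expand_binop and returns a pseudo holding the sum; a MINUS with a
   constant second operand is first rewritten as an addition of the
   negated constant.  */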
2825 \f
2826 /* Subroutine of expand_expr:
2827 save the non-copied parts (LIST) of an expr (LHS), and return a list
2828 which can restore these values to their previous values,
2829 should something modify their storage. */
2830
2831 static tree
2832 save_noncopied_parts (lhs, list)
2833 tree lhs;
2834 tree list;
2835 {
2836 tree tail;
2837 tree parts = 0;
2838
2839 for (tail = list; tail; tail = TREE_CHAIN (tail))
2840 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2841 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2842 else
2843 {
2844 tree part = TREE_VALUE (tail);
2845 tree part_type = TREE_TYPE (part);
2846 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2847 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2848 int_size_in_bytes (part_type), 0);
2849 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2850 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
2851 parts = tree_cons (to_be_saved,
2852 build (RTL_EXPR, part_type, NULL_TREE,
2853 (tree) target),
2854 parts);
2855 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2856 }
2857 return parts;
2858 }
2859
2860 /* Subroutine of expand_expr:
2861 record the non-copied parts (LIST) of an expr (LHS), and return a list
2862 which specifies the initial values of these parts. */
2863
2864 static tree
2865 init_noncopied_parts (lhs, list)
2866 tree lhs;
2867 tree list;
2868 {
2869 tree tail;
2870 tree parts = 0;
2871
2872 for (tail = list; tail; tail = TREE_CHAIN (tail))
2873 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2874 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2875 else
2876 {
2877 tree part = TREE_VALUE (tail);
2878 tree part_type = TREE_TYPE (part);
2879 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
2880 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2881 }
2882 return parts;
2883 }
2884
2885 /* Subroutine of expand_expr: return nonzero iff there is no way that
2886 EXP can reference X, which is being modified. */
2887
2888 static int
2889 safe_from_p (x, exp)
2890 rtx x;
2891 tree exp;
2892 {
2893 rtx exp_rtl = 0;
2894 int i, nops;
2895
2896 if (x == 0)
2897 return 1;
2898
2899 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
2900 find the underlying pseudo. */
2901 if (GET_CODE (x) == SUBREG)
2902 {
2903 x = SUBREG_REG (x);
2904 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2905 return 0;
2906 }
2907
2908 /* If X is a location in the outgoing argument area, it is always safe. */
2909 if (GET_CODE (x) == MEM
2910 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2911 || (GET_CODE (XEXP (x, 0)) == PLUS
2912 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2913 return 1;
2914
2915 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2916 {
2917 case 'd':
2918 exp_rtl = DECL_RTL (exp);
2919 break;
2920
2921 case 'c':
2922 return 1;
2923
2924 case 'x':
2925 if (TREE_CODE (exp) == TREE_LIST)
2926 return ((TREE_VALUE (exp) == 0
2927 || safe_from_p (x, TREE_VALUE (exp)))
2928 && (TREE_CHAIN (exp) == 0
2929 || safe_from_p (x, TREE_CHAIN (exp))));
2930 else
2931 return 0;
2932
2933 case '1':
2934 return safe_from_p (x, TREE_OPERAND (exp, 0));
2935
2936 case '2':
2937 case '<':
2938 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2939 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2940
2941 case 'e':
2942 case 'r':
2943 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2944 the expression. If it is set, we conflict iff we are that rtx or
2945 both are in memory. Otherwise, we check all operands of the
2946 expression recursively. */
2947
2948 switch (TREE_CODE (exp))
2949 {
2950 case ADDR_EXPR:
2951 return staticp (TREE_OPERAND (exp, 0));
2952
2953 case INDIRECT_REF:
2954 if (GET_CODE (x) == MEM)
2955 return 0;
2956 break;
2957
2958 case CALL_EXPR:
2959 exp_rtl = CALL_EXPR_RTL (exp);
2960 if (exp_rtl == 0)
2961 {
2962 /* Assume that the call will clobber all hard registers and
2963 all of memory. */
2964 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2965 || GET_CODE (x) == MEM)
2966 return 0;
2967 }
2968
2969 break;
2970
2971 case RTL_EXPR:
2972 exp_rtl = RTL_EXPR_RTL (exp);
2973 if (exp_rtl == 0)
2974 /* We don't know what this can modify. */
2975 return 0;
2976
2977 break;
2978
2979 case WITH_CLEANUP_EXPR:
2980 exp_rtl = RTL_EXPR_RTL (exp);
2981 break;
2982
2983 case SAVE_EXPR:
2984 exp_rtl = SAVE_EXPR_RTL (exp);
2985 break;
2986
2987 case BIND_EXPR:
2988 /* The only operand we look at is operand 1. The rest aren't
2989 part of the expression. */
2990 return safe_from_p (x, TREE_OPERAND (exp, 1));
2991
2992 case METHOD_CALL_EXPR:
2993 /* This takes an rtx argument, but shouldn't appear here. */
2994 abort ();
2995 }
2996
2997 /* If we have an rtx, we do not need to scan our operands. */
2998 if (exp_rtl)
2999 break;
3000
3001 nops = tree_code_length[(int) TREE_CODE (exp)];
3002 for (i = 0; i < nops; i++)
3003 if (TREE_OPERAND (exp, i) != 0
3004 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3005 return 0;
3006 }
3007
3008 /* If we have an rtl, find any enclosed object. Then see if we conflict
3009 with it. */
3010 if (exp_rtl)
3011 {
3012 if (GET_CODE (exp_rtl) == SUBREG)
3013 {
3014 exp_rtl = SUBREG_REG (exp_rtl);
3015 if (GET_CODE (exp_rtl) == REG
3016 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3017 return 0;
3018 }
3019
3020 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3021 are memory and EXP is not readonly. */
3022 return ! (rtx_equal_p (x, exp_rtl)
3023 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3024 && ! TREE_READONLY (exp)));
3025 }
3026
3027 /* If we reach here, it is safe. */
3028 return 1;
3029 }
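/* For instance, if X is a stack temporary (a MEM) and EXP contains a
   CALL_EXPR whose rtl is not yet known, the CALL_EXPR arm above
   returns 0, since the call must be assumed to clobber all of
   memory.  */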
3030
3031 /* Subroutine of expand_expr: return nonzero iff EXP is an
3032 expression whose type is statically determinable. */
3033
3034 static int
3035 fixed_type_p (exp)
3036 tree exp;
3037 {
3038 if (TREE_CODE (exp) == PARM_DECL
3039 || TREE_CODE (exp) == VAR_DECL
3040 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3041 || TREE_CODE (exp) == COMPONENT_REF
3042 || TREE_CODE (exp) == ARRAY_REF)
3043 return 1;
3044 return 0;
3045 }
3046 \f
3047 /* expand_expr: generate code for computing expression EXP.
3048 An rtx for the computed value is returned. The value is never null.
3049 In the case of a void EXP, const0_rtx is returned.
3050
3051 The value may be stored in TARGET if TARGET is nonzero.
3052 TARGET is just a suggestion; callers must assume that
3053 the rtx returned may not be the same as TARGET.
3054
3055 If TARGET is CONST0_RTX, it means that the value will be ignored.
3056
3057 If TMODE is not VOIDmode, it suggests generating the
3058 result in mode TMODE. But this is done only when convenient.
3059 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3060 TMODE is just a suggestion; callers must assume that
3061 the rtx returned may not have mode TMODE.
3062
3063 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3064 with a constant address even if that address is not normally legitimate.
3065 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3066
3067 If MODIFIER is EXPAND_SUM then when EXP is an addition
3068 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3069 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3070 products as above, or REG or MEM, or constant.
3071 Ordinarily in such cases we would output mul or add instructions
3072 and then return a pseudo reg containing the sum.
3073
3074 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3075 it also marks a label as absolutely required (it can't be dead).
3076 This is used for outputting expressions used in initializers. */
3077
3078 rtx
3079 expand_expr (exp, target, tmode, modifier)
3080 register tree exp;
3081 rtx target;
3082 enum machine_mode tmode;
3083 enum expand_modifier modifier;
3084 {
3085 register rtx op0, op1, temp;
3086 tree type = TREE_TYPE (exp);
3087 int unsignedp = TREE_UNSIGNED (type);
3088 register enum machine_mode mode = TYPE_MODE (type);
3089 register enum tree_code code = TREE_CODE (exp);
3090 optab this_optab;
3091 /* Use subtarget as the target for operand 0 of a binary operation. */
3092 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3093 rtx original_target = target;
3094 int ignore = target == const0_rtx;
3095 tree context;
3096
3097 /* Don't use hard regs as subtargets, because the combiner
3098 can only handle pseudo regs. */
3099 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3100 subtarget = 0;
3101 /* Avoid subtargets inside loops,
3102 since they hide some invariant expressions. */
3103 if (preserve_subexpressions_p ())
3104 subtarget = 0;
3105
3106 if (ignore) target = 0, original_target = 0;
3107
3108 /* If we will do cse, generate all results into pseudo registers
3109 since 1) that allows cse to find more things
3110 and 2) otherwise cse could produce an insn the machine
3111 cannot support. */
3112
3113 if (! cse_not_expected && mode != BLKmode && target
3114 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3115 target = subtarget;
3116
3117 /* Ensure we reference a volatile object even if value is ignored. */
3118 if (ignore && TREE_THIS_VOLATILE (exp)
3119 && mode != VOIDmode && mode != BLKmode)
3120 {
3121 target = gen_reg_rtx (mode);
3122 temp = expand_expr (exp, target, VOIDmode, modifier);
3123 if (temp != target)
3124 emit_move_insn (target, temp);
3125 return target;
3126 }
3127
3128 switch (code)
3129 {
3130 case LABEL_DECL:
3131 {
3132 tree function = decl_function_context (exp);
3133 /* Handle using a label in a containing function. */
3134 if (function != current_function_decl && function != 0)
3135 {
3136 struct function *p = find_function_data (function);
3137 /* Allocate in the memory associated with the function
3138 that the label is in. */
3139 push_obstacks (p->function_obstack,
3140 p->function_maybepermanent_obstack);
3141
3142 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3143 label_rtx (exp), p->forced_labels);
3144 pop_obstacks ();
3145 }
3146 else if (modifier == EXPAND_INITIALIZER)
3147 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3148 label_rtx (exp), forced_labels);
3149 return gen_rtx (MEM, FUNCTION_MODE,
3150 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3151 }
3152
3153 case PARM_DECL:
3154 if (DECL_RTL (exp) == 0)
3155 {
3156 error_with_decl (exp, "prior parameter's size depends on `%s'");
3157 return CONST0_RTX (mode);
3158 }
3159
3160 case FUNCTION_DECL:
3161 case VAR_DECL:
3162 case RESULT_DECL:
3163 if (DECL_RTL (exp) == 0)
3164 abort ();
3165 /* Ensure variable marked as used
3166 even if it doesn't go through a parser. */
3167 TREE_USED (exp) = 1;
3168 /* Handle variables inherited from containing functions. */
3169 context = decl_function_context (exp);
3170
3171 /* We treat inline_function_decl as an alias for the current function
3172 because that is the inline function whose vars, types, etc.
3173 are being merged into the current function.
3174 See expand_inline_function. */
3175 if (context != 0 && context != current_function_decl
3176 && context != inline_function_decl
3177 /* If var is static, we don't need a static chain to access it. */
3178 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3179 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3180 {
3181 rtx addr;
3182
3183 /* Mark as non-local and addressable. */
3184 DECL_NONLOCAL (exp) = 1;
3185 mark_addressable (exp);
3186 if (GET_CODE (DECL_RTL (exp)) != MEM)
3187 abort ();
3188 addr = XEXP (DECL_RTL (exp), 0);
3189 if (GET_CODE (addr) == MEM)
3190 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3191 else
3192 addr = fix_lexical_addr (addr, exp);
3193 return change_address (DECL_RTL (exp), mode, addr);
3194 }
3195
3196 /* This is the case of an array whose size is to be determined
3197 from its initializer, while the initializer is still being parsed.
3198 See expand_decl. */
3199 if (GET_CODE (DECL_RTL (exp)) == MEM
3200 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3201 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3202 XEXP (DECL_RTL (exp), 0));
3203 if (GET_CODE (DECL_RTL (exp)) == MEM
3204 && modifier != EXPAND_CONST_ADDRESS
3205 && modifier != EXPAND_SUM
3206 && modifier != EXPAND_INITIALIZER)
3207 {
3208 /* DECL_RTL probably contains a constant address.
3209 On RISC machines where a constant address isn't valid,
3210 make some insns to get that address into a register. */
3211 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3212 || (flag_force_addr
3213 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3214 return change_address (DECL_RTL (exp), VOIDmode,
3215 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3216 }
3217 return DECL_RTL (exp);
3218
3219 case INTEGER_CST:
3220 return immed_double_const (TREE_INT_CST_LOW (exp),
3221 TREE_INT_CST_HIGH (exp),
3222 mode);
3223
3224 case CONST_DECL:
3225 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3226
3227 case REAL_CST:
3228 /* If optimized, generate immediate CONST_DOUBLE
3229 which will be turned into memory by reload if necessary.
3230
3231 We used to force a register so that loop.c could see it. But
3232 this does not allow gen_* patterns to perform optimizations with
3233 the constants. It also produces two insns in cases like "x = 1.0;".
3234 On most machines, floating-point constants are not permitted in
3235 many insns, so we'd end up copying it to a register in any case.
3236
3237 Now, we do the copying in expand_binop, if appropriate. */
3238 return immed_real_const (exp);
3239
3240 case COMPLEX_CST:
3241 case STRING_CST:
3242 if (! TREE_CST_RTL (exp))
3243 output_constant_def (exp);
3244
3245 /* TREE_CST_RTL probably contains a constant address.
3246 On RISC machines where a constant address isn't valid,
3247 make some insns to get that address into a register. */
3248 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3249 && modifier != EXPAND_CONST_ADDRESS
3250 && modifier != EXPAND_INITIALIZER
3251 && modifier != EXPAND_SUM
3252 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3253 return change_address (TREE_CST_RTL (exp), VOIDmode,
3254 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3255 return TREE_CST_RTL (exp);
3256
3257 case SAVE_EXPR:
3258 context = decl_function_context (exp);
3259 /* We treat inline_function_decl as an alias for the current function
3260 because that is the inline function whose vars, types, etc.
3261 are being merged into the current function.
3262 See expand_inline_function. */
3263 if (context == current_function_decl || context == inline_function_decl)
3264 context = 0;
3265
3266 /* If this is non-local, handle it. */
3267 if (context)
3268 {
3269 temp = SAVE_EXPR_RTL (exp);
3270 if (temp && GET_CODE (temp) == REG)
3271 {
3272 put_var_into_stack (exp);
3273 temp = SAVE_EXPR_RTL (exp);
3274 }
3275 if (temp == 0 || GET_CODE (temp) != MEM)
3276 abort ();
3277 return change_address (temp, mode,
3278 fix_lexical_addr (XEXP (temp, 0), exp));
3279 }
3280 if (SAVE_EXPR_RTL (exp) == 0)
3281 {
3282 if (mode == BLKmode)
3283 temp
3284 = assign_stack_temp (mode,
3285 int_size_in_bytes (TREE_TYPE (exp)), 0);
3286 else
3287 temp = gen_reg_rtx (mode);
3288 SAVE_EXPR_RTL (exp) = temp;
3289 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3290 if (!optimize && GET_CODE (temp) == REG)
3291 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3292 save_expr_regs);
3293 }
3294 return SAVE_EXPR_RTL (exp);
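/* Thus the operand of a SAVE_EXPR is evaluated only the first time
   the node is expanded; later expansions simply return the cached
   SAVE_EXPR_RTL.  */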
3295
3296 case EXIT_EXPR:
3297 /* Exit the current loop if the body-expression is true. */
3298 {
3299 rtx label = gen_label_rtx ();
3300 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3301 expand_exit_loop (NULL_PTR);
3302 emit_label (label);
3303 }
3304 return const0_rtx;
3305
3306 case LOOP_EXPR:
3307 expand_start_loop (1);
3308 expand_expr_stmt (TREE_OPERAND (exp, 0));
3309 expand_end_loop ();
3310
3311 return const0_rtx;
3312
3313 case BIND_EXPR:
3314 {
3315 tree vars = TREE_OPERAND (exp, 0);
3316 int vars_need_expansion = 0;
3317
3318 /* Need to open a binding contour here because
3319 if there are any cleanups they must be contained here. */
3320 expand_start_bindings (0);
3321
3322 /* Mark the corresponding BLOCK for output. */
3323 if (TREE_OPERAND (exp, 2) != 0)
3324 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
3325
3326 /* If VARS have not yet been expanded, expand them now. */
3327 while (vars)
3328 {
3329 if (DECL_RTL (vars) == 0)
3330 {
3331 vars_need_expansion = 1;
3332 expand_decl (vars);
3333 }
3334 expand_decl_init (vars);
3335 vars = TREE_CHAIN (vars);
3336 }
3337
3338 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3339
3340 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3341
3342 return temp;
3343 }
3344
3345 case RTL_EXPR:
3346 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3347 abort ();
3348 emit_insns (RTL_EXPR_SEQUENCE (exp));
3349 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3350 return RTL_EXPR_RTL (exp);
3351
3352 case CONSTRUCTOR:
3353 /* All elts simple constants => refer to a constant in memory. But
3354 if this is a non-BLKmode mode, let it store a field at a time
3355 since that should make a CONST_INT or CONST_DOUBLE when we
3356 fold. */
3357 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3358 {
3359 rtx constructor = output_constant_def (exp);
3360 if (modifier != EXPAND_CONST_ADDRESS
3361 && modifier != EXPAND_INITIALIZER
3362 && modifier != EXPAND_SUM
3363 && !memory_address_p (GET_MODE (constructor),
3364 XEXP (constructor, 0)))
3365 constructor = change_address (constructor, VOIDmode,
3366 XEXP (constructor, 0));
3367 return constructor;
3368 }
3369
3370 if (ignore)
3371 {
3372 tree elt;
3373 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3374 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3375 return const0_rtx;
3376 }
3377 else
3378 {
3379 if (target == 0 || ! safe_from_p (target, exp))
3380 {
3381 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3382 target = gen_reg_rtx (mode);
3383 else
3384 {
3385 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3386 if (target)
3387 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3388 target = safe_target;
3389 }
3390 }
3391 store_constructor (exp, target);
3392 return target;
3393 }
3394
3395 case INDIRECT_REF:
3396 {
3397 tree exp1 = TREE_OPERAND (exp, 0);
3398 tree exp2;
3399
3400 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3401 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3402 This code has the same general effect as simply doing
3403 expand_expr on the save expr, except that the expression PTR
3404 is computed for use as a memory address. This means different
3405 code, suitable for indexing, may be generated. */
3406 if (TREE_CODE (exp1) == SAVE_EXPR
3407 && SAVE_EXPR_RTL (exp1) == 0
3408 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3409 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3410 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3411 {
3412 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3413 VOIDmode, EXPAND_SUM);
3414 op0 = memory_address (mode, temp);
3415 op0 = copy_all_regs (op0);
3416 SAVE_EXPR_RTL (exp1) = op0;
3417 }
3418 else
3419 {
3420 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3421 op0 = memory_address (mode, op0);
3422 }
3423
3424 temp = gen_rtx (MEM, mode, op0);
3425 /* If address was computed by addition,
3426 mark this as an element of an aggregate. */
3427 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3428 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3429 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3430 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3431 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3432 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3433 || (TREE_CODE (exp1) == ADDR_EXPR
3434 && (exp2 = TREE_OPERAND (exp1, 0))
3435 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3436 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3437 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3438 MEM_IN_STRUCT_P (temp) = 1;
3439 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3440 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3441 a location is accessed through a pointer to const does not mean
3442 that the value there can never change. */
3443 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3444 #endif
3445 return temp;
3446 }
3447
3448 case ARRAY_REF:
3449 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3450 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3451 {
3452 /* Nonconstant array index or nonconstant element size.
3453 Generate the tree for *(&array+index) and expand that,
3454 except do it in a language-independent way
3455 and don't complain about non-lvalue arrays.
3456 `mark_addressable' should already have been called
3457 for any array for which this case will be reached. */
3458
3459 /* Don't forget the const or volatile flag from the array element. */
3460 tree variant_type = build_type_variant (type,
3461 TREE_READONLY (exp),
3462 TREE_THIS_VOLATILE (exp));
3463 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3464 TREE_OPERAND (exp, 0));
3465 tree index = TREE_OPERAND (exp, 1);
3466 tree elt;
3467
3468 /* Convert the integer argument to a type the same size as a pointer
3469 so the multiply won't overflow spuriously. */
3470 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3471 index = convert (type_for_size (POINTER_SIZE, 0), index);
3472
3473 /* Don't think the address has side effects
3474 just because the array does.
3475 (In some cases the address might have side effects,
3476 and we fail to record that fact here. However, it should not
3477 matter, since expand_expr should not care.) */
3478 TREE_SIDE_EFFECTS (array_adr) = 0;
3479
3480 elt = build1 (INDIRECT_REF, type,
3481 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3482 array_adr,
3483 fold (build (MULT_EXPR,
3484 TYPE_POINTER_TO (variant_type),
3485 index, size_in_bytes (type))))));
3486
3487 /* Volatility, etc., of new expression is same as old expression. */
3488 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3489 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3490 TREE_READONLY (elt) = TREE_READONLY (exp);
3491
3492 return expand_expr (elt, target, tmode, modifier);
3493 }
3494
3495 /* Fold an expression like: "foo"[2].
3496 This is not done in fold so it won't happen inside &. */
3497 {
3498 int i;
3499 tree arg0 = TREE_OPERAND (exp, 0);
3500 tree arg1 = TREE_OPERAND (exp, 1);
3501
3502 if (TREE_CODE (arg0) == STRING_CST
3503 && TREE_CODE (arg1) == INTEGER_CST
3504 && !TREE_INT_CST_HIGH (arg1)
3505 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3506 {
3507 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3508 {
3509 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3510 TREE_TYPE (exp) = integer_type_node;
3511 return expand_expr (exp, target, tmode, modifier);
3512 }
3513 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3514 {
3515 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3516 TREE_TYPE (exp) = integer_type_node;
3517 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3518 }
3519 }
3520 }
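/* So a reference like "foo"[2] expands directly to the character
   constant 'o', with no memory reference to the string.  */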
3521
3522 /* If this is a constant index into a constant array,
3523 just get the value from the array. Handle both the cases when
3524 we have an explicit constructor and when our operand is a variable
3525 that was declared const. */
3526
3527 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3528 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3529 {
3530 tree index = fold (TREE_OPERAND (exp, 1));
3531 if (TREE_CODE (index) == INTEGER_CST
3532 && TREE_INT_CST_HIGH (index) == 0)
3533 {
3534 int i = TREE_INT_CST_LOW (index);
3535 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3536
3537 while (elem && i--)
3538 elem = TREE_CHAIN (elem);
3539 if (elem)
3540 return expand_expr (fold (TREE_VALUE (elem)), target,
3541 tmode, modifier);
3542 }
3543 }
3544
3545 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3546 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3547 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3548 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3549 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3550 && optimize >= 1
3551 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3552 != ERROR_MARK))
3553 {
3554 tree index = fold (TREE_OPERAND (exp, 1));
3555 if (TREE_CODE (index) == INTEGER_CST
3556 && TREE_INT_CST_HIGH (index) == 0)
3557 {
3558 int i = TREE_INT_CST_LOW (index);
3559 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3560
3561 if (TREE_CODE (init) == CONSTRUCTOR)
3562 {
3563 tree elem = CONSTRUCTOR_ELTS (init);
3564
3565 while (elem && i--)
3566 elem = TREE_CHAIN (elem);
3567 if (elem)
3568 return expand_expr (fold (TREE_VALUE (elem)), target,
3569 tmode, modifier);
3570 }
3571 else if (TREE_CODE (init) == STRING_CST
3572 && i < TREE_STRING_LENGTH (init))
3573 {
3574 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3575 return convert_to_mode (mode, temp, 0);
3576 }
3577 }
3578 }
3579 /* Treat array-ref with constant index as a component-ref. */
3580
3581 case COMPONENT_REF:
3582 case BIT_FIELD_REF:
3583 /* If the operand is a CONSTRUCTOR, we can just extract the
3584 appropriate field if it is present. */
3585 if (code != ARRAY_REF
3586 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3587 {
3588 tree elt;
3589
3590 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3591 elt = TREE_CHAIN (elt))
3592 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3593 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3594 }
3595
3596 {
3597 enum machine_mode mode1;
3598 int bitsize;
3599 int bitpos;
3600 tree offset;
3601 int volatilep = 0;
3602 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3603 &mode1, &unsignedp, &volatilep);
3604
3605 /* In some cases, we will be offsetting OP0's address by a constant.
3606 So get it as a sum, if possible. If we will be using it
3607 directly in an insn, we validate it. */
3608 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3609
3610 /* If this is a constant, put it into a register if it is a
3611 legitimate constant and memory if it isn't. */
3612 if (CONSTANT_P (op0))
3613 {
3614 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3615 if (LEGITIMATE_CONSTANT_P (op0))
3616 op0 = force_reg (mode, op0);
3617 else
3618 op0 = validize_mem (force_const_mem (mode, op0));
3619 }
3620
3621 if (offset != 0)
3622 {
3623 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3624
3625 if (GET_CODE (op0) != MEM)
3626 abort ();
3627 op0 = change_address (op0, VOIDmode,
3628 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3629 force_reg (Pmode, offset_rtx)));
3630 }
3631
3632 /* Don't forget about volatility even if this is a bitfield. */
3633 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3634 {
3635 op0 = copy_rtx (op0);
3636 MEM_VOLATILE_P (op0) = 1;
3637 }
3638
3639 if (mode1 == VOIDmode
3640 || (mode1 != BLKmode && ! direct_load[(int) mode1])
3641 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3642 {
3643 /* In cases where an aligned union has an unaligned object
3644 as a field, we might be extracting a BLKmode value from
3645 an integer-mode (e.g., SImode) object. Handle this case
3646 by doing the extract into an object as wide as the field
3647 (which we know to be the width of a basic mode), then
3648 storing into memory, and changing the mode to BLKmode. */
3649 enum machine_mode ext_mode = mode;
3650
3651 if (ext_mode == BLKmode)
3652 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3653
3654 if (ext_mode == BLKmode)
3655 abort ();
3656
3657 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3658 unsignedp, target, ext_mode, ext_mode,
3659 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3660 int_size_in_bytes (TREE_TYPE (tem)));
3661 if (mode == BLKmode)
3662 {
3663 rtx new = assign_stack_temp (ext_mode,
3664 bitsize / BITS_PER_UNIT, 0);
3665
3666 emit_move_insn (new, op0);
3667 op0 = copy_rtx (new);
3668 PUT_MODE (op0, BLKmode);
3669 }
3670
3671 return op0;
3672 }
3673
3674 /* Get a reference to just this component. */
3675 if (modifier == EXPAND_CONST_ADDRESS
3676 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3677 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3678 (bitpos / BITS_PER_UNIT)));
3679 else
3680 op0 = change_address (op0, mode1,
3681 plus_constant (XEXP (op0, 0),
3682 (bitpos / BITS_PER_UNIT)));
3683 MEM_IN_STRUCT_P (op0) = 1;
3684 MEM_VOLATILE_P (op0) |= volatilep;
3685 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3686 return op0;
3687 if (target == 0)
3688 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3689 convert_move (target, op0, unsignedp);
3690 return target;
3691 }
3692
3693 case OFFSET_REF:
3694 {
3695 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3696 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3697 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3698 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3699 MEM_IN_STRUCT_P (temp) = 1;
3700 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3701 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3702 a location is accessed through a pointer to const does not mean
3703 that the value there can never change. */
3704 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3705 #endif
3706 return temp;
3707 }
3708
3709 /* Intended for a reference to a buffer of a file-object in Pascal.
3710 But it's not certain that a special tree code will really be
3711 necessary for these. INDIRECT_REF might work for them. */
3712 case BUFFER_REF:
3713 abort ();
3714
3715 case WITH_CLEANUP_EXPR:
3716 if (RTL_EXPR_RTL (exp) == 0)
3717 {
3718 RTL_EXPR_RTL (exp)
3719 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3720 cleanups_this_call
3721 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
3722 /* That's it for this cleanup. */
3723 TREE_OPERAND (exp, 2) = 0;
3724 }
3725 return RTL_EXPR_RTL (exp);
3726
3727 case CALL_EXPR:
3728 /* Check for a built-in function. */
3729 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3730 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3731 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3732 return expand_builtin (exp, target, subtarget, tmode, ignore);
3733 /* If this call was expanded already by preexpand_calls,
3734 just return the result we got. */
3735 if (CALL_EXPR_RTL (exp) != 0)
3736 return CALL_EXPR_RTL (exp);
3737 return expand_call (exp, target, ignore);
3738
3739 case NON_LVALUE_EXPR:
3740 case NOP_EXPR:
3741 case CONVERT_EXPR:
3742 case REFERENCE_EXPR:
3743 if (TREE_CODE (type) == VOID_TYPE || ignore)
3744 {
3745 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3746 return const0_rtx;
3747 }
3748 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3749 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
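      /* Illustrative note (not from the original source): a conversion
         to a union type, as in the GNU C cast-to-union extension
         `(union u) i', is handled below by storing the operand into a
         union-sized target at offset zero.  */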
3750 if (TREE_CODE (type) == UNION_TYPE)
3751 {
3752 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3753 if (target == 0)
3754 {
3755 if (mode == BLKmode)
3756 {
3757 if (TYPE_SIZE (type) == 0
3758 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3759 abort ();
3760 target = assign_stack_temp (BLKmode,
3761 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3762 + BITS_PER_UNIT - 1)
3763 / BITS_PER_UNIT, 0);
3764 }
3765 else
3766 target = gen_reg_rtx (mode);
3767 }
3768 if (GET_CODE (target) == MEM)
3769 /* Store data into beginning of memory target. */
3770 store_expr (TREE_OPERAND (exp, 0),
3771 change_address (target, TYPE_MODE (valtype), 0),
3772 NULL_RTX);
3773 else if (GET_CODE (target) == REG)
3774 /* Store this field into a union of the proper type. */
3775 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3776 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3777 VOIDmode, 0, 1,
3778 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3779 else
3780 abort ();
3781
3782 /* Return the entire union. */
3783 return target;
3784 }
3785 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
3786 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3787 return op0;
3788 if (flag_force_mem && GET_CODE (op0) == MEM)
3789 op0 = copy_to_reg (op0);
3790
3791 if (target == 0)
3792 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3793 else
3794 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3795 return target;
3796
3797 case PLUS_EXPR:
3798 /* We come here from MINUS_EXPR when the second operand is a constant. */
3799 plus_expr:
3800 this_optab = add_optab;
3801
3802 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3803 something else, make sure we add the register to the constant and
3804 then to the other thing. This case can occur during strength
3805 reduction and doing it this way will produce better code if the
3806 frame pointer or argument pointer is eliminated.
3807
3808 fold-const.c will ensure that the constant is always in the inner
3809 PLUS_EXPR, so the only case we need to do anything about is if
3810 sp, ap, or fp is our second argument, in which case we must swap
3811 the innermost first argument and our second argument. */
3812
3813 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3814 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3815 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3816 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3817 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3818 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3819 {
3820 tree t = TREE_OPERAND (exp, 1);
3821
3822 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3823 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3824 }
3825
3826 /* If the result is to be Pmode and we are adding an integer to
3827 something, we might be forming a constant. So try to use
3828 plus_constant. If it produces a sum and we can't accept it,
3829 use force_operand. This allows P = &ARR[const] to generate
3830 efficient code on machines where a SYMBOL_REF is not a valid
3831 address.
3832
3833 If this is an EXPAND_SUM call, always return the sum. */
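      /* Illustrative example (not from the original source): given
         `static int arr[10];', the address `&arr[2]' can reach here as
         a PLUS_EXPR of (ADDR_EXPR arr) and a constant byte offset;
         plus_constant then folds the sum into something like
         (const (plus (symbol_ref "arr") 8)), which EXPAND_SUM callers
         accept as-is and which other callers legitimize below with
         force_operand.  */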
3834 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3835 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3836 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3837 || mode == Pmode))
3838 {
3839 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3840 EXPAND_SUM);
3841 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3842 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3843 op1 = force_operand (op1, target);
3844 return op1;
3845 }
3846
3847 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3848 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3849 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3850 || mode == Pmode))
3851 {
3852 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3853 EXPAND_SUM);
3854 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3855 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3856 op0 = force_operand (op0, target);
3857 return op0;
3858 }
3859
3860 /* No sense saving up arithmetic to be done
3861 if it's all in the wrong mode to form part of an address.
3862 And force_operand won't know whether to sign-extend or
3863 zero-extend. */
3864 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3865 || mode != Pmode) goto binop;
3866
3867 preexpand_calls (exp);
3868 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3869 subtarget = 0;
3870
3871 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3872 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
3873
3874 /* Make sure any term that's a sum with a constant comes last. */
3875 if (GET_CODE (op0) == PLUS
3876 && CONSTANT_P (XEXP (op0, 1)))
3877 {
3878 temp = op0;
3879 op0 = op1;
3880 op1 = temp;
3881 }
3882 /* If adding to a sum including a constant,
3883 associate it to put the constant outside. */
3884 if (GET_CODE (op1) == PLUS
3885 && CONSTANT_P (XEXP (op1, 1)))
3886 {
3887 rtx constant_term = const0_rtx;
3888
3889 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3890 if (temp != 0)
3891 op0 = temp;
3892 /* Ensure that MULT comes first if there is one. */
3893 else if (GET_CODE (op0) == MULT)
3894 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3895 else
3896 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3897
3898 /* Let's also eliminate constants from op0 if possible. */
3899 op0 = eliminate_constant_term (op0, &constant_term);
3900
3901 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3902 their sum should be a constant. Form it into OP1, since the
3903 result we want will then be OP0 + OP1. */
3904
3905 temp = simplify_binary_operation (PLUS, mode, constant_term,
3906 XEXP (op1, 1));
3907 if (temp != 0)
3908 op1 = temp;
3909 else
3910 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3911 }
3912
3913 /* Put a constant term last and put a multiplication first. */
3914 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3915 temp = op1, op1 = op0, op0 = temp;
3916
3917 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3918 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
3919
3920 case MINUS_EXPR:
3921 /* Handle difference of two symbolic constants,
3922 for the sake of an initializer. */
3923 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3924 && really_constant_p (TREE_OPERAND (exp, 0))
3925 && really_constant_p (TREE_OPERAND (exp, 1)))
3926 {
3927 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
3928 VOIDmode, modifier);
3929 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
3930 VOIDmode, modifier);
3931 return gen_rtx (MINUS, mode, op0, op1);
3932 }
3933 /* Convert A - const to A + (-const). */
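      /* E.g. (illustrative): `a - 7' becomes `a + (-7)', so the
         constant-handling code in the PLUS_EXPR case above applies
         to subtractions of constants as well.  */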
3934 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3935 {
3936 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3937 fold (build1 (NEGATE_EXPR, type,
3938 TREE_OPERAND (exp, 1))));
3939 goto plus_expr;
3940 }
3941 this_optab = sub_optab;
3942 goto binop;
3943
3944 case MULT_EXPR:
3945 preexpand_calls (exp);
3946 /* If first operand is constant, swap them.
3947 Thus the following special case checks need only
3948 check the second operand. */
3949 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3950 {
3951 register tree t1 = TREE_OPERAND (exp, 0);
3952 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
3953 TREE_OPERAND (exp, 1) = t1;
3954 }
3955
3956 /* Attempt to return something suitable for generating an
3957 indexed address, for machines that support that. */
3958
3959 if (modifier == EXPAND_SUM && mode == Pmode
3960 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3961 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3962 {
3963 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
3964
3965 /* Apply distributive law if OP0 is x+c. */
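            /* Illustrative case (not from the original source): for
               OP0 = (plus x 4) and a constant multiplier of 3, this
               builds (plus (mult x 3) 12) instead of multiplying the
               sum, which keeps the result usable as an address.  */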
3966 if (GET_CODE (op0) == PLUS
3967 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
3968 return gen_rtx (PLUS, mode,
3969 gen_rtx (MULT, mode, XEXP (op0, 0),
3970 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
3971 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3972 * INTVAL (XEXP (op0, 1))));
3973
3974 if (GET_CODE (op0) != REG)
3975 op0 = force_operand (op0, NULL_RTX);
3976 if (GET_CODE (op0) != REG)
3977 op0 = copy_to_mode_reg (mode, op0);
3978
3979 return gen_rtx (MULT, mode, op0,
3980 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
3981 }
3982
3983 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3984 subtarget = 0;
3985
3986 /* Check for multiplying things that have been extended
3987 from a narrower type. If this machine supports multiplying
3988 in that narrower type with a result in the desired type,
3989 do it that way, and avoid the explicit type-conversion. */
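         /* Concretely (an illustrative case, not from this file): for
            `short a, b; int i = (int) a * (int) b;' both operands were
            widened from HImode, so a machine with a widening multiply
            pattern (e.g. mulhisi3) can multiply in HImode and produce
            the SImode product directly.  */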
3990 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
3991 && TREE_CODE (type) == INTEGER_TYPE
3992 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3993 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
3994 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3995 && int_fits_type_p (TREE_OPERAND (exp, 1),
3996 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3997 /* Don't use a widening multiply if a shift will do. */
3998 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
3999 > HOST_BITS_PER_WIDE_INT)
4000 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4001 ||
4002 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4003 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4004 ==
4005 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4006 /* If both operands are extended, they must either both
4007 be zero-extended or both be sign-extended. */
4008 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4009 ==
4010 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4011 {
4012 enum machine_mode innermode
4013 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4014 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4015 ? umul_widen_optab : smul_widen_optab);
4016 if (mode == GET_MODE_WIDER_MODE (innermode)
4017 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4018 {
4019 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4020 NULL_RTX, VOIDmode, 0);
4021 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4022 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4023 VOIDmode, 0);
4024 else
4025 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4026 NULL_RTX, VOIDmode, 0);
4027 goto binop2;
4028 }
4029 }
4030 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4031 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4032 return expand_mult (mode, op0, op1, target, unsignedp);
4033
4034 case TRUNC_DIV_EXPR:
4035 case FLOOR_DIV_EXPR:
4036 case CEIL_DIV_EXPR:
4037 case ROUND_DIV_EXPR:
4038 case EXACT_DIV_EXPR:
4039 preexpand_calls (exp);
4040 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4041 subtarget = 0;
4042 /* Possible optimization: compute the dividend with EXPAND_SUM
4043 then if the divisor is constant can optimize the case
4044 where some terms of the dividend have coeffs divisible by it. */
4045 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4046 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4047 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4048
4049 case RDIV_EXPR:
4050 this_optab = flodiv_optab;
4051 goto binop;
4052
4053 case TRUNC_MOD_EXPR:
4054 case FLOOR_MOD_EXPR:
4055 case CEIL_MOD_EXPR:
4056 case ROUND_MOD_EXPR:
4057 preexpand_calls (exp);
4058 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4059 subtarget = 0;
4060 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4061 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4062 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4063
4064 case FIX_ROUND_EXPR:
4065 case FIX_FLOOR_EXPR:
4066 case FIX_CEIL_EXPR:
4067 abort (); /* Not used for C. */
4068
4069 case FIX_TRUNC_EXPR:
4070 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4071 if (target == 0)
4072 target = gen_reg_rtx (mode);
4073 expand_fix (target, op0, unsignedp);
4074 return target;
4075
4076 case FLOAT_EXPR:
4077 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4078 if (target == 0)
4079 target = gen_reg_rtx (mode);
4080 /* expand_float can't figure out what to do if FROM has VOIDmode.
4081 So give it the correct mode. With -O, cse will optimize this. */
4082 if (GET_MODE (op0) == VOIDmode)
4083 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4084 op0);
4085 expand_float (target, op0,
4086 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4087 return target;
4088
4089 case NEGATE_EXPR:
4090 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4091 temp = expand_unop (mode, neg_optab, op0, target, 0);
4092 if (temp == 0)
4093 abort ();
4094 return temp;
4095
4096 case ABS_EXPR:
4097 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4098
4099 /* Unsigned abs is simply the operand. Testing here means we don't
4100 risk generating incorrect code below. */
4101 if (TREE_UNSIGNED (type))
4102 return op0;
4103
4104 /* First try to do it with a special abs instruction. */
4105 temp = expand_unop (mode, abs_optab, op0, target, 0);
4106 if (temp != 0)
4107 return temp;
4108
4109 /* If this machine has expensive jumps, we can do integer absolute
4110 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4111 where W is the width of MODE. */
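      /* Worked instance of that identity (illustrative, not from the
         original source): with W = 32 and x = -5, the arithmetic shift
         gives (signed) x >> 31 == -1, so ((-1) ^ -5) - (-1) == 4 + 1 == 5;
         for x = 5 the shift gives 0 and (0 ^ 5) - 0 == 5.  */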
4112
4113 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4114 {
4115 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4116 size_int (GET_MODE_BITSIZE (mode) - 1),
4117 NULL_RTX, 0);
4118
4119 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4120 OPTAB_LIB_WIDEN);
4121 if (temp != 0)
4122 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4123 OPTAB_LIB_WIDEN);
4124
4125 if (temp != 0)
4126 return temp;
4127 }
4128
4129 /* If that does not win, use conditional jump and negate. */
4130 target = original_target;
4131 temp = gen_label_rtx ();
4132 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4133 || (GET_CODE (target) == REG
4134 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4135 target = gen_reg_rtx (mode);
4136 emit_move_insn (target, op0);
4137 emit_cmp_insn (target,
4138 expand_expr (convert (type, integer_zero_node),
4139 NULL_RTX, VOIDmode, 0),
4140 GE, NULL_RTX, mode, 0, 0);
4141 NO_DEFER_POP;
4142 emit_jump_insn (gen_bge (temp));
4143 op0 = expand_unop (mode, neg_optab, target, target, 0);
4144 if (op0 != target)
4145 emit_move_insn (target, op0);
4146 emit_label (temp);
4147 OK_DEFER_POP;
4148 return target;
4149
4150 case MAX_EXPR:
4151 case MIN_EXPR:
4152 target = original_target;
4153 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4154 || (GET_CODE (target) == REG
4155 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4156 target = gen_reg_rtx (mode);
4157 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4158 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4159
4160 /* First try to do it with a special MIN or MAX instruction.
4161 If that does not win, use a conditional jump to select the proper
4162 value. */
4163 this_optab = (TREE_UNSIGNED (type)
4164 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4165 : (code == MIN_EXPR ? smin_optab : smax_optab));
4166
4167 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4168 OPTAB_WIDEN);
4169 if (temp != 0)
4170 return temp;
4171
4172 if (target != op0)
4173 emit_move_insn (target, op0);
4174 op0 = gen_label_rtx ();
4175 if (code == MAX_EXPR)
4176 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4177 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4178 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4179 else
4180 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4181 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4182 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4183 if (temp == const0_rtx)
4184 emit_move_insn (target, op1);
4185 else if (temp != const_true_rtx)
4186 {
4187 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4188 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4189 else
4190 abort ();
4191 emit_move_insn (target, op1);
4192 }
4193 emit_label (op0);
4194 return target;
4195
4196 /* ??? Can optimize when the operand of this is a bitwise operation,
4197 by using a different bitwise operation. */
4198 case BIT_NOT_EXPR:
4199 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4200 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4201 if (temp == 0)
4202 abort ();
4203 return temp;
4204
4205 case FFS_EXPR:
4206 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4207 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4208 if (temp == 0)
4209 abort ();
4210 return temp;
4211
4212 /* ??? Can optimize bitwise operations with one arg constant.
4213 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4214 and (a bitwise1 b) bitwise2 b (etc)
4215 but that is probably not worthwhile. */
4216
4217 /* BIT_AND_EXPR is for bitwise anding.
4218 TRUTH_AND_EXPR is for anding two boolean values
4219 when we want in all cases to compute both of them.
4220 In general it is fastest to do TRUTH_AND_EXPR by
4221 computing both operands as actual zero-or-1 values
4222 and then bitwise anding. In cases where there cannot
4223 be any side effects, better code would be made by
4224 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4225 but the question is how to recognize those cases. */
4226
4227 case TRUTH_AND_EXPR:
4228 case BIT_AND_EXPR:
4229 this_optab = and_optab;
4230 goto binop;
4231
4232 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4233 case TRUTH_OR_EXPR:
4234 case BIT_IOR_EXPR:
4235 this_optab = ior_optab;
4236 goto binop;
4237
4238 case BIT_XOR_EXPR:
4239 this_optab = xor_optab;
4240 goto binop;
4241
4242 case LSHIFT_EXPR:
4243 case RSHIFT_EXPR:
4244 case LROTATE_EXPR:
4245 case RROTATE_EXPR:
4246 preexpand_calls (exp);
4247 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4248 subtarget = 0;
4249 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4250 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4251 unsignedp);
4252
4253 /* Could determine the answer when only additive constants differ.
4254 Also, the addition of one can be handled by changing the condition. */
4255 case LT_EXPR:
4256 case LE_EXPR:
4257 case GT_EXPR:
4258 case GE_EXPR:
4259 case EQ_EXPR:
4260 case NE_EXPR:
4261 preexpand_calls (exp);
4262 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4263 if (temp != 0)
4264 return temp;
4265 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4266 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4267 && original_target
4268 && GET_CODE (original_target) == REG
4269 && (GET_MODE (original_target)
4270 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4271 {
4272 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4273 if (temp != original_target)
4274 temp = copy_to_reg (temp);
4275 op1 = gen_label_rtx ();
4276 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4277 GET_MODE (temp), unsignedp, 0);
4278 emit_jump_insn (gen_beq (op1));
4279 emit_move_insn (temp, const1_rtx);
4280 emit_label (op1);
4281 return temp;
4282 }
4283 /* If no set-flag instruction, must generate a conditional
4284 store into a temporary variable. Drop through
4285 and handle this like && and ||. */
4286
4287 case TRUTH_ANDIF_EXPR:
4288 case TRUTH_ORIF_EXPR:
4289 if (target == 0 || ! safe_from_p (target, exp)
4290 /* Make sure we don't have a hard reg (such as function's return
4291 value) live across basic blocks, if not optimizing. */
4292 || (!optimize && GET_CODE (target) == REG
4293 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4294 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4295 emit_clr_insn (target);
4296 op1 = gen_label_rtx ();
4297 jumpifnot (exp, op1);
4298 emit_0_to_1_insn (target);
4299 emit_label (op1);
4300 return target;
4301
4302 case TRUTH_NOT_EXPR:
4303 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4304 /* The parser is careful to generate TRUTH_NOT_EXPR
4305 only with operands that are always zero or one. */
4306 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4307 target, 1, OPTAB_LIB_WIDEN);
4308 if (temp == 0)
4309 abort ();
4310 return temp;
4311
4312 case COMPOUND_EXPR:
4313 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4314 emit_queue ();
4315 return expand_expr (TREE_OPERAND (exp, 1),
4316 (ignore ? const0_rtx : target),
4317 VOIDmode, 0);
4318
4319 case COND_EXPR:
4320 {
4321 /* Note that COND_EXPRs whose type is a structure or union
4322 are required to be constructed to contain assignments of
4323 a temporary variable, so that we can evaluate them here
4324 for side effect only. If type is void, we must do likewise. */
4325
4326 /* If an arm of the branch requires a cleanup,
4327 only that cleanup is performed. */
4328
4329 tree singleton = 0;
4330 tree binary_op = 0, unary_op = 0;
4331 tree old_cleanups = cleanups_this_call;
4332 cleanups_this_call = 0;
4333
4334 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4335 convert it to our mode, if necessary. */
4336 if (integer_onep (TREE_OPERAND (exp, 1))
4337 && integer_zerop (TREE_OPERAND (exp, 2))
4338 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4339 {
4340 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4341 if (GET_MODE (op0) == mode)
4342 return op0;
4343 if (target == 0)
4344 target = gen_reg_rtx (mode);
4345 convert_move (target, op0, unsignedp);
4346 return target;
4347 }
4348
4349 /* If we are not to produce a result, we have no target. Otherwise,
4350 if a target was specified use it; it will not be used as an
4351 intermediate target unless it is safe. If no target, use a
4352 temporary. */
4353
4354 if (mode == VOIDmode || ignore)
4355 temp = 0;
4356 else if (original_target
4357 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4358 temp = original_target;
4359 else if (mode == BLKmode)
4360 {
4361 if (TYPE_SIZE (type) == 0
4362 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4363 abort ();
4364 temp = assign_stack_temp (BLKmode,
4365 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4366 + BITS_PER_UNIT - 1)
4367 / BITS_PER_UNIT, 0);
4368 }
4369 else
4370 temp = gen_reg_rtx (mode);
4371
4372 /* Check for X ? A + B : A. If we have this, we can copy
4373 A to the output and conditionally add B. Similarly for unary
4374 operations. Don't do this if X has side-effects because
4375 those side effects might affect A or B and the "?" operation is
4376 a sequence point in ANSI. (We test for side effects later.) */
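         /* E.g. (illustrative): for `c ? n + m : n', N is the singleton;
            we store N into the target and then, only when C holds, add
            M into it.  A unary case such as `c ? -n : n' is handled the
            same way.  */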
4377
4378 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4379 && operand_equal_p (TREE_OPERAND (exp, 2),
4380 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4381 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4382 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4383 && operand_equal_p (TREE_OPERAND (exp, 1),
4384 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4385 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4386 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4387 && operand_equal_p (TREE_OPERAND (exp, 2),
4388 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4389 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4390 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4391 && operand_equal_p (TREE_OPERAND (exp, 1),
4392 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4393 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4394
4395 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4396 operation, do this as A + (X != 0). Similarly for other simple
4397 binary operators. */
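         /* E.g. (illustrative): `y < 0 ? a + 1 : a' can be computed with
            no branch at all as `a + (y < 0)', using a store-flag insn to
            materialize the comparison result as 0 or 1.  */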
4398 if (singleton && binary_op
4399 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4400 && (TREE_CODE (binary_op) == PLUS_EXPR
4401 || TREE_CODE (binary_op) == MINUS_EXPR
4402 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4403 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4404 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4405 && integer_onep (TREE_OPERAND (binary_op, 1))
4406 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4407 {
4408 rtx result;
4409 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4410 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4411 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4412 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4413 : and_optab);
4414
4415 /* If we had X ? A : A + 1, do this as A + (X == 0).
4416
4417 We have to invert the truth value here and then put it
4418 back later if do_store_flag fails. We cannot simply copy
4419 TREE_OPERAND (exp, 0) to another variable and modify that
4420 because invert_truthvalue can modify the tree pointed to
4421 by its argument. */
4422 if (singleton == TREE_OPERAND (exp, 1))
4423 TREE_OPERAND (exp, 0)
4424 = invert_truthvalue (TREE_OPERAND (exp, 0));
4425
4426 result = do_store_flag (TREE_OPERAND (exp, 0),
4427 (safe_from_p (temp, singleton)
4428 ? temp : NULL_RTX),
4429 mode, BRANCH_COST <= 1);
4430
4431 if (result)
4432 {
4433 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4434 return expand_binop (mode, boptab, op1, result, temp,
4435 unsignedp, OPTAB_LIB_WIDEN);
4436 }
4437 else if (singleton == TREE_OPERAND (exp, 1))
4438 TREE_OPERAND (exp, 0)
4439 = invert_truthvalue (TREE_OPERAND (exp, 0));
4440 }
4441
4442 NO_DEFER_POP;
4443 op0 = gen_label_rtx ();
4444
4445 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4446 {
4447 if (temp != 0)
4448 {
4449 /* If the target conflicts with the other operand of the
4450 binary op, we can't use it. Also, we can't use the target
4451 if it is a hard register, because evaluating the condition
4452 might clobber it. */
4453 if ((binary_op
4454 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4455 || (GET_CODE (temp) == REG
4456 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4457 temp = gen_reg_rtx (mode);
4458 store_expr (singleton, temp, 0);
4459 }
4460 else
4461 expand_expr (singleton,
4462 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4463 if (cleanups_this_call)
4464 {
4465 sorry ("aggregate value in COND_EXPR");
4466 cleanups_this_call = 0;
4467 }
4468 if (singleton == TREE_OPERAND (exp, 1))
4469 jumpif (TREE_OPERAND (exp, 0), op0);
4470 else
4471 jumpifnot (TREE_OPERAND (exp, 0), op0);
4472
4473 if (binary_op && temp == 0)
4474 /* Just touch the other operand. */
4475 expand_expr (TREE_OPERAND (binary_op, 1),
4476 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4477 else if (binary_op)
4478 store_expr (build (TREE_CODE (binary_op), type,
4479 make_tree (type, temp),
4480 TREE_OPERAND (binary_op, 1)),
4481 temp, 0);
4482 else
4483 store_expr (build1 (TREE_CODE (unary_op), type,
4484 make_tree (type, temp)),
4485 temp, 0);
4486 op1 = op0;
4487 }
4488 #if 0
4489 /* This is now done in jump.c and is better done there because it
4490 produces shorter register lifetimes. */
4491
4492 /* Check for both possibilities either constants or variables
4493 in registers (but not the same as the target!). If so, can
4494 save branches by assigning one, branching, and assigning the
4495 other. */
4496 else if (temp && GET_MODE (temp) != BLKmode
4497 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4498 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4499 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4500 && DECL_RTL (TREE_OPERAND (exp, 1))
4501 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4502 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4503 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4504 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4505 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4506 && DECL_RTL (TREE_OPERAND (exp, 2))
4507 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4508 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4509 {
4510 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4511 temp = gen_reg_rtx (mode);
4512 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4513 jumpifnot (TREE_OPERAND (exp, 0), op0);
4514 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4515 op1 = op0;
4516 }
4517 #endif
4518 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4519 comparison operator. If we have one of these cases, set the
4520 output to A, branch on A (cse will merge these two references),
4521 then set the output to FOO. */
4522 else if (temp
4523 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4524 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4525 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4526 TREE_OPERAND (exp, 1), 0)
4527 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4528 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4529 {
4530 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4531 temp = gen_reg_rtx (mode);
4532 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4533 jumpif (TREE_OPERAND (exp, 0), op0);
4534 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4535 op1 = op0;
4536 }
4537 else if (temp
4538 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4539 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4540 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4541 TREE_OPERAND (exp, 2), 0)
4542 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4543 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4544 {
4545 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4546 temp = gen_reg_rtx (mode);
4547 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4548 jumpifnot (TREE_OPERAND (exp, 0), op0);
4549 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4550 op1 = op0;
4551 }
4552 else
4553 {
4554 op1 = gen_label_rtx ();
4555 jumpifnot (TREE_OPERAND (exp, 0), op0);
4556 if (temp != 0)
4557 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4558 else
4559 expand_expr (TREE_OPERAND (exp, 1),
4560 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4561 if (cleanups_this_call)
4562 {
4563 sorry ("aggregate value in COND_EXPR");
4564 cleanups_this_call = 0;
4565 }
4566
4567 emit_queue ();
4568 emit_jump_insn (gen_jump (op1));
4569 emit_barrier ();
4570 emit_label (op0);
4571 if (temp != 0)
4572 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4573 else
4574 expand_expr (TREE_OPERAND (exp, 2),
4575 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4576 }
4577
4578 if (cleanups_this_call)
4579 {
4580 sorry ("aggregate value in COND_EXPR");
4581 cleanups_this_call = 0;
4582 }
4583
4584 emit_queue ();
4585 emit_label (op1);
4586 OK_DEFER_POP;
4587 cleanups_this_call = old_cleanups;
4588 return temp;
4589 }
4590
4591 case TARGET_EXPR:
4592 {
4593 /* Something needs to be initialized, but we didn't know
4594 where that thing was when building the tree. For example,
4595 it could be the return value of a function, or a parameter
4596 to a function which is laid out in the stack, or a temporary
4597 variable which must be passed by reference.
4598
4599 We guarantee that the expression will either be constructed
4600 or copied into our original target. */
4601
4602 tree slot = TREE_OPERAND (exp, 0);
4603
4604 if (TREE_CODE (slot) != VAR_DECL)
4605 abort ();
4606
4607 if (target == 0)
4608 {
4609 if (DECL_RTL (slot) != 0)
4610 target = DECL_RTL (slot);
4611 else
4612 {
4613 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4614 /* All temp slots at this level must not conflict. */
4615 preserve_temp_slots (target);
4616 DECL_RTL (slot) = target;
4617 }
4618
4619 #if 0
4620 /* Since SLOT is not known to the called function
4621 to belong to its stack frame, we must build an explicit
4622 cleanup. This case occurs when we must build up a reference
4623 to pass the reference as an argument. In this case,
4624 it is very likely that such a reference need not be
4625 built here. */
4626
4627 if (TREE_OPERAND (exp, 2) == 0)
4628 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4629 if (TREE_OPERAND (exp, 2))
4630 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4631 cleanups_this_call);
4632 #endif
4633 }
4634 else
4635 {
4636 /* This case does occur when expanding a parameter which
4637 needs to be constructed on the stack. The target
4638 is the actual stack address that we want to initialize.
4639 The function we call will perform the cleanup in this case. */
4640
4641 DECL_RTL (slot) = target;
4642 }
4643
4644 return expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4645 }
4646
4647 case INIT_EXPR:
4648 {
4649 tree lhs = TREE_OPERAND (exp, 0);
4650 tree rhs = TREE_OPERAND (exp, 1);
4651 tree noncopied_parts = 0;
4652 tree lhs_type = TREE_TYPE (lhs);
4653
4654 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4655 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4656 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4657 TYPE_NONCOPIED_PARTS (lhs_type));
4658 while (noncopied_parts != 0)
4659 {
4660 expand_assignment (TREE_VALUE (noncopied_parts),
4661 TREE_PURPOSE (noncopied_parts), 0, 0);
4662 noncopied_parts = TREE_CHAIN (noncopied_parts);
4663 }
4664 return temp;
4665 }
4666
4667 case MODIFY_EXPR:
4668 {
4669 /* If lhs is complex, expand calls in rhs before computing it.
4670 That's so we don't compute a pointer and save it over a call.
4671 If lhs is simple, compute it first so we can give it as a
4672 target if the rhs is just a call. This avoids an extra temp and copy
4673 and that prevents a partial-subsumption which makes bad code.
4674 Actually we could treat component_ref's of vars like vars. */
4675
4676 tree lhs = TREE_OPERAND (exp, 0);
4677 tree rhs = TREE_OPERAND (exp, 1);
4678 tree noncopied_parts = 0;
4679 tree lhs_type = TREE_TYPE (lhs);
4680
4681 temp = 0;
4682
4683 if (TREE_CODE (lhs) != VAR_DECL
4684 && TREE_CODE (lhs) != RESULT_DECL
4685 && TREE_CODE (lhs) != PARM_DECL)
4686 preexpand_calls (exp);
4687
4688 /* Check for |= or &= of a bitfield of size one into another bitfield
4689 of size 1. In this case, (unless we need the result of the
4690 assignment) we can do this more efficiently with a
4691 test followed by an assignment, if necessary.
4692
4693 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4694 things change so we do, this code should be enhanced to
4695 support it. */
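         /* Illustration (hypothetical source, not from this file): with
            one-bit fields X and Y, `s.x |= s.y;' becomes in effect
            `if (s.y) s.x = 1;', and `s.x &= s.y;' becomes
            `if (! s.y) s.x = 0;', avoiding a read-modify-write of the
            destination bitfield.  */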
4696 if (ignore
4697 && TREE_CODE (lhs) == COMPONENT_REF
4698 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4699 || TREE_CODE (rhs) == BIT_AND_EXPR)
4700 && TREE_OPERAND (rhs, 0) == lhs
4701 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4702 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4703 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4704 {
4705 rtx label = gen_label_rtx ();
4706
4707 do_jump (TREE_OPERAND (rhs, 1),
4708 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4709 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4710 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4711 (TREE_CODE (rhs) == BIT_IOR_EXPR
4712 ? integer_one_node
4713 : integer_zero_node)),
4714 0, 0);
4715 do_pending_stack_adjust ();
4716 emit_label (label);
4717 return const0_rtx;
4718 }
4719
4720 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4721 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4722 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4723 TYPE_NONCOPIED_PARTS (lhs_type));
4724
4725 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4726 while (noncopied_parts != 0)
4727 {
4728 expand_assignment (TREE_PURPOSE (noncopied_parts),
4729 TREE_VALUE (noncopied_parts), 0, 0);
4730 noncopied_parts = TREE_CHAIN (noncopied_parts);
4731 }
4732 return temp;
4733 }
4734
4735 case PREINCREMENT_EXPR:
4736 case PREDECREMENT_EXPR:
4737 return expand_increment (exp, 0);
4738
4739 case POSTINCREMENT_EXPR:
4740 case POSTDECREMENT_EXPR:
4741 /* Faster to treat as pre-increment if result is not used. */
4742 return expand_increment (exp, ! ignore);
4743
4744 case ADDR_EXPR:
4745 /* Are we taking the address of a nested function? */
4746 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4747 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4748 {
4749 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4750 op0 = force_operand (op0, target);
4751 }
4752 else
4753 {
4754 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
4755 (modifier == EXPAND_INITIALIZER
4756 ? modifier : EXPAND_CONST_ADDRESS));
4757 if (GET_CODE (op0) != MEM)
4758 abort ();
4759
4760 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4761 return XEXP (op0, 0);
4762 op0 = force_operand (XEXP (op0, 0), target);
4763 }
4764 if (flag_force_addr && GET_CODE (op0) != REG)
4765 return force_reg (Pmode, op0);
4766 return op0;
4767
4768 case ENTRY_VALUE_EXPR:
4769 abort ();
4770
4771 case ERROR_MARK:
4772 return const0_rtx;
4773
4774 default:
4775 return (*lang_expand_expr) (exp, target, tmode, modifier);
4776 }
4777
4778 /* Here to do an ordinary binary operator, generating an instruction
4779 from the optab already placed in `this_optab'. */
4780 binop:
4781 preexpand_calls (exp);
4782 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4783 subtarget = 0;
4784 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4785 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4786 binop2:
4787 temp = expand_binop (mode, this_optab, op0, op1, target,
4788 unsignedp, OPTAB_LIB_WIDEN);
4789 if (temp == 0)
4790 abort ();
4791 return temp;
4792 }
4793 \f
4794 /* Return the alignment in bits of EXP, a pointer valued expression.
4795 But don't return more than MAX_ALIGN no matter what.
4796 The alignment returned is, by default, the alignment of the thing that
4797 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4798
4799 Otherwise, look at the expression to see if we can do better, i.e., if the
4800 expression is actually pointing at an object whose alignment is tighter. */
4801
4802 static int
4803 get_pointer_alignment (exp, max_align)
4804 tree exp;
4805 unsigned max_align;
4806 {
4807 unsigned align, inner;
4808
4809 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4810 return 0;
4811
4812 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4813 align = MIN (align, max_align);
4814
4815 while (1)
4816 {
4817 switch (TREE_CODE (exp))
4818 {
4819 case NOP_EXPR:
4820 case CONVERT_EXPR:
4821 case NON_LVALUE_EXPR:
4822 exp = TREE_OPERAND (exp, 0);
4823 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4824 return align;
4825 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4826 inner = MIN (inner, max_align);
4827 align = MAX (align, inner);
4828 break;
4829
4830 case PLUS_EXPR:
4831 /* If sum of pointer + int, restrict our maximum alignment to that
4832 imposed by the integer. If not, we can't do any better than
4833 ALIGN. */
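      /* For instance (illustrative): with max_align == 64 and a byte
         offset of 2 (i.e. 16 bits), the loop below shifts max_align
         down to 16, the largest power of two that divides the offset
         in bits.  */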
4834 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
4835 return align;
4836
4837 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
4838 & (max_align - 1))
4839 != 0)
4840 max_align >>= 1;
4841
4842 exp = TREE_OPERAND (exp, 0);
4843 break;
4844
4845 case ADDR_EXPR:
4846 /* See what we are pointing at and look at its alignment. */
4847 exp = TREE_OPERAND (exp, 0);
4848 if (TREE_CODE (exp) == FUNCTION_DECL)
4849 align = MAX (align, FUNCTION_BOUNDARY);
4850 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4851 align = MAX (align, DECL_ALIGN (exp));
4852 #ifdef CONSTANT_ALIGNMENT
4853 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4854 align = CONSTANT_ALIGNMENT (exp, align);
4855 #endif
4856 return MIN (align, max_align);
4857
4858 default:
4859 return align;
4860 }
4861 }
4862 }
4863 \f
4864 /* Return the tree node and offset if a given argument corresponds to
4865 a string constant. */
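 /* Illustrative behavior (not from the original source): for an argument
    tree of the form (PLUS_EXPR (ADDR_EXPR "hello") 2), such as might
    arise for `"hello" + 2', this returns the STRING_CST "hello" and sets
    *PTR_OFFSET to 2; any argument not matching the patterns below
    yields 0.  */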
4866
4867 static tree
4868 string_constant (arg, ptr_offset)
4869 tree arg;
4870 tree *ptr_offset;
4871 {
4872 STRIP_NOPS (arg);
4873
4874 if (TREE_CODE (arg) == ADDR_EXPR
4875 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4876 {
4877 *ptr_offset = integer_zero_node;
4878 return TREE_OPERAND (arg, 0);
4879 }
4880 else if (TREE_CODE (arg) == PLUS_EXPR)
4881 {
4882 tree arg0 = TREE_OPERAND (arg, 0);
4883 tree arg1 = TREE_OPERAND (arg, 1);
4884
4885 STRIP_NOPS (arg0);
4886 STRIP_NOPS (arg1);
4887
4888 if (TREE_CODE (arg0) == ADDR_EXPR
4889 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4890 {
4891 *ptr_offset = arg1;
4892 return TREE_OPERAND (arg0, 0);
4893 }
4894 else if (TREE_CODE (arg1) == ADDR_EXPR
4895 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4896 {
4897 *ptr_offset = arg0;
4898 return TREE_OPERAND (arg1, 0);
4899 }
4900 }
4901
4902 return 0;
4903 }
4904
4905 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4906 way, because the string could contain a zero byte in the middle.
4907 TREE_STRING_LENGTH is the size of the character array, not the string.
4908
4909 Unfortunately, string_constant can't access the values of const char
4910 arrays with initializers, so neither can we do so here. */
4911
4912 static tree
4913 c_strlen (src)
4914 tree src;
4915 {
4916 tree offset_node;
4917 int offset, max;
4918 char *ptr;
4919
4920 src = string_constant (src, &offset_node);
4921 if (src == 0)
4922 return 0;
4923 max = TREE_STRING_LENGTH (src);
4924 ptr = TREE_STRING_POINTER (src);
4925 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4926 {
4927 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4928 compute the offset to the following null if we don't know where to
4929 start searching for it. */
4930 int i;
4931 for (i = 0; i < max; i++)
4932 if (ptr[i] == 0)
4933 return 0;
4934 /* We don't know the starting offset, but we do know that the string
4935 has no internal zero bytes. We can assume that the offset falls
4936 within the bounds of the string; otherwise, the programmer deserves
4937 what he gets. Subtract the offset from the length of the string,
4938 and return that. */
4939 /* This would perhaps not be valid if we were dealing with named
4940 arrays in addition to literal string constants. */
4941 return size_binop (MINUS_EXPR, size_int (max), offset_node);
4942 }
4943
4944 /* We have a known offset into the string. Start searching there for
4945 a null character. */
4946 if (offset_node == 0)
4947 offset = 0;
4948 else
4949 {
4950 /* Did we get a long long offset? If so, punt. */
4951 if (TREE_INT_CST_HIGH (offset_node) != 0)
4952 return 0;
4953 offset = TREE_INT_CST_LOW (offset_node);
4954 }
4955 /* If the offset is known to be out of bounds, warn, and call strlen at
4956 runtime. */
4957 if (offset < 0 || offset > max)
4958 {
4959 warning ("offset outside bounds of constant string");
4960 return 0;
4961 }
4962 /* Use strlen to search for the first zero byte. Since any strings
4963 constructed with build_string will have nulls appended, we win even
4964 if we get handed something like (char[4])"abcd".
4965
4966 Since OFFSET is our starting index into the string, no further
4967 calculation is needed. */
4968 return size_int (strlen (ptr + offset));
4969 }
4970 \f
4971 /* Expand an expression EXP that calls a built-in function,
4972 with result going to TARGET if that's convenient
4973 (and in mode MODE if that's convenient).
4974 SUBTARGET may be used as the target for computing one of EXP's operands.
4975 IGNORE is nonzero if the value is to be ignored. */
4976
4977 static rtx
4978 expand_builtin (exp, target, subtarget, mode, ignore)
4979 tree exp;
4980 rtx target;
4981 rtx subtarget;
4982 enum machine_mode mode;
4983 int ignore;
4984 {
4985 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4986 tree arglist = TREE_OPERAND (exp, 1);
4987 rtx op0;
4988 rtx lab1, insns;
4989 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
4990
4991 switch (DECL_FUNCTION_CODE (fndecl))
4992 {
4993 case BUILT_IN_ABS:
4994 case BUILT_IN_LABS:
4995 case BUILT_IN_FABS:
4996 /* build_function_call changes these into ABS_EXPR. */
4997 abort ();
4998
4999 case BUILT_IN_FSQRT:
5000 /* If not optimizing, call the library function. */
5001 if (! optimize)
5002 break;
5003
5004 if (arglist == 0
5005 /* Arg could be wrong type if user redeclared this fcn wrong. */
5006 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5007 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5008
5009 /* Stabilize and compute the argument. */
5010 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5011 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5012 {
5013 exp = copy_node (exp);
5014 arglist = copy_node (arglist);
5015 TREE_OPERAND (exp, 1) = arglist;
5016 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5017 }
5018 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5019
5020 /* Make a suitable register to place result in. */
5021 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5022
5023 emit_queue ();
5024 start_sequence ();
5025
5026 /* Compute sqrt into TARGET.
5027 Set TARGET to wherever the result comes back. */
5028 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5029 sqrt_optab, op0, target, 0);
5030
5031 /* If we were unable to expand via the builtin, stop the
5032 sequence (without outputting the insns) and break, causing
5033 a call to the library function. */
5034 if (target == 0)
5035 {
5036 end_sequence ();
5037 break;
5038 }
5039
5040 /* Check the results by default. But if flag_fast_math is turned on,
5041 then assume sqrt will always be called with valid arguments. */
5042
5043 if (! flag_fast_math)
5044 {
5045 /* Don't define the sqrt instructions
5046 if your machine is not IEEE. */
5047 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5048 abort ();
5049
5050 lab1 = gen_label_rtx ();
5051
5052 /* Test the result; if it is NaN, set errno=EDOM because
5053 the argument was not in the domain. */
5054 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5055 emit_jump_insn (gen_beq (lab1));
5056
5057 #if TARGET_EDOM
5058 {
5059 #ifdef GEN_ERRNO_RTX
5060 rtx errno_rtx = GEN_ERRNO_RTX;
5061 #else
5062 rtx errno_rtx
5063 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5064 #endif
5065
5066 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5067 }
5068 #else
5069 /* We can't set errno=EDOM directly; let the library call do it.
5070 Pop the arguments right away in case the call gets deleted. */
5071 NO_DEFER_POP;
5072 expand_call (exp, target, 0);
5073 OK_DEFER_POP;
5074 #endif
5075
5076 emit_label (lab1);
5077 }
5078
5079 /* Output the entire sequence. */
5080 insns = get_insns ();
5081 end_sequence ();
5082 emit_insns (insns);
5083
5084 return target;
5085
5086 case BUILT_IN_SAVEREGS:
5087 /* Don't do __builtin_saveregs more than once in a function.
5088 Save the result of the first call and reuse it. */
5089 if (saveregs_value != 0)
5090 return saveregs_value;
5091 {
5092 /* When this function is called, it means that registers must be
5093 saved on entry to this function. So we migrate the
5094 call to the first insn of this function. */
5095 rtx temp;
5096 rtx seq;
5097 rtx valreg, saved_valreg;
5098
5099 /* Now really call the function. `expand_call' does not call
5100 expand_builtin, so there is no danger of infinite recursion here. */
5101 start_sequence ();
5102
5103 #ifdef EXPAND_BUILTIN_SAVEREGS
5104 /* Do whatever the machine needs done in this case. */
5105 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5106 #else
5107 /* The register where the function returns its value
5108 is likely to have something else in it, such as an argument.
5109 So preserve that register around the call. */
5110 if (value_mode != VOIDmode)
5111 {
5112 valreg = hard_libcall_value (value_mode);
5113 saved_valreg = gen_reg_rtx (value_mode);
5114 emit_move_insn (saved_valreg, valreg);
5115 }
5116
5117 /* Generate the call, putting the value in a pseudo. */
5118 temp = expand_call (exp, target, ignore);
5119
5120 if (value_mode != VOIDmode)
5121 emit_move_insn (valreg, saved_valreg);
5122 #endif
5123
5124 seq = get_insns ();
5125 end_sequence ();
5126
5127 saveregs_value = temp;
5128
5129 /* This won't work inside a SEQUENCE--it really has to be
5130 at the start of the function. */
5131 if (in_sequence_p ())
5132 {
5133 /* Better to do this than to crash. */
5134 error ("`va_start' used within `({...})'");
5135 return temp;
5136 }
5137
5138 /* Put the sequence after the NOTE that starts the function. */
5139 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5140 return temp;
5141 }
5142
5143 /* __builtin_args_info (N) returns word N of the arg space info
5144 for the current function. The number and meanings of words
5145 are controlled by the definition of CUMULATIVE_ARGS. */
5146 case BUILT_IN_ARGS_INFO:
5147 {
5148 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5149 int i;
5150 int *word_ptr = (int *) &current_function_args_info;
5151 tree type, elts, result;
5152
5153 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5154 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5155 __FILE__, __LINE__);
5156
5157 if (arglist != 0)
5158 {
5159 tree arg = TREE_VALUE (arglist);
5160 if (TREE_CODE (arg) != INTEGER_CST)
5161 error ("argument of __builtin_args_info must be constant");
5162 else
5163 {
5164 int wordnum = TREE_INT_CST_LOW (arg);
5165
5166 if (wordnum < 0 || wordnum >= nwords)
5167 error ("argument of __builtin_args_info out of range");
5168 else
5169 return GEN_INT (word_ptr[wordnum]);
5170 }
5171 }
5172 else
5173 error ("missing argument in __builtin_args_info");
5174
5175 return const0_rtx;
5176
5177 #if 0
5178 for (i = 0; i < nwords; i++)
5179 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5180
5181 type = build_array_type (integer_type_node,
5182 build_index_type (build_int_2 (nwords, 0)));
5183 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5184 TREE_CONSTANT (result) = 1;
5185 TREE_STATIC (result) = 1;
5186 result = build (INDIRECT_REF, build_pointer_type (type), result);
5187 TREE_CONSTANT (result) = 1;
5188 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5189 #endif
5190 }
5191
5192 /* Return the address of the first anonymous stack arg. */
5193 case BUILT_IN_NEXT_ARG:
5194 {
5195 tree fntype = TREE_TYPE (current_function_decl);
5196 if (!(TYPE_ARG_TYPES (fntype) != 0
5197 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5198 != void_type_node)))
5199 {
5200 error ("`va_start' used in function with fixed args");
5201 return const0_rtx;
5202 }
5203 }
5204
5205 return expand_binop (Pmode, add_optab,
5206 current_function_internal_arg_pointer,
5207 current_function_arg_offset_rtx,
5208 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5209
5210 case BUILT_IN_CLASSIFY_TYPE:
5211 if (arglist != 0)
5212 {
5213 tree type = TREE_TYPE (TREE_VALUE (arglist));
5214 enum tree_code code = TREE_CODE (type);
5215 if (code == VOID_TYPE)
5216 return GEN_INT (void_type_class);
5217 if (code == INTEGER_TYPE)
5218 return GEN_INT (integer_type_class);
5219 if (code == CHAR_TYPE)
5220 return GEN_INT (char_type_class);
5221 if (code == ENUMERAL_TYPE)
5222 return GEN_INT (enumeral_type_class);
5223 if (code == BOOLEAN_TYPE)
5224 return GEN_INT (boolean_type_class);
5225 if (code == POINTER_TYPE)
5226 return GEN_INT (pointer_type_class);
5227 if (code == REFERENCE_TYPE)
5228 return GEN_INT (reference_type_class);
5229 if (code == OFFSET_TYPE)
5230 return GEN_INT (offset_type_class);
5231 if (code == REAL_TYPE)
5232 return GEN_INT (real_type_class);
5233 if (code == COMPLEX_TYPE)
5234 return GEN_INT (complex_type_class);
5235 if (code == FUNCTION_TYPE)
5236 return GEN_INT (function_type_class);
5237 if (code == METHOD_TYPE)
5238 return GEN_INT (method_type_class);
5239 if (code == RECORD_TYPE)
5240 return GEN_INT (record_type_class);
5241 if (code == UNION_TYPE)
5242 return GEN_INT (union_type_class);
5243 if (code == ARRAY_TYPE)
5244 return GEN_INT (array_type_class);
5245 if (code == STRING_TYPE)
5246 return GEN_INT (string_type_class);
5247 if (code == SET_TYPE)
5248 return GEN_INT (set_type_class);
5249 if (code == FILE_TYPE)
5250 return GEN_INT (file_type_class);
5251 if (code == LANG_TYPE)
5252 return GEN_INT (lang_type_class);
5253 }
5254 return GEN_INT (no_type_class);
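/* Illustrative results (the class names are the enumerators declared
   in typeclass.h):

	int i;  double d;  char *p;
	__builtin_classify_type (i)   => integer_type_class
	__builtin_classify_type (d)   => real_type_class
	__builtin_classify_type (p)   => pointer_type_class
*/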
5255
5256 case BUILT_IN_CONSTANT_P:
5257 if (arglist == 0)
5258 return const0_rtx;
5259 else
5260 return (TREE_CODE_CLASS (TREE_VALUE (arglist)) == 'c'
5261 ? const1_rtx : const0_rtx);
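/* So `__builtin_constant_p (3)' and `__builtin_constant_p (3.0)' yield
   1 here, since their arguments are constant nodes ('c' class), while
   any expression not already folded to a constant yields 0.  */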
5262
5263 case BUILT_IN_FRAME_ADDRESS:
5264 /* The argument must be a nonnegative integer constant.
5265 It counts the number of frames to scan up the stack.
5266 The value is the address of that frame. */
5267 case BUILT_IN_RETURN_ADDRESS:
5268 /* The argument must be a nonnegative integer constant.
5269 It counts the number of frames to scan up the stack.
5270 The value is the return address saved in that frame. */
5271 if (arglist == 0)
5272 /* Warning about missing arg was already issued. */
5273 return const0_rtx;
5274 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5275 {
5276 error ("invalid arg to __builtin_return_address");
5277 return const0_rtx;
5278 }
5279 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5280 {
5281 error ("invalid arg to __builtin_return_address");
5282 return const0_rtx;
5283 }
5284 else
5285 {
5286 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5287 rtx tem = frame_pointer_rtx;
5288 int i;
5289
5290 /* Scan back COUNT frames to the specified frame. */
5291 for (i = 0; i < count; i++)
5292 {
5293 /* Assume the dynamic chain pointer is in the word that
5294 the frame address points to, unless otherwise specified. */
5295 #ifdef DYNAMIC_CHAIN_ADDRESS
5296 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5297 #endif
5298 tem = memory_address (Pmode, tem);
5299 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5300 }
5301
5302 /* For __builtin_frame_address, return what we've got. */
5303 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5304 return tem;
5305
5306 /* For __builtin_return_address,
5307 get the return address from that frame. */
5308 #ifdef RETURN_ADDR_RTX
5309 return RETURN_ADDR_RTX (count, tem);
5310 #else
5311 tem = memory_address (Pmode,
5312 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5313 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5314 #endif
5315 }
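/* Illustrative use (a sketch, not compiler code):

	void *ret   = __builtin_return_address (0);
	void *frame = __builtin_frame_address (1);

   With a count of 0 the loop above runs zero times and TEM is this
   function's own frame pointer; each additional count follows the
   dynamic chain up one frame.  */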
5316
5317 case BUILT_IN_ALLOCA:
5318 if (arglist == 0
5319 /* Arg could be non-integer if user redeclared this fcn wrong. */
5320 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5321 return const0_rtx;
5322 current_function_calls_alloca = 1;
5323 /* Compute the argument. */
5324 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5325
5326 /* Allocate the desired space. */
5327 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5328
5329 /* Record the new stack level for nonlocal gotos. */
5330 if (nonlocal_goto_handler_slot != 0)
5331 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5332 return target;
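/* E.g. `char *buf = __builtin_alloca (n);' expands to an in-line
   stack adjustment rather than a library call; the space is released
   automatically when the function exits.  */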
5333
5334 case BUILT_IN_FFS:
5335 /* If not optimizing, call the library function. */
5336 if (!optimize)
5337 break;
5338
5339 if (arglist == 0
5340 /* Arg could be non-integer if user redeclared this fcn wrong. */
5341 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5342 return const0_rtx;
5343
5344 /* Compute the argument. */
5345 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5346 /* Compute ffs, into TARGET if possible.
5347 Set TARGET to wherever the result comes back. */
5348 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5349 ffs_optab, op0, target, 1);
5350 if (target == 0)
5351 abort ();
5352 return target;
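/* E.g. `__builtin_ffs (12)' is 3 (the 1-based index of the least
   significant set bit) and `__builtin_ffs (0)' is 0; when not
   optimizing we break out above and call the library `ffs' instead.  */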
5353
5354 case BUILT_IN_STRLEN:
5355 /* If not optimizing, call the library function. */
5356 if (!optimize)
5357 break;
5358
5359 if (arglist == 0
5360 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5361 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5362 return const0_rtx;
5363 else
5364 {
5365 tree src = TREE_VALUE (arglist);
5366 tree len = c_strlen (src);
5367
5368 int align
5369 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5370
5371 rtx result, src_rtx, char_rtx;
5372 enum machine_mode insn_mode = value_mode, char_mode;
5373 enum insn_code icode;
5374
5375 /* If the length is known, just return it. */
5376 if (len != 0)
5377 return expand_expr (len, target, mode, 0);
5378
5379 /* If SRC is not a pointer type, don't do this operation inline. */
5380 if (align == 0)
5381 break;
5382
5383 /* Call a function if we can't compute strlen in the right mode. */
5384
5385 while (insn_mode != VOIDmode)
5386 {
5387 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5388 if (icode != CODE_FOR_nothing)
5389 break;
5390
5391 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5392 }
5393 if (insn_mode == VOIDmode)
5394 break;
5395
5396 /* Make a place to write the result of the instruction. */
5397 result = target;
5398 if (! (result != 0
5399 && GET_CODE (result) == REG
5400 && GET_MODE (result) == insn_mode
5401 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5402 result = gen_reg_rtx (insn_mode);
5403
5404 /* Make sure the operands are acceptable to the predicates. */
5405
5406 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5407 result = gen_reg_rtx (insn_mode);
5408
5409 src_rtx = memory_address (BLKmode,
5410 expand_expr (src, NULL_RTX, Pmode,
5411 EXPAND_NORMAL));
5412 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5413 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5414
5415 char_rtx = const0_rtx;
5416 char_mode = insn_operand_mode[(int)icode][2];
5417 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5418 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5419
5420 emit_insn (GEN_FCN (icode) (result,
5421 gen_rtx (MEM, BLKmode, src_rtx),
5422 char_rtx, GEN_INT (align)));
5423
5424 /* Return the value in the proper mode for this function. */
5425 if (GET_MODE (result) == value_mode)
5426 return result;
5427 else if (target != 0)
5428 {
5429 convert_move (target, result, 0);
5430 return target;
5431 }
5432 else
5433 return convert_to_mode (value_mode, result, 0);
5434 }
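/* So `strlen ("hello")' expands to the constant 5 via c_strlen, while
   `strlen (p)' for unknown P uses the machine's strlen pattern when one
   exists in a suitable mode, and otherwise stays a library call.  */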
5435
5436 case BUILT_IN_STRCPY:
5437 /* If not optimizing, call the library function. */
5438 if (!optimize)
5439 break;
5440
5441 if (arglist == 0
5442 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5443 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5444 || TREE_CHAIN (arglist) == 0
5445 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5446 return const0_rtx;
5447 else
5448 {
5449 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5450
5451 if (len == 0)
5452 break;
5453
5454 len = size_binop (PLUS_EXPR, len, integer_one_node);
5455
5456 chainon (arglist, build_tree_list (NULL_TREE, len));
5457 }
5458
5459 /* Falls through. */
5460 case BUILT_IN_MEMCPY:
5461 /* If not optimizing, call the library function. */
5462 if (!optimize)
5463 break;
5464
5465 if (arglist == 0
5466 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5467 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5468 || TREE_CHAIN (arglist) == 0
5469 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5470 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5471 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5472 return const0_rtx;
5473 else
5474 {
5475 tree dest = TREE_VALUE (arglist);
5476 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5477 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5478
5479 int src_align
5480 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5481 int dest_align
5482 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5483 rtx dest_rtx;
5484
5485 /* If either SRC or DEST is not a pointer type, don't do
5486 this operation in-line. */
5487 if (src_align == 0 || dest_align == 0)
5488 {
5489 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5490 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5491 break;
5492 }
5493
5494 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5495
5496 /* Copy word part most expediently. */
5497 emit_block_move (gen_rtx (MEM, BLKmode,
5498 memory_address (BLKmode, dest_rtx)),
5499 gen_rtx (MEM, BLKmode,
5500 memory_address (BLKmode,
5501 expand_expr (src, NULL_RTX,
5502 Pmode,
5503 EXPAND_NORMAL))),
5504 expand_expr (len, NULL_RTX, VOIDmode, 0),
5505 MIN (src_align, dest_align));
5506 return dest_rtx;
5507 }
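/* E.g. `strcpy (buf, "hi")' acquires the length argument 3 above (the
   string length plus the terminating null) and is then expanded just
   like `memcpy (buf, "hi", 3)': an open-coded block move instead of a
   call, provided both arguments really are pointers.  */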
5508
5509 /* These comparison functions need an instruction that returns an actual
5510 index. An ordinary compare that just sets the condition codes
5511 is not enough. */
5512 #ifdef HAVE_cmpstrsi
5513 case BUILT_IN_STRCMP:
5514 /* If not optimizing, call the library function. */
5515 if (!optimize)
5516 break;
5517
5518 if (arglist == 0
5519 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5520 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5521 || TREE_CHAIN (arglist) == 0
5522 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5523 return const0_rtx;
5524 else if (!HAVE_cmpstrsi)
5525 break;
5526 {
5527 tree arg1 = TREE_VALUE (arglist);
5528 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5529 tree offset;
5530 tree len, len2;
5531
5532 len = c_strlen (arg1);
5533 if (len)
5534 len = size_binop (PLUS_EXPR, integer_one_node, len);
5535 len2 = c_strlen (arg2);
5536 if (len2)
5537 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5538
5539 /* If we don't have a constant length for the first, use the length
5540 of the second, if we know it. We don't require a constant for
5541 this case; some cost analysis could be done if both are available
5542 but neither is constant. For now, assume they're equally cheap.
5543
5544 If both strings have constant lengths, use the smaller. This
5545 could arise if optimization results in strcmp being called with
5546 two fixed strings, or if the code was machine-generated. We should
5547 add some code to the `memcmp' handler below to deal with such
5548 situations, someday. */
5549 if (!len || TREE_CODE (len) != INTEGER_CST)
5550 {
5551 if (len2)
5552 len = len2;
5553 else if (len == 0)
5554 break;
5555 }
5556 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5557 {
5558 if (tree_int_cst_lt (len2, len))
5559 len = len2;
5560 }
5561
5562 chainon (arglist, build_tree_list (NULL_TREE, len));
5563 }
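/* E.g. for `strcmp (s, "ab")' with S unknown, LEN2 is 3 (including the
   terminating null) and becomes the length argument, so the cmpstrsi
   insn below examines at most 3 bytes.  */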
5564
5565 /* Falls through. */
5566 case BUILT_IN_MEMCMP:
5567 /* If not optimizing, call the library function. */
5568 if (!optimize)
5569 break;
5570
5571 if (arglist == 0
5572 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5573 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5574 || TREE_CHAIN (arglist) == 0
5575 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5576 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5577 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5578 return const0_rtx;
5579 else if (!HAVE_cmpstrsi)
5580 break;
5581 {
5582 tree arg1 = TREE_VALUE (arglist);
5583 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5584 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5585 rtx result;
5586
5587 int arg1_align
5588 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5589 int arg2_align
5590 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5591 enum machine_mode insn_mode
5592 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5593
5594 /* If either arg is not a pointer, call the library function. */
5595 if (arg1_align == 0 || arg2_align == 0)
5596 {
5597 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5598 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5599 break;
5600 }
5601
5602 /* Make a place to write the result of the instruction. */
5603 result = target;
5604 if (! (result != 0
5605 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5606 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5607 result = gen_reg_rtx (insn_mode);
5608
5609 emit_insn (gen_cmpstrsi (result,
5610 gen_rtx (MEM, BLKmode,
5611 expand_expr (arg1, NULL_RTX, Pmode,
5612 EXPAND_NORMAL)),
5613 gen_rtx (MEM, BLKmode,
5614 expand_expr (arg2, NULL_RTX, Pmode,
5615 EXPAND_NORMAL)),
5616 expand_expr (len, NULL_RTX, VOIDmode, 0),
5617 GEN_INT (MIN (arg1_align, arg2_align))));
5618
5619 /* Return the value in the proper mode for this function. */
5620 mode = TYPE_MODE (TREE_TYPE (exp));
5621 if (GET_MODE (result) == mode)
5622 return result;
5623 else if (target != 0)
5624 {
5625 convert_move (target, result, 0);
5626 return target;
5627 }
5628 else
5629 return convert_to_mode (mode, result, 0);
5630 }
5631 #else
5632 case BUILT_IN_STRCMP:
5633 case BUILT_IN_MEMCMP:
5634 break;
5635 #endif
5636
5637 default: /* just do library call, if unknown builtin */
5638 error ("built-in function %s not currently supported",
5639 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5640 }
5641
5642 /* The switch statement above can drop through to cause the function
5643 to be called normally. */
5644
5645 return expand_call (exp, target, ignore);
5646 }
5647 \f
5648 /* Expand code for a post- or pre- increment or decrement
5649 and return the RTX for the result.
5650 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
5651
5652 static rtx
5653 expand_increment (exp, post)
5654 register tree exp;
5655 int post;
5656 {
5657 register rtx op0, op1;
5658 register rtx temp, value;
5659 register tree incremented = TREE_OPERAND (exp, 0);
5660 optab this_optab = add_optab;
5661 int icode;
5662 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5663 int op0_is_copy = 0;
5664
5665 /* Stabilize any component ref that might need to be
5666 evaluated more than once below. */
5667 if (TREE_CODE (incremented) == BIT_FIELD_REF
5668 || (TREE_CODE (incremented) == COMPONENT_REF
5669 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5670 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5671 incremented = stabilize_reference (incremented);
5672
5673 /* Compute the operands as RTX.
5674 Note whether OP0 is the actual lvalue or a copy of it:
5675 I believe it is a copy iff it is a register or subreg
5676 and insns were generated in computing it. */
5677 temp = get_last_insn ();
5678 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
5679 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
5680 && temp != get_last_insn ());
5681 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5682
5683 /* Decide whether incrementing or decrementing. */
5684 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5685 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5686 this_optab = sub_optab;
5687
5688 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5689 then we cannot just increment OP0. We must
5690 therefore contrive to increment the original value.
5691 Then we can return OP0 since it is a copy of the old value. */
5692 if (op0_is_copy)
5693 {
5694 /* This is the easiest way to increment the value wherever it is.
5695 Problems with multiple evaluation of INCREMENTED
5696 are prevented because either (1) it is a component_ref,
5697 in which case it was stabilized above, or (2) it is an array_ref
5698 with constant index in an array in a register, which is
5699 safe to reevaluate. */
5700 tree newexp = build ((this_optab == add_optab
5701 ? PLUS_EXPR : MINUS_EXPR),
5702 TREE_TYPE (exp),
5703 incremented,
5704 TREE_OPERAND (exp, 1));
5705 temp = expand_assignment (incremented, newexp, ! post, 0);
5706 return post ? op0 : temp;
5707 }
5708
5709 /* Convert decrement by a constant into a negative increment. */
5710 if (this_optab == sub_optab
5711 && GET_CODE (op1) == CONST_INT)
5712 {
5713 op1 = GEN_INT (- INTVAL (op1));
5714 this_optab = add_optab;
5715 }
5716
5717 if (post)
5718 {
5719 /* We have a true reference to the value in OP0.
5720 If there is an insn to add or subtract in this mode, queue it. */
5721
5722 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
5723 op0 = stabilize (op0);
5724 #endif
5725
5726 icode = (int) this_optab->handlers[(int) mode].insn_code;
5727 if (icode != (int) CODE_FOR_nothing
5728 /* Make sure that OP0 is valid for operands 0 and 1
5729 of the insn we want to queue. */
5730 && (*insn_operand_predicate[icode][0]) (op0, mode)
5731 && (*insn_operand_predicate[icode][1]) (op0, mode))
5732 {
5733 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5734 op1 = force_reg (mode, op1);
5735
5736 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5737 }
5738 }
5739
5740 /* Preincrement, or we can't increment with one simple insn. */
5741 if (post)
5742 /* Save a copy of the value before inc or dec, to return it later. */
5743 temp = value = copy_to_reg (op0);
5744 else
5745 /* Arrange to return the incremented value. */
5746 /* Copy the rtx because expand_binop will protect from the queue,
5747 and the results of that would be invalid for us to return
5748 if our caller does emit_queue before using our result. */
5749 temp = copy_rtx (value = op0);
5750
5751 /* Increment however we can. */
5752 op1 = expand_binop (mode, this_optab, value, op1, op0,
5753 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5754 /* Make sure the value is stored into OP0. */
5755 if (op1 != op0)
5756 emit_move_insn (op0, op1);
5757
5758 return temp;
5759 }
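/* The semantics implemented above, as a sketch:

	int x = 5, y;
	y = x++;	=> y == 5, x == 6  (POST: old value returned)
	y = ++x;	=> y == 7, x == 7  (PRE: new value returned)
*/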
5760 \f
5761 /* Expand all function calls contained within EXP, innermost ones first.
5762 But don't look within expressions that have sequence points.
5763 For each CALL_EXPR, record the rtx for its value
5764 in the CALL_EXPR_RTL field. */
5765
5766 static void
5767 preexpand_calls (exp)
5768 tree exp;
5769 {
5770 register int nops, i;
5771 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5772
5773 if (! do_preexpand_calls)
5774 return;
5775
5776 /* Only expressions and references can contain calls. */
5777
5778 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5779 return;
5780
5781 switch (TREE_CODE (exp))
5782 {
5783 case CALL_EXPR:
5784 /* Do nothing if already expanded. */
5785 if (CALL_EXPR_RTL (exp) != 0)
5786 return;
5787
5788 /* Do nothing to built-in functions. */
5789 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5790 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5791 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5792 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
5793 return;
5794
5795 case COMPOUND_EXPR:
5796 case COND_EXPR:
5797 case TRUTH_ANDIF_EXPR:
5798 case TRUTH_ORIF_EXPR:
5799 /* If we find one of these, then we can be sure
5800 the adjust will be done for it (since it makes jumps).
5801 Do it now, so that if this is inside an argument
5802 of a function, we don't get the stack adjustment
5803 after some other args have already been pushed. */
5804 do_pending_stack_adjust ();
5805 return;
5806
5807 case BLOCK:
5808 case RTL_EXPR:
5809 case WITH_CLEANUP_EXPR:
5810 return;
5811
5812 case SAVE_EXPR:
5813 if (SAVE_EXPR_RTL (exp) != 0)
5814 return;
5815 }
5816
5817 nops = tree_code_length[(int) TREE_CODE (exp)];
5818 for (i = 0; i < nops; i++)
5819 if (TREE_OPERAND (exp, i) != 0)
5820 {
5821 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5822 if (type == 'e' || type == '<' || type == '1' || type == '2'
5823 || type == 'r')
5824 preexpand_calls (TREE_OPERAND (exp, i));
5825 }
5826 }
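/* For example, in `z = f (x) + g (y);' both calls are expanded here,
   innermost first, before the addition itself is expanded.  A call
   inside a conditional such as `p ? f (x) : 0' is deliberately left
   alone, since only one arm may be evaluated.  */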
5827 \f
5828 /* At the start of a function, record that we have no previously-pushed
5829 arguments waiting to be popped. */
5830
5831 void
5832 init_pending_stack_adjust ()
5833 {
5834 pending_stack_adjust = 0;
5835 }
5836
5837 /* When exiting from function, if safe, clear out any pending stack adjust
5838 so the adjustment won't get done. */
5839
5840 void
5841 clear_pending_stack_adjust ()
5842 {
5843 #ifdef EXIT_IGNORE_STACK
5844 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5845 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
5846 && ! flag_inline_functions)
5847 pending_stack_adjust = 0;
5848 #endif
5849 }
5850
5851 /* Pop any previously-pushed arguments that have not been popped yet. */
5852
5853 void
5854 do_pending_stack_adjust ()
5855 {
5856 if (inhibit_defer_pop == 0)
5857 {
5858 if (pending_stack_adjust != 0)
5859 adjust_stack (GEN_INT (pending_stack_adjust));
5860 pending_stack_adjust = 0;
5861 }
5862 }
5863
5864 /* Expand all cleanups up to OLD_CLEANUPS.
5865 Needed here, and also for language-dependent calls. */
5866
5867 void
5868 expand_cleanups_to (old_cleanups)
5869 tree old_cleanups;
5870 {
5871 while (cleanups_this_call != old_cleanups)
5872 {
5873 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
5874 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5875 }
5876 }
5877 \f
5878 /* Expand conditional expressions. */
5879
5880 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5881 LABEL is an rtx of code CODE_LABEL, in this function and all the
5882 functions here. */
5883
5884 void
5885 jumpifnot (exp, label)
5886 tree exp;
5887 rtx label;
5888 {
5889 do_jump (exp, label, NULL_RTX);
5890 }
5891
5892 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5893
5894 void
5895 jumpif (exp, label)
5896 tree exp;
5897 rtx label;
5898 {
5899 do_jump (exp, NULL_RTX, label);
5900 }
5901
5902 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5903 the result is zero, or IF_TRUE_LABEL if the result is one.
5904 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5905 meaning fall through in that case.
5906
5907 do_jump always does any pending stack adjust except when it does not
5908 actually perform a jump. An example where there is no jump
5909 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5910
5911 This function is responsible for optimizing cases such as
5912 &&, || and comparison operators in EXP. */
5913
5914 void
5915 do_jump (exp, if_false_label, if_true_label)
5916 tree exp;
5917 rtx if_false_label, if_true_label;
5918 {
5919 register enum tree_code code = TREE_CODE (exp);
5920 /* Some cases need to create a label to jump to
5921 in order to properly fall through.
5922 These cases set DROP_THROUGH_LABEL nonzero. */
5923 rtx drop_through_label = 0;
5924 rtx temp;
5925 rtx comparison = 0;
5926 int i;
5927 tree type;
5928
5929 emit_queue ();
5930
5931 switch (code)
5932 {
5933 case ERROR_MARK:
5934 break;
5935
5936 case INTEGER_CST:
5937 temp = integer_zerop (exp) ? if_false_label : if_true_label;
5938 if (temp)
5939 emit_jump (temp);
5940 break;
5941
5942 #if 0
5943 /* This is not true with #pragma weak */
5944 case ADDR_EXPR:
5945 /* The address of something can never be zero. */
5946 if (if_true_label)
5947 emit_jump (if_true_label);
5948 break;
5949 #endif
5950
5951 case NOP_EXPR:
5952 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
5953 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
5954 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
5955 goto normal;
5956 case CONVERT_EXPR:
5957 /* If we are narrowing the operand, we have to do the compare in the
5958 narrower mode. */
5959 if ((TYPE_PRECISION (TREE_TYPE (exp))
5960 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5961 goto normal;
5962 case NON_LVALUE_EXPR:
5963 case REFERENCE_EXPR:
5964 case ABS_EXPR:
5965 case NEGATE_EXPR:
5966 case LROTATE_EXPR:
5967 case RROTATE_EXPR:
5968 /* These cannot change zero->non-zero or vice versa. */
5969 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5970 break;
5971
5972 #if 0
5973 /* This is never less insns than evaluating the PLUS_EXPR followed by
5974 a test and can be longer if the test is eliminated. */
5975 case PLUS_EXPR:
5976 /* Reduce to minus. */
5977 exp = build (MINUS_EXPR, TREE_TYPE (exp),
5978 TREE_OPERAND (exp, 0),
5979 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
5980 TREE_OPERAND (exp, 1))));
5981 /* Process as MINUS. */
5982 #endif
5983
5984 case MINUS_EXPR:
5985 /* Non-zero iff operands of minus differ. */
5986 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
5987 TREE_OPERAND (exp, 0),
5988 TREE_OPERAND (exp, 1)),
5989 NE, NE);
5990 break;
5991
5992 case BIT_AND_EXPR:
5993 /* If we are AND'ing with a small constant, do this comparison in the
5994 smallest type that fits. If the machine doesn't have comparisons
5995 that small, it will be converted back to the wider comparison.
5996 This helps if we are testing the sign bit of a narrower object.
5997 combine can't do this for us because it can't know whether a
5998 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
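/* For instance, for `if (x & 0x80)' with 32-bit int X, I below is 7,
   so the test is done as an 8-bit (QImode) comparison when the machine
   has one -- effectively a sign-bit test of the low byte.  */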
5999
6000 if (! SLOW_BYTE_ACCESS
6001 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6002 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6003 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6004 && (type = type_for_size (i + 1, 1)) != 0
6005 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6006 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6007 != CODE_FOR_nothing))
6008 {
6009 do_jump (convert (type, exp), if_false_label, if_true_label);
6010 break;
6011 }
6012 goto normal;
6013
6014 case TRUTH_NOT_EXPR:
6015 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6016 break;
6017
6018 case TRUTH_ANDIF_EXPR:
6019 if (if_false_label == 0)
6020 if_false_label = drop_through_label = gen_label_rtx ();
6021 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6022 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6023 break;
6024
6025 case TRUTH_ORIF_EXPR:
6026 if (if_true_label == 0)
6027 if_true_label = drop_through_label = gen_label_rtx ();
6028 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6029 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6030 break;
6031
6032 case COMPOUND_EXPR:
6033 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6034 free_temp_slots ();
6035 emit_queue ();
6036 do_pending_stack_adjust ();
6037 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6038 break;
6039
6040 case COMPONENT_REF:
6041 case BIT_FIELD_REF:
6042 case ARRAY_REF:
6043 {
6044 int bitsize, bitpos, unsignedp;
6045 enum machine_mode mode;
6046 tree type;
6047 tree offset;
6048 int volatilep = 0;
6049
6050 /* Get description of this reference. We don't actually care
6051 about the underlying object here. */
6052 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6053 &mode, &unsignedp, &volatilep);
6054
6055 type = type_for_size (bitsize, unsignedp);
6056 if (! SLOW_BYTE_ACCESS
6057 && type != 0 && bitsize >= 0
6058 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6059 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6060 != CODE_FOR_nothing))
6061 {
6062 do_jump (convert (type, exp), if_false_label, if_true_label);
6063 break;
6064 }
6065 goto normal;
6066 }
6067
6068 case COND_EXPR:
6069 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6070 if (integer_onep (TREE_OPERAND (exp, 1))
6071 && integer_zerop (TREE_OPERAND (exp, 2)))
6072 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6073
6074 else if (integer_zerop (TREE_OPERAND (exp, 1))
6075 && integer_onep (TREE_OPERAND (exp, 2)))
6076 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6077
6078 else
6079 {
6080 register rtx label1 = gen_label_rtx ();
6081 drop_through_label = gen_label_rtx ();
6082 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6083 /* Now the THEN-expression. */
6084 do_jump (TREE_OPERAND (exp, 1),
6085 if_false_label ? if_false_label : drop_through_label,
6086 if_true_label ? if_true_label : drop_through_label);
6087 /* In case the do_jump just above never jumps. */
6088 do_pending_stack_adjust ();
6089 emit_label (label1);
6090 /* Now the ELSE-expression. */
6091 do_jump (TREE_OPERAND (exp, 2),
6092 if_false_label ? if_false_label : drop_through_label,
6093 if_true_label ? if_true_label : drop_through_label);
6094 }
6095 break;
6096
6097 case EQ_EXPR:
6098 if (integer_zerop (TREE_OPERAND (exp, 1)))
6099 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6100 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6101 == MODE_INT)
6102 &&
6103 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6104 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6105 else
6106 comparison = compare (exp, EQ, EQ);
6107 break;
6108
6109 case NE_EXPR:
6110 if (integer_zerop (TREE_OPERAND (exp, 1)))
6111 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6112 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6113 == MODE_INT)
6114 &&
6115 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6116 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6117 else
6118 comparison = compare (exp, NE, NE);
6119 break;
6120
6121 case LT_EXPR:
6122 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6123 == MODE_INT)
6124 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6125 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6126 else
6127 comparison = compare (exp, LT, LTU);
6128 break;
6129
6130 case LE_EXPR:
6131 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6132 == MODE_INT)
6133 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6134 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6135 else
6136 comparison = compare (exp, LE, LEU);
6137 break;
6138
6139 case GT_EXPR:
6140 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6141 == MODE_INT)
6142 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6143 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6144 else
6145 comparison = compare (exp, GT, GTU);
6146 break;
6147
6148 case GE_EXPR:
6149 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6150 == MODE_INT)
6151 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6152 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6153 else
6154 comparison = compare (exp, GE, GEU);
6155 break;
6156
6157 default:
6158 normal:
6159 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6160 #if 0
6161 /* This is not needed any more and causes poor code since it causes
6162 comparisons and tests from non-SI objects to have different code
6163 sequences. */
6164 /* Copy to register to avoid generating bad insns by cse
6165 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6166 if (!cse_not_expected && GET_CODE (temp) == MEM)
6167 temp = copy_to_reg (temp);
6168 #endif
6169 do_pending_stack_adjust ();
6170 if (GET_CODE (temp) == CONST_INT)
6171 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6172 else if (GET_CODE (temp) == LABEL_REF)
6173 comparison = const_true_rtx;
6174 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6175 && !can_compare_p (GET_MODE (temp)))
6176 /* Note swapping the labels gives us not-equal. */
6177 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6178 else if (GET_MODE (temp) != VOIDmode)
6179 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6180 NE, 1, GET_MODE (temp), NULL_RTX, 0);
6181 else
6182 abort ();
6183 }
6184
6185 /* Do any postincrements in the expression that was tested. */
6186 emit_queue ();
6187
6188 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6189 straight into a conditional jump instruction as the jump condition.
6190 Otherwise, all the work has been done already. */
6191
6192 if (comparison == const_true_rtx)
6193 {
6194 if (if_true_label)
6195 emit_jump (if_true_label);
6196 }
6197 else if (comparison == const0_rtx)
6198 {
6199 if (if_false_label)
6200 emit_jump (if_false_label);
6201 }
6202 else if (comparison)
6203 do_jump_for_compare (comparison, if_false_label, if_true_label);
6204
6205 free_temp_slots ();
6206
6207 if (drop_through_label)
6208 {
6209 /* If do_jump produces code that might be jumped around,
6210 do any stack adjusts from that code, before the place
6211 where control merges in. */
6212 do_pending_stack_adjust ();
6213 emit_label (drop_through_label);
6214 }
6215 }
6216 \f
6217 /* Given a comparison expression EXP for values too wide to be compared
6218 with one insn, test the comparison and jump to the appropriate label.
6219 The code of EXP is ignored; we always test GT if SWAP is 0,
6220 and LT if SWAP is 1. */
6221
6222 static void
6223 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6224 tree exp;
6225 int swap;
6226 rtx if_false_label, if_true_label;
6227 {
6228 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6229 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6230 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6231 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6232 rtx drop_through_label = 0;
6233 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6234 int i;
6235
6236 if (! if_true_label || ! if_false_label)
6237 drop_through_label = gen_label_rtx ();
6238 if (! if_true_label)
6239 if_true_label = drop_through_label;
6240 if (! if_false_label)
6241 if_false_label = drop_through_label;
6242
6243 /* Compare a word at a time, high order first. */
6244 for (i = 0; i < nwords; i++)
6245 {
6246 rtx comp;
6247 rtx op0_word, op1_word;
6248
6249 if (WORDS_BIG_ENDIAN)
6250 {
6251 op0_word = operand_subword_force (op0, i, mode);
6252 op1_word = operand_subword_force (op1, i, mode);
6253 }
6254 else
6255 {
6256 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6257 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6258 }
6259
6260 /* All but high-order word must be compared as unsigned. */
6261 comp = compare_from_rtx (op0_word, op1_word,
6262 (unsignedp || i > 0) ? GTU : GT,
6263 unsignedp, word_mode, NULL_RTX, 0);
6264 if (comp == const_true_rtx)
6265 emit_jump (if_true_label);
6266 else if (comp != const0_rtx)
6267 do_jump_for_compare (comp, NULL_RTX, if_true_label);
6268
6269 /* Consider lower words only if these are equal. */
6270 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6271 NULL_RTX, 0);
6272 if (comp == const_true_rtx)
6273 emit_jump (if_false_label);
6274 else if (comp != const0_rtx)
6275 do_jump_for_compare (comp, NULL_RTX, if_false_label);
6276 }
6277
6278 if (if_false_label)
6279 emit_jump (if_false_label);
6280 if (drop_through_label)
6281 emit_label (drop_through_label);
6282 }
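/* For a two-word value the loop above amounts to this sketch (HI is
   the more significant word; all but the high word compare unsigned):

	if (op0.hi > op1.hi) goto if_true;
	if (op0.hi != op1.hi) goto if_false;
	if ((unsigned) op0.lo > (unsigned) op1.lo) goto if_true;
	goto if_false;
*/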
6283
6284 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6285 with one insn, test the comparison and jump to the appropriate label. */
6286
6287 static void
6288 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6289 tree exp;
6290 rtx if_false_label, if_true_label;
6291 {
6292 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6293 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6294 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6295 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6296 int i;
6297 rtx drop_through_label = 0;
6298
6299 if (! if_false_label)
6300 drop_through_label = if_false_label = gen_label_rtx ();
6301
6302 for (i = 0; i < nwords; i++)
6303 {
6304 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6305 operand_subword_force (op1, i, mode),
6306 EQ, 0, word_mode, NULL_RTX, 0);
6307 if (comp == const_true_rtx)
6308 emit_jump (if_false_label);
6309 else if (comp != const0_rtx)
6310 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6311 }
6312
6313 if (if_true_label)
6314 emit_jump (if_true_label);
6315 if (drop_through_label)
6316 emit_label (drop_through_label);
6317 }
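/* Equivalently, the code emitted for a two-word EQ_EXPR matches:

	if (op0.w0 != op1.w0) goto if_false;
	if (op0.w1 != op1.w1) goto if_false;
	goto if_true;
*/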
6318 \f
6319 /* Jump according to whether OP0 is 0.
6320 We assume that OP0 has an integer mode that is too wide
6321 for the available compare insns. */
6322
6323 static void
6324 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6325 rtx op0;
6326 rtx if_false_label, if_true_label;
6327 {
6328 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6329 int i;
6330 rtx drop_through_label = 0;
6331
6332 if (! if_false_label)
6333 drop_through_label = if_false_label = gen_label_rtx ();
6334
6335 for (i = 0; i < nwords; i++)
6336 {
6337 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6338 GET_MODE (op0)),
6339 const0_rtx, EQ, 0, word_mode, NULL_RTX, 0);
6340 if (comp == const_true_rtx)
6341 emit_jump (if_false_label);
6342 else if (comp != const0_rtx)
6343 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6344 }
6345
6346 if (if_true_label)
6347 emit_jump (if_true_label);
6348 if (drop_through_label)
6349 emit_label (drop_through_label);
6350 }
6351
6352 /* Given a comparison expression in rtl form, output conditional branches to
6353 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6354
6355 static void
6356 do_jump_for_compare (comparison, if_false_label, if_true_label)
6357 rtx comparison, if_false_label, if_true_label;
6358 {
6359 if (if_true_label)
6360 {
6361 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6362 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6363 else
6364 abort ();
6365
6366 if (if_false_label)
6367 emit_jump (if_false_label);
6368 }
6369 else if (if_false_label)
6370 {
6371 rtx insn;
6372 rtx prev = PREV_INSN (get_last_insn ());
6373 rtx branch = 0;
6374
6375 /* Output the branch with the opposite condition. Then try to invert
6376 what is generated. If more than one insn is a branch, or if the
6377 branch is not the last insn written, abort. If we can't invert
6378 the branch, make a true label, redirect this jump to it,
6379 emit a jump to the false label, and define the true label. */
6380
6381 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6382 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6383 else
6384 abort ();
6385
6386 /* Here we get the insn before what was just emitted.
6387 On some machines, emitting the branch can discard
6388 the previous compare insn and emit a replacement. */
6389 if (prev == 0)
6390 /* If there's only one preceding insn... */
6391 insn = get_insns ();
6392 else
6393 insn = NEXT_INSN (prev);
6394
6395 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6396 if (GET_CODE (insn) == JUMP_INSN)
6397 {
6398 if (branch)
6399 abort ();
6400 branch = insn;
6401 }
6402
6403 if (branch != get_last_insn ())
6404 abort ();
6405
6406 if (! invert_jump (branch, if_false_label))
6407 {
6408 if_true_label = gen_label_rtx ();
6409 redirect_jump (branch, if_true_label);
6410 emit_jump (if_false_label);
6411 emit_label (if_true_label);
6412 }
6413 }
6414 }
6415 \f
6416 /* Generate code for a comparison expression EXP
6417 (including code to compute the values to be compared)
6418 and set (CC0) according to the result.
6419 SIGNED_CODE should be the rtx operation for this comparison for
6420 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6421
6422 We force a stack adjustment unless there are currently
6423 things pushed on the stack that aren't yet used. */
6424
6425 static rtx
6426 compare (exp, signed_code, unsigned_code)
6427 register tree exp;
6428 enum rtx_code signed_code, unsigned_code;
6429 {
6430 register rtx op0
6431 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6432 register rtx op1
6433 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6434 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6435 register enum machine_mode mode = TYPE_MODE (type);
6436 int unsignedp = TREE_UNSIGNED (type);
6437 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6438
6439 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6440 ((mode == BLKmode)
6441 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
6442 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6443 }
6444
6445 /* Like compare but expects the values to compare as two rtx's.
6446 The decision as to signed or unsigned comparison must be made by the caller.
6447
6448 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6449 compared.
6450
6451 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6452 size of MODE should be used. */
6453
6454 rtx
6455 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6456 register rtx op0, op1;
6457 enum rtx_code code;
6458 int unsignedp;
6459 enum machine_mode mode;
6460 rtx size;
6461 int align;
6462 {
6463 /* If one operand is constant, make it the second one. */
6464
6465 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6466 {
6467 rtx tem = op0;
6468 op0 = op1;
6469 op1 = tem;
6470 code = swap_condition (code);
6471 }
6472
6473 if (flag_force_mem)
6474 {
6475 op0 = force_not_mem (op0);
6476 op1 = force_not_mem (op1);
6477 }
6478
6479 do_pending_stack_adjust ();
6480
6481 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6482 return simplify_relational_operation (code, mode, op0, op1);
6483
6484 /* If this is a signed equality comparison, we can do it as an
6485 unsigned comparison since zero-extension is cheaper than sign
6486 extension and comparisons with zero are done as unsigned. This is
6487 the case even on machines that can do fast sign extension, since
6488 zero-extension is easier to combine with other operations than
6489 sign-extension is. If we are comparing against a constant, we must
6490 convert it to what it would look like unsigned. */
6491 if ((code == EQ || code == NE) && ! unsignedp
6492 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
6493 {
6494 if (GET_CODE (op1) == CONST_INT
6495 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6496 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6497 unsignedp = 1;
6498 }
6499
6500 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6501
6502 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6503 }
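/* E.g. testing a signed char X for equality with -1: the constant is
   masked to 0xff and the compare is emitted unsigned, since equality
   is unaffected by signedness and zero-extension is the cheaper
   widening.  */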
6504 \f
6505 /* Generate code to calculate EXP using a store-flag instruction
6506 and return an rtx for the result. EXP is either a comparison
6507 or a TRUTH_NOT_EXPR whose operand is a comparison.
6508
6509 If TARGET is nonzero, store the result there if convenient.
6510
6511 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6512 cheap.
6513
6514 Return zero if there is no suitable set-flag instruction
6515 available on this machine.
6516
6517 Once expand_expr has been called on the arguments of the comparison,
6518 we are committed to doing the store flag, since it is not safe to
6519 re-evaluate the expression. We emit the store-flag insn by calling
6520 emit_store_flag, but only expand the arguments if we have a reason
6521 to believe that emit_store_flag will be successful. If we think that
6522 it will, but it isn't, we have to simulate the store-flag with a
6523 set/jump/set sequence. */
6524
6525 static rtx
6526 do_store_flag (exp, target, mode, only_cheap)
6527 tree exp;
6528 rtx target;
6529 enum machine_mode mode;
6530 int only_cheap;
6531 {
6532 enum rtx_code code;
6533 tree arg0, arg1, type;
6534 tree tem;
6535 enum machine_mode operand_mode;
6536 int invert = 0;
6537 int unsignedp;
6538 rtx op0, op1;
6539 enum insn_code icode;
6540 rtx subtarget = target;
6541 rtx result, label, pattern, jump_pat;
6542
6543 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
6544 result at the end. We can't simply invert the test since it would
6545 have already been inverted if it were valid. This case occurs for
6546 some floating-point comparisons. */
6547
6548 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
6549 invert = 1, exp = TREE_OPERAND (exp, 0);
6550
6551 arg0 = TREE_OPERAND (exp, 0);
6552 arg1 = TREE_OPERAND (exp, 1);
6553 type = TREE_TYPE (arg0);
6554 operand_mode = TYPE_MODE (type);
6555 unsignedp = TREE_UNSIGNED (type);
6556
6557 /* We won't bother with BLKmode store-flag operations because it would mean
6558 passing a lot of information to emit_store_flag. */
6559 if (operand_mode == BLKmode)
6560 return 0;
6561
6562 STRIP_NOPS (arg0);
6563 STRIP_NOPS (arg1);
6564
6565 /* Get the rtx comparison code to use. We know that EXP is a comparison
6566 operation of some type. Some comparisons against 1 and -1 can be
6567 converted to comparisons with zero. Do so here so that the tests
6568 below will be aware that we have a comparison with zero. These
6569 tests will not catch constants in the first operand, but constants
6570 are rarely passed as the first operand. */
6571
6572 switch (TREE_CODE (exp))
6573 {
6574 case EQ_EXPR:
6575 code = EQ;
6576 break;
6577 case NE_EXPR:
6578 code = NE;
6579 break;
6580 case LT_EXPR:
6581 if (integer_onep (arg1))
6582 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
6583 else
6584 code = unsignedp ? LTU : LT;
6585 break;
6586 case LE_EXPR:
6587 if (integer_all_onesp (arg1))
6588 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
6589 else
6590 code = unsignedp ? LEU : LE;
6591 break;
6592 case GT_EXPR:
6593 if (integer_all_onesp (arg1))
6594 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
6595 else
6596 code = unsignedp ? GTU : GT;
6597 break;
6598 case GE_EXPR:
6599 if (integer_onep (arg1))
6600 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
6601 else
6602 code = unsignedp ? GEU : GE;
6603 break;
6604 default:
6605 abort ();
6606 }
6607
6608 /* Put a constant second. */
6609 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
6610 {
6611 tem = arg0; arg0 = arg1; arg1 = tem;
6612 code = swap_condition (code);
6613 }
6614
6615 /* If this is an equality or inequality test of a single bit, we can
6616 do this by shifting the bit being tested to the low-order bit and
6617 masking the result with the constant 1. If the condition was EQ,
6618 we xor it with 1. This does not require an scc insn and is faster
6619 than an scc insn even if we have it. */
6620
6621 if ((code == NE || code == EQ)
6622 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6623 && integer_pow2p (TREE_OPERAND (arg0, 1))
6624 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
6625 {
6626 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
6627 NULL_RTX, VOIDmode, 0)));
6628
6629 if (subtarget == 0 || GET_CODE (subtarget) != REG
6630 || GET_MODE (subtarget) != operand_mode
6631 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
6632 subtarget = 0;
6633
6634 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
6635
6636 if (bitnum != 0)
6637 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
6638 size_int (bitnum), target, 1);
6639
6640 if (GET_MODE (op0) != mode)
6641 op0 = convert_to_mode (mode, op0, 1);
6642
6643 if (bitnum != TYPE_PRECISION (type) - 1)
6644 op0 = expand_and (op0, const1_rtx, target);
6645
6646 if ((code == EQ && ! invert) || (code == NE && invert))
6647 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
6648 OPTAB_LIB_WIDEN);
6649
6650 return op0;
6651 }
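/* E.g. `(x & 4) != 0' becomes `(x >> 2) & 1', and `(x & 4) == 0'
   becomes `((x >> 2) & 1) ^ 1'; no scc instruction is required.
   Testing the sign bit needs no masking at all, since the logical
   shift already leaves just 0 or 1.  */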
6652
6653 /* Now see if we are likely to be able to do this. Return if not. */
6654 if (! can_compare_p (operand_mode))
6655 return 0;
6656 icode = setcc_gen_code[(int) code];
6657 if (icode == CODE_FOR_nothing
6658 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
6659 {
6660 /* We can only do this if it is one of the special cases that
6661 can be handled without an scc insn. */
6662 if ((code == LT && integer_zerop (arg1))
6663 || (! only_cheap && code == GE && integer_zerop (arg1)))
6664 ;
6665 else if (BRANCH_COST >= 0
6666 && ! only_cheap && (code == NE || code == EQ)
6667 && TREE_CODE (type) != REAL_TYPE
6668 && ((abs_optab->handlers[(int) operand_mode].insn_code
6669 != CODE_FOR_nothing)
6670 || (ffs_optab->handlers[(int) operand_mode].insn_code
6671 != CODE_FOR_nothing)))
6672 ;
6673 else
6674 return 0;
6675 }
6676
6677 preexpand_calls (exp);
6678 if (subtarget == 0 || GET_CODE (subtarget) != REG
6679 || GET_MODE (subtarget) != operand_mode
6680 || ! safe_from_p (subtarget, arg1))
6681 subtarget = 0;
6682
6683 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
6684 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6685
6686 if (target == 0)
6687 target = gen_reg_rtx (mode);
6688
6689 result = emit_store_flag (target, code, op0, op1, operand_mode,
6690 unsignedp, 1);
6691
6692 if (result)
6693 {
6694 if (invert)
6695 result = expand_binop (mode, xor_optab, result, const1_rtx,
6696 result, 0, OPTAB_LIB_WIDEN);
6697 return result;
6698 }
6699
6700 /* If this failed, we have to do this with set/compare/jump/set code. */
6701 if (target == 0 || GET_CODE (target) != REG
6702 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
6703 target = gen_reg_rtx (GET_MODE (target));
6704
6705 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
6706 result = compare_from_rtx (op0, op1, code, unsignedp,
6707 operand_mode, NULL_RTX, 0);
6708 if (GET_CODE (result) == CONST_INT)
6709 return (((result == const0_rtx && ! invert)
6710 || (result != const0_rtx && invert))
6711 ? const0_rtx : const1_rtx);
6712
6713 label = gen_label_rtx ();
6714 if (bcc_gen_fctn[(int) code] == 0)
6715 abort ();
6716
6717 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
6718 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
6719 emit_label (label);
6720
6721 return target;
6722 }
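/* The set/compare/jump/set fallback above corresponds to this sketch
   (shown for INVERT == 0):

	target = 1;
	if (op0 <cond> op1) goto label;
	target = 0;
     label:
*/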
6723 \f
6724 /* Generate a tablejump instruction (used for switch statements). */
6725
6726 #ifdef HAVE_tablejump
6727
6728 /* INDEX is the value being switched on, with the lowest value
6729 in the table already subtracted.
6730 MODE is its expected mode (needed if INDEX is constant).
6731 RANGE is the length of the jump table.
6732 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
6733
6734 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
6735 index value is out of range. */
6736
6737 void
6738 do_tablejump (index, mode, range, table_label, default_label)
6739 rtx index, range, table_label, default_label;
6740 enum machine_mode mode;
6741 {
6742 register rtx temp, vector;
6743
6744 /* Do an unsigned comparison (in the proper mode) between the index
6745 expression and the value which represents the length of the range.
6746 Since we just finished subtracting the lower bound of the range
6747 from the index expression, this comparison allows us to simultaneously
6748 check that the original index expression value is both greater than
6749 or equal to the minimum value of the range and less than or equal to
6750 the maximum value of the range. */
6751
6752 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
6753 emit_jump_insn (gen_bltu (default_label));
6754
6755 /* If index is in range, it must fit in Pmode.
6756 Convert to Pmode so we can index with it. */
6757 if (mode != Pmode)
6758 index = convert_to_mode (Pmode, index, 1);
6759
6760 /* If flag_force_addr were to affect this address
6761 it could interfere with the tricky assumptions made
6762 about addresses that contain label-refs,
6763 which may be valid only very near the tablejump itself. */
6764 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
6765 GET_MODE_SIZE, because this indicates how large insns are. The other
6766 uses should all be Pmode, because they are addresses. This code
6767 could fail if addresses and insns are not the same size. */
6768 index = memory_address_noforce
6769 (CASE_VECTOR_MODE,
6770 gen_rtx (PLUS, Pmode,
6771 gen_rtx (MULT, Pmode, index,
6772 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
6773 gen_rtx (LABEL_REF, Pmode, table_label)));
6774 temp = gen_reg_rtx (CASE_VECTOR_MODE);
6775 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
6776 RTX_UNCHANGING_P (vector) = 1;
6777 convert_move (temp, vector, 0);
6778
6779 emit_jump_insn (gen_tablejump (temp, table_label));
6780
6781 #ifndef CASE_VECTOR_PC_RELATIVE
6782 /* If we are generating PIC code or if the table is PC-relative, the
6783 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
6784 if (! flag_pic)
6785 emit_barrier ();
6786 #endif
6787 }
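/* Overall the emitted sequence corresponds to this sketch:

	if ((unsigned) index > range) goto default_label;
	goto *table_label[index];
*/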
6788
6789 #endif /* HAVE_tablejump */