/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "gvarargs.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

rtx store_expr ();
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
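
/* Illustrative note, not part of the original file: with the default
   MOVE_RATIO of 15, a constant-size block copy for which
   move_by_pieces_ninsns reports at most 14 move insns is expanded
   inline; anything costlier falls through to a movstr pattern or a
   library call (see emit_block_move below).  */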

/* This array records the insn_code of insns to perform block moves.  */
static enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx (REG, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }

      movstr_optab[(int) mode] = CODE_FOR_nothing;
    }

  end_sequence ();

#ifdef HAVE_movstrqi
  if (HAVE_movstrqi)
    movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
#endif
#ifdef HAVE_movstrhi
  if (HAVE_movstrhi)
    movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
#endif
#ifdef HAVE_movstrsi
  if (HAVE_movstrsi)
    movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
#endif
#ifdef HAVE_movstrdi
  if (HAVE_movstrdi)
    movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
#endif
#ifdef HAVE_movstrti
  if (HAVE_movstrti)
    movstr_optab[(int) TImode] = CODE_FOR_movstrti;
#endif
}
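
/* Illustrative note, not part of the original file: the arrays filled
   in above are consulted like this elsewhere in this file,

	if (direct_load[(int) mode])
	  ... a (set (reg:MODE r) (mem:MODE addr)) insn is recognized,
	      so a field can be accessed in MODE directly ...

   and correspondingly for direct_store when storing.  */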

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
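
/* Illustrative sketch, not part of the original file: the typical life
   cycle of a QUEUED for a postincrement such as `i++' used in an
   address.  All names below are hypothetical.

	rtx q = enqueue_insn (i_rtx, gen_add2_insn (i_rtx, const1_rtx));
	... every use of the pre-increment value goes through
	    protect_from_queue (q, 0) just before being put in an insn ...
	emit_queue ();		the queued increment insns come out here

   If the queue has already been flushed when protect_from_queue is
   called, a copy of the old value is recovered as described above.  */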
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;
            }
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, from));
      return;
    }

  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
        {
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi
          if (HAVE_extendpsisi)
            {
              emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
           && ! MEM_VOLATILE_P (from)
           && direct_load[(int) to_mode]
           && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == REG
          || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
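
/* Usage sketch, not part of the original file: widening a SImode value
   to DImode with sign extension.  The names SRC and WIDE are
   hypothetical.

	rtx src = ...;					value in SImode
	rtx wide = convert_to_mode (DImode, src, 0);	0 means signed

   The result may be SRC itself, a low part of SRC, or a fresh pseudo
   filled in by convert_move.  */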
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
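
/* Worked example, not part of the original file: with MOVE_MAX == 4,
   sufficient alignment, and all mov handlers present, l == 11 costs
   11/4 = 2 SImode moves (3 bytes left), then 1 HImode move (1 byte
   left), then 1 QImode move: 4 insns in all.  emit_block_move compares
   this count against MOVE_RATIO to choose between inline moves and a
   block-move insn or library call.  */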

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 = (data->autinc_from
               ? gen_rtx (MEM, mode, data->from_addr)
               : change_address (data->from, mode,
                                 plus_constant (data->from_addr,
                                                data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#endif
    }
}
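
/* Usage sketch, not part of the original file: copying a 16-byte
   aggregate from SRC_MEM to DST_MEM, both BLKmode MEMs known to be
   word aligned.  The names are hypothetical.

	emit_block_move (dst_mem, src_mem, GEN_INT (16), UNITS_PER_WORD);

   A constant size this small normally goes through move_by_pieces;
   larger or variable sizes use a movstr pattern or the libcall above.  */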
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                           GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                            GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Mark the instructions since PREV as a libcall block.
   Add a REG_LIBCALL note to the first of them and a REG_RETVAL note
   to the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
                                   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
                                    REG_NOTES (insn_first));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
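
/* Usage sketch, not part of the original file: zeroing a 32-byte
   BLKmode object whose address is held in the hypothetical rtx ADDR.

	clear_storage (gen_rtx (MEM, BLKmode, addr), 32);

   For a non-BLKmode OBJECT the size argument is unused and the store
   is a single move of const0_rtx.  */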

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
                             (class == MODE_COMPLEX_INT
                              ? MODE_INT : MODE_FLOAT),
                             0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && submode != BLKmode
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
        emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                 ((stack ? change_address (x, submode, (rtx) 0)
                   : gen_highpart (submode, x)),
                  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                 ((stack ? change_address (x, submode, (rtx) 0)
                   : gen_lowpart (submode, x)),
                  gen_lowpart (submode, y)));

      group_insns (prev);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }
      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}
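
/* Usage sketch, not part of the original file: loading an immediate
   into a fresh pseudo.  TEMP is hypothetical.

	rtx temp = gen_reg_rtx (SImode);
	emit_move_insn (temp, GEN_INT (42));

   Constants may have VOIDmode; one the target cannot encode directly
   is forced into the constant pool first.  */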
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
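
/* Illustrative note, not part of the original file: on a machine where
   the stack grows downward, STACK_PUSH_CODE is PRE_DEC, so wrapping the
   result in a MEM gives a push destination of the shape

	(mem:M (pre_dec:P (reg sp)))

   and emitting a move into it both stores the value and adjusts the
   stack pointer.  */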

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.
1744
1745 ARGS_SO_FAR is the size of args previously pushed for this call. */
1746
1747 void
1748 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1749 args_addr, args_so_far)
1750 register rtx x;
1751 enum machine_mode mode;
1752 tree type;
1753 rtx size;
1754 int align;
1755 int partial;
1756 rtx reg;
1757 int extra;
1758 rtx args_addr;
1759 rtx args_so_far;
1760 {
1761 rtx xinner;
1762 enum direction stack_direction
1763 #ifdef STACK_GROWS_DOWNWARD
1764 = downward;
1765 #else
1766 = upward;
1767 #endif
1768
1769 /* Decide where to pad the argument: `downward' for below,
1770 `upward' for above, or `none' for don't pad it.
1771 Default is below for small data on big-endian machines; else above. */
1772 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1773
1774 /* Invert direction if stack is post-update. */
1775 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1776 if (where_pad != none)
1777 where_pad = (where_pad == downward ? upward : downward);
1778
1779 xinner = x = protect_from_queue (x, 0);
1780
1781 if (mode == BLKmode)
1782 {
1783 /* Copy a block into the stack, entirely or partially. */
1784
1785 register rtx temp;
1786 int used = partial * UNITS_PER_WORD;
1787 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1788 int skip;
1789
1790 if (size == 0)
1791 abort ();
1792
1793 used -= offset;
1794
1795 /* USED is now the # of bytes we need not copy to the stack
1796 because registers will take care of them. */
1797
1798 if (partial != 0)
1799 xinner = change_address (xinner, BLKmode,
1800 plus_constant (XEXP (xinner, 0), used));
1801
1802 /* If the partial register-part of the arg counts in its stack size,
1803 skip the part of stack space corresponding to the registers.
1804 Otherwise, start copying to the beginning of the stack space,
1805 by setting SKIP to 0. */
1806 #ifndef REG_PARM_STACK_SPACE
1807 skip = 0;
1808 #else
1809 skip = used;
1810 #endif
1811
1812 #ifdef PUSH_ROUNDING
1813 /* Do it with several push insns if that doesn't take lots of insns
1814 and if there is no difficulty with push insns that skip bytes
1815 on the stack for alignment purposes. */
1816 if (args_addr == 0
1817 && GET_CODE (size) == CONST_INT
1818 && skip == 0
1819 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1820 < MOVE_RATIO)
1821 /* Here we avoid the case of a structure whose weak alignment
1822 forces many pushes of a small amount of data,
1823 and such small pushes do rounding that causes trouble. */
1824 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1825 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1826 || PUSH_ROUNDING (align) == align)
1827 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1828 {
1829 /* Push padding now if padding above and stack grows down,
1830 or if padding below and stack grows up.
1831 But if space already allocated, this has already been done. */
1832 if (extra && args_addr == 0
1833 && where_pad != none && where_pad != stack_direction)
1834 anti_adjust_stack (GEN_INT (extra));
1835
1836 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1837 INTVAL (size) - used, align);
1838 }
1839 else
1840 #endif /* PUSH_ROUNDING */
1841 {
1842 /* Otherwise make space on the stack and copy the data
1843 to the address of that space. */
1844
1845 /* Deduct words put into registers from the size we must copy. */
1846 if (partial != 0)
1847 {
1848 if (GET_CODE (size) == CONST_INT)
1849 size = GEN_INT (INTVAL (size) - used);
1850 else
1851 size = expand_binop (GET_MODE (size), sub_optab, size,
1852 GEN_INT (used), NULL_RTX, 0,
1853 OPTAB_LIB_WIDEN);
1854 }
1855
1856 /* Get the address of the stack space.
1857 In this case, we do not deal with EXTRA separately.
1858 A single stack adjust will do. */
1859 if (! args_addr)
1860 {
1861 temp = push_block (size, extra, where_pad == downward);
1862 extra = 0;
1863 }
1864 else if (GET_CODE (args_so_far) == CONST_INT)
1865 temp = memory_address (BLKmode,
1866 plus_constant (args_addr,
1867 skip + INTVAL (args_so_far)));
1868 else
1869 temp = memory_address (BLKmode,
1870 plus_constant (gen_rtx (PLUS, Pmode,
1871 args_addr, args_so_far),
1872 skip));
1873
1874 /* TEMP is the address of the block. Copy the data there. */
1875 if (GET_CODE (size) == CONST_INT
1876 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1877 < MOVE_RATIO))
1878 {
1879 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1880 INTVAL (size), align);
1881 goto ret;
1882 }
1883 /* Try the most limited insn first, because there's no point
1884 including more than one in the machine description unless
1885 the more limited one has some advantage. */
1886 #ifdef HAVE_movstrqi
1887 if (HAVE_movstrqi
1888 && GET_CODE (size) == CONST_INT
1889 && ((unsigned) INTVAL (size)
1890 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1891 {
1892 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1893 xinner, size, GEN_INT (align)));
1894 goto ret;
1895 }
1896 #endif
1897 #ifdef HAVE_movstrhi
1898 if (HAVE_movstrhi
1899 && GET_CODE (size) == CONST_INT
1900 && ((unsigned) INTVAL (size)
1901 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1902 {
1903 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1904 xinner, size, GEN_INT (align)));
1905 goto ret;
1906 }
1907 #endif
1908 #ifdef HAVE_movstrsi
1909 if (HAVE_movstrsi)
1910 {
1911 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1912 xinner, size, GEN_INT (align)));
1913 goto ret;
1914 }
1915 #endif
1916 #ifdef HAVE_movstrdi
1917 if (HAVE_movstrdi)
1918 {
1919 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1920 xinner, size, GEN_INT (align)));
1921 goto ret;
1922 }
1923 #endif
1924
1925 #ifndef ACCUMULATE_OUTGOING_ARGS
1926 /* If the source is referenced relative to the stack pointer,
1927 copy it to another register to stabilize it. We do not need
1928 to do this if we know that we won't be changing sp. */
1929
1930 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1931 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1932 temp = copy_to_reg (temp);
1933 #endif
1934
1935 /* Make inhibit_defer_pop nonzero around the library call
1936 to force it to pop the bcopy arguments right away. */
1937 NO_DEFER_POP;
1938 #ifdef TARGET_MEM_FUNCTIONS
1939 emit_library_call (memcpy_libfunc, 0,
1940 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1941 size, Pmode);
1942 #else
1943 emit_library_call (bcopy_libfunc, 0,
1944 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1945 size, Pmode);
1946 #endif
1947 OK_DEFER_POP;
1948 }
1949 }
1950 else if (partial > 0)
1951 {
1952 /* Scalar partly in registers. */
1953
1954 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1955 int i;
1956 int not_stack;
1957 /* # words of start of argument
1958 that we must make space for but need not store. */
1959 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1960 int args_offset = INTVAL (args_so_far);
1961 int skip;
1962
1963 /* Push padding now if padding above and stack grows down,
1964 or if padding below and stack grows up.
1965 But if space already allocated, this has already been done. */
1966 if (extra && args_addr == 0
1967 && where_pad != none && where_pad != stack_direction)
1968 anti_adjust_stack (GEN_INT (extra));
1969
1970 /* If we make space by pushing it, we might as well push
1971 the real data. Otherwise, we can leave OFFSET nonzero
1972 and leave the space uninitialized. */
1973 if (args_addr == 0)
1974 offset = 0;
1975
1976 /* Now NOT_STACK gets the number of words that we don't need to
1977 allocate on the stack. */
1978 not_stack = partial - offset;
1979
1980 /* If the partial register-part of the arg counts in its stack size,
1981 skip the part of stack space corresponding to the registers.
1982 Otherwise, start copying to the beginning of the stack space,
1983 by setting SKIP to 0. */
1984 #ifndef REG_PARM_STACK_SPACE
1985 skip = 0;
1986 #else
1987 skip = not_stack;
1988 #endif
1989
1990 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1991 x = validize_mem (force_const_mem (mode, x));
1992
1993 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1994 SUBREGs of such registers are not allowed. */
1995 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1996 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1997 x = copy_to_reg (x);
1998
1999 /* Loop over all the words allocated on the stack for this arg. */
2000 /* We can do it by words, because any scalar bigger than a word
2001 has a size a multiple of a word. */
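/* (Illustrative example: on a 32-bit machine, a DImode scalar gives
SIZE == 8 / 4 == 2 words here; actual mode and word sizes are of
course target-dependent.)  */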
2002 #ifndef PUSH_ARGS_REVERSED
2003 for (i = not_stack; i < size; i++)
2004 #else
2005 for (i = size - 1; i >= not_stack; i--)
2006 #endif
2007 if (i >= not_stack + offset)
2008 emit_push_insn (operand_subword_force (x, i, mode),
2009 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2010 0, args_addr,
2011 GEN_INT (args_offset + ((i - not_stack + skip)
2012 * UNITS_PER_WORD)));
2013 }
2014 else
2015 {
2016 rtx addr;
2017
2018 /* Push padding now if padding above and stack grows down,
2019 or if padding below and stack grows up.
2020 But if space already allocated, this has already been done. */
2021 if (extra && args_addr == 0
2022 && where_pad != none && where_pad != stack_direction)
2023 anti_adjust_stack (GEN_INT (extra));
2024
2025 #ifdef PUSH_ROUNDING
2026 if (args_addr == 0)
2027 addr = gen_push_operand ();
2028 else
2029 #endif
2030 if (GET_CODE (args_so_far) == CONST_INT)
2031 addr
2032 = memory_address (mode,
2033 plus_constant (args_addr, INTVAL (args_so_far)));
2034 else
2035 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2036 args_so_far));
2037
2038 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2039 }
2040
2041 ret:
2042 /* If part should go in registers, copy that part
2043 into the appropriate registers. Do this now, at the end,
2044 since mem-to-mem copies above may do function calls. */
2045 if (partial > 0)
2046 move_block_to_reg (REGNO (reg), x, partial, mode);
2047
2048 if (extra && args_addr == 0 && where_pad == stack_direction)
2049 anti_adjust_stack (GEN_INT (extra));
2050 }
2051 \f
2052 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2053 (emitting the queue unless NO_QUEUE is nonzero),
2054 for a value of mode OUTMODE,
2055 with NARGS different arguments, passed as alternating rtx values
2056 and machine_modes to convert them to.
2057 The rtx values should have been passed through protect_from_queue already.
2058
2059 NO_QUEUE will be true if and only if the library call is a `const' call
2060 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2061 to the variable is_const in expand_call.
2062
2063 NO_QUEUE must be true for const calls, because if it isn't, then
2064 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2065 and will be lost if the libcall sequence is optimized away.
2066
2067 NO_QUEUE must be false for non-const calls, because if it isn't, the
2068 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2069 optimized. For instance, the instruction scheduler may incorrectly
2070 move memory references across the non-const call. */
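/* An illustrative invocation, modeled on the calls made elsewhere in
this file to copy SIZE bytes via the memcpy library function; here
DEST_ADDR and SRC_ADDR stand for Pmode rtx address values:

emit_library_call (memcpy_libfunc, 0,
VOIDmode, 3, dest_addr, Pmode, src_addr, Pmode,
size, Pmode);  */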
2071
2072 void
2073 emit_library_call (va_alist)
2074 va_dcl
2075 {
2076 va_list p;
2077 struct args_size args_size;
2078 register int argnum;
2079 enum machine_mode outmode;
2080 int nargs;
2081 rtx fun;
2082 rtx orgfun;
2083 int inc;
2084 int count;
2085 rtx argblock = 0;
2086 CUMULATIVE_ARGS args_so_far;
2087 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2088 struct args_size offset; struct args_size size; };
2089 struct arg *argvec;
2090 int old_inhibit_defer_pop = inhibit_defer_pop;
2091 int no_queue = 0;
2092 rtx use_insns;
2093
2094 va_start (p);
2095 orgfun = fun = va_arg (p, rtx);
2096 no_queue = va_arg (p, int);
2097 outmode = va_arg (p, enum machine_mode);
2098 nargs = va_arg (p, int);
2099
2100 /* Copy all the libcall arguments out of the varargs data
2101 and into a vector ARGVEC.
2102
2103 Compute how to pass each argument. We only support a very small subset
2104 of the full argument passing conventions to limit complexity here since
2105 library functions shouldn't have many args. */
2106
2107 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2108
2109 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2110
2111 args_size.constant = 0;
2112 args_size.var = 0;
2113
2114 for (count = 0; count < nargs; count++)
2115 {
2116 rtx val = va_arg (p, rtx);
2117 enum machine_mode mode = va_arg (p, enum machine_mode);
2118
2119 /* We cannot convert the arg value to the mode the library wants here;
2120 we must do it earlier, where we know the signedness of the arg. */
2121 if (mode == BLKmode
2122 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2123 abort ();
2124
2125 /* On some machines, there's no way to pass a float to a library function.
2126 Pass it as a double instead. */
2127 #ifdef LIBGCC_NEEDS_DOUBLE
2128 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2129 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2130 #endif
2131
2132 /* There's no need to call protect_from_queue, because
2133 either emit_move_insn or emit_push_insn will do that. */
2134
2135 /* Make sure it is a reasonable operand for a move or push insn. */
2136 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2137 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2138 val = force_operand (val, NULL_RTX);
2139
2140 argvec[count].value = val;
2141 argvec[count].mode = mode;
2142
2143 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2144 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2145 abort ();
2146 #endif
2147
2148 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2149 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2150 abort ();
2151 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2152 argvec[count].partial
2153 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2154 #else
2155 argvec[count].partial = 0;
2156 #endif
2157
2158 locate_and_pad_parm (mode, NULL_TREE,
2159 argvec[count].reg && argvec[count].partial == 0,
2160 NULL_TREE, &args_size, &argvec[count].offset,
2161 &argvec[count].size);
2162
2163 if (argvec[count].size.var)
2164 abort ();
2165
2166 #ifndef REG_PARM_STACK_SPACE
2167 if (argvec[count].partial)
2168 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2169 #endif
2170
2171 if (argvec[count].reg == 0 || argvec[count].partial != 0
2172 #ifdef REG_PARM_STACK_SPACE
2173 || 1
2174 #endif
2175 )
2176 args_size.constant += argvec[count].size.constant;
2177
2178 #ifdef ACCUMULATE_OUTGOING_ARGS
2179 /* If this arg is actually passed on the stack, it might be
2180 clobbering something we already put there (this library call might
2181 be inside the evaluation of an argument to a function whose call
2182 requires the stack). This will only occur when the library call
2183 has sufficient args to run out of argument registers. Abort in
2184 this case; if this ever occurs, code must be added to save and
2185 restore the arg slot. */
2186
2187 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2188 abort ();
2189 #endif
2190
2191 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
2192 }
2193 va_end (p);
2194
2195 /* If this machine requires an external definition for library
2196 functions, write one out. */
2197 assemble_external_libcall (fun);
2198
2199 #ifdef STACK_BOUNDARY
2200 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2201 / STACK_BYTES) * STACK_BYTES);
2202 #endif
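/* (E.g., with STACK_BYTES == 8, a 20-byte argument block is rounded
up to 24 bytes by the computation above.)  */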
2203
2204 #ifdef REG_PARM_STACK_SPACE
2205 args_size.constant = MAX (args_size.constant,
2206 REG_PARM_STACK_SPACE (NULL_TREE));
2207 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2208 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2209 #endif
2210 #endif
2211
2212 #ifdef ACCUMULATE_OUTGOING_ARGS
2213 if (args_size.constant > current_function_outgoing_args_size)
2214 current_function_outgoing_args_size = args_size.constant;
2215 args_size.constant = 0;
2216 #endif
2217
2218 #ifndef PUSH_ROUNDING
2219 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2220 #endif
2221
2222 #ifdef PUSH_ARGS_REVERSED
2223 inc = -1;
2224 argnum = nargs - 1;
2225 #else
2226 inc = 1;
2227 argnum = 0;
2228 #endif
2229
2230 /* Push the args that need to be pushed. */
2231
2232 for (count = 0; count < nargs; count++, argnum += inc)
2233 {
2234 register enum machine_mode mode = argvec[argnum].mode;
2235 register rtx val = argvec[argnum].value;
2236 rtx reg = argvec[argnum].reg;
2237 int partial = argvec[argnum].partial;
2238
2239 if (! (reg != 0 && partial == 0))
2240 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2241 argblock, GEN_INT (argvec[argnum].offset.constant));
2242 NO_DEFER_POP;
2243 }
2244
2245 #ifdef PUSH_ARGS_REVERSED
2246 argnum = nargs - 1;
2247 #else
2248 argnum = 0;
2249 #endif
2250
2251 /* Now load any reg parms into their regs. */
2252
2253 for (count = 0; count < nargs; count++, argnum += inc)
2254 {
2255 register enum machine_mode mode = argvec[argnum].mode;
2256 register rtx val = argvec[argnum].value;
2257 rtx reg = argvec[argnum].reg;
2258 int partial = argvec[argnum].partial;
2259
2260 if (reg != 0 && partial == 0)
2261 emit_move_insn (reg, val);
2262 NO_DEFER_POP;
2263 }
2264
2265 /* For version 1.37, try deleting this entirely. */
2266 if (! no_queue)
2267 emit_queue ();
2268
2269 /* Any regs containing parms remain in use through the call. */
2270 start_sequence ();
2271 for (count = 0; count < nargs; count++)
2272 if (argvec[count].reg != 0)
2273 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2274
2275 use_insns = get_insns ();
2276 end_sequence ();
2277
2278 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2279
2280 /* Don't allow popping to be deferred, since then
2281 cse'ing of library calls could delete a call and leave the pop. */
2282 NO_DEFER_POP;
2283
2284 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2285 will set inhibit_defer_pop to that value. */
2286
2287 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2288 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2289 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2290 old_inhibit_defer_pop + 1, use_insns, no_queue);
2291
2292 /* Now restore inhibit_defer_pop to its actual original value. */
2293 OK_DEFER_POP;
2294 }
2295 \f
2296 /* Expand an assignment that stores the value of FROM into TO.
2297 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2298 (This may contain a QUEUED rtx.)
2299 Otherwise, the returned value is not meaningful.
2300
2301 SUGGEST_REG is no longer actually used.
2302 It used to mean: copy the value through a register
2303 and return that register, if that is possible.
2304 Now we do this whenever WANT_VALUE is nonzero.
2305
2306 If the value stored is a constant, we return the constant. */
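/* A sketch of typical use (the MODIFY_EXPR case of expand_expr and
the front ends call this): for a statement `x = y;' whose value is
discarded, the call would look like

expand_assignment (x_tree, y_tree, 0, 0);

where X_TREE and Y_TREE are placeholder names for the two operand
trees.  */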
2307
2308 rtx
2309 expand_assignment (to, from, want_value, suggest_reg)
2310 tree to, from;
2311 int want_value;
2312 int suggest_reg;
2313 {
2314 register rtx to_rtx = 0;
2315 rtx result;
2316
2317 /* Don't crash if the lhs of the assignment was erroneous. */
2318
2319 if (TREE_CODE (to) == ERROR_MARK)
2320 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2321
2322 /* Assignment of a structure component needs special treatment
2323 if the structure component's rtx is not simply a MEM.
2324 Assignment of an array element at a constant index
2325 has the same problem. */
2326
2327 if (TREE_CODE (to) == COMPONENT_REF
2328 || TREE_CODE (to) == BIT_FIELD_REF
2329 || (TREE_CODE (to) == ARRAY_REF
2330 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2331 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2332 {
2333 enum machine_mode mode1;
2334 int bitsize;
2335 int bitpos;
2336 tree offset;
2337 int unsignedp;
2338 int volatilep = 0;
2339 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2340 &mode1, &unsignedp, &volatilep);
2341
2342 /* If we are going to use store_bit_field and extract_bit_field,
2343 make sure to_rtx will be safe for multiple use. */
2344
2345 if (mode1 == VOIDmode && want_value)
2346 tem = stabilize_reference (tem);
2347
2348 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2349 if (offset != 0)
2350 {
2351 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2352
2353 if (GET_CODE (to_rtx) != MEM)
2354 abort ();
2355 to_rtx = change_address (to_rtx, VOIDmode,
2356 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2357 force_reg (Pmode, offset_rtx)));
2358 }
2359 if (volatilep)
2360 {
2361 if (GET_CODE (to_rtx) == MEM)
2362 MEM_VOLATILE_P (to_rtx) = 1;
2363 #if 0 /* This was turned off because, when a field is volatile
2364 in an object which is not volatile, the object may be in a register,
2365 and then we would abort over here. */
2366 else
2367 abort ();
2368 #endif
2369 }
2370
2371 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2372 (want_value
2373 /* Spurious cast makes HPUX compiler happy. */
2374 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2375 : VOIDmode),
2376 unsignedp,
2377 /* Required alignment of containing datum. */
2378 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2379 int_size_in_bytes (TREE_TYPE (tem)));
2380 preserve_temp_slots (result);
2381 free_temp_slots ();
2382
2383 return result;
2384 }
2385
2386 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2387 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2388
2389 if (to_rtx == 0)
2390 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2391
2392 /* In case we are returning the contents of an object which overlaps
2393 the place the value is being stored, use a safe function when copying
2394 a value through a pointer into a structure value return block. */
2395 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2396 && current_function_returns_struct
2397 && !current_function_returns_pcc_struct)
2398 {
2399 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2400 rtx size = expr_size (from);
2401
2402 #ifdef TARGET_MEM_FUNCTIONS
2403 emit_library_call (memcpy_libfunc, 0,
2404 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2405 XEXP (from_rtx, 0), Pmode,
2406 size, Pmode);
2407 #else
2408 emit_library_call (bcopy_libfunc, 0,
2409 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2410 XEXP (to_rtx, 0), Pmode,
2411 size, Pmode);
2412 #endif
2413
2414 preserve_temp_slots (to_rtx);
2415 free_temp_slots ();
2416 return to_rtx;
2417 }
2418
2419 /* Compute FROM and store the value in the rtx we got. */
2420
2421 result = store_expr (from, to_rtx, want_value);
2422 preserve_temp_slots (result);
2423 free_temp_slots ();
2424 return result;
2425 }
2426
2427 /* Generate code for computing expression EXP,
2428 and storing the value into TARGET.
2429 Returns TARGET or an equivalent value.
2430 TARGET may contain a QUEUED rtx.
2431
2432 If SUGGEST_REG is nonzero, copy the value through a register
2433 and return that register, if that is possible.
2434
2435 If the value stored is a constant, we return the constant. */
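/* For example, the SAVE_EXPR case of expand_expr below uses
store_expr (TREE_OPERAND (exp, 0), temp, 0)
to compute a subexpression directly into its saved location, with no
value wanted back.  */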
2436
2437 rtx
2438 store_expr (exp, target, suggest_reg)
2439 register tree exp;
2440 register rtx target;
2441 int suggest_reg;
2442 {
2443 register rtx temp;
2444 int dont_return_target = 0;
2445
2446 if (TREE_CODE (exp) == COMPOUND_EXPR)
2447 {
2448 /* Perform first part of compound expression, then assign from second
2449 part. */
2450 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2451 emit_queue ();
2452 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2453 }
2454 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2455 {
2456 /* For a conditional expression, get a safe form of the target. Then
2457 test the condition, doing the appropriate assignment on either
2458 side. This avoids the creation of unnecessary temporaries.
2459 For non-BLKmode, it is more efficient not to do this. */
2460
2461 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2462
2463 emit_queue ();
2464 target = protect_from_queue (target, 1);
2465
2466 NO_DEFER_POP;
2467 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2468 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2469 emit_queue ();
2470 emit_jump_insn (gen_jump (lab2));
2471 emit_barrier ();
2472 emit_label (lab1);
2473 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2474 emit_queue ();
2475 emit_label (lab2);
2476 OK_DEFER_POP;
2477 return target;
2478 }
2479 else if (suggest_reg && GET_CODE (target) == MEM
2480 && GET_MODE (target) != BLKmode)
2481 /* If target is in memory and caller wants value in a register instead,
2482 arrange that. Pass TARGET as target for expand_expr so that,
2483 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2484 We know expand_expr will not use the target in that case. */
2485 {
2486 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2487 GET_MODE (target), 0);
2488 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2489 temp = copy_to_reg (temp);
2490 dont_return_target = 1;
2491 }
2492 else if (queued_subexp_p (target))
2493 /* If target contains a postincrement, it is not safe
2494 to use as the returned value. It would access the wrong
2495 place by the time the queued increment gets output.
2496 So copy the value through a temporary and use that temp
2497 as the result. */
2498 {
2499 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2500 {
2501 /* Expand EXP into a new pseudo. */
2502 temp = gen_reg_rtx (GET_MODE (target));
2503 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2504 }
2505 else
2506 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2507 dont_return_target = 1;
2508 }
2509 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2510 /* If this is a scalar in a register that is stored in a wider mode
2511 than the declared mode, compute the result into its declared mode
2512 and then convert to the wider mode. Our value is the computed
2513 expression. */
2514 {
2515 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2516 convert_move (SUBREG_REG (target), temp,
2517 SUBREG_PROMOTED_UNSIGNED_P (target));
2518 return temp;
2519 }
2520 else
2521 {
2522 temp = expand_expr (exp, target, GET_MODE (target), 0);
2523 /* DO return TARGET if it's a specified hardware register.
2524 expand_return relies on this. */
2525 if (!(target && GET_CODE (target) == REG
2526 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2527 && CONSTANT_P (temp))
2528 dont_return_target = 1;
2529 }
2530
2531 /* If value was not generated in the target, store it there.
2532 Convert the value to TARGET's type first if necessary. */
2533
2534 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2535 {
2536 target = protect_from_queue (target, 1);
2537 if (GET_MODE (temp) != GET_MODE (target)
2538 && GET_MODE (temp) != VOIDmode)
2539 {
2540 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2541 if (dont_return_target)
2542 {
2543 /* In this case, we will return TEMP,
2544 so make sure it has the proper mode.
2545 But don't forget to store the value into TARGET. */
2546 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2547 emit_move_insn (target, temp);
2548 }
2549 else
2550 convert_move (target, temp, unsignedp);
2551 }
2552
2553 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2554 {
2555 /* Handle copying a string constant into an array.
2556 The string constant may be shorter than the array.
2557 So copy just the string's actual length, and clear the rest. */
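/* (E.g., for `char buf[8] = "abc";' the string data, including its
terminating null, is 4 bytes: we copy those 4 and clear the
remaining 4.)  */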
2558 rtx size;
2559
2560 /* Get the size of the data type of the string,
2561 which is actually the size of the target. */
2562 size = expr_size (exp);
2563 if (GET_CODE (size) == CONST_INT
2564 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2565 emit_block_move (target, temp, size,
2566 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2567 else
2568 {
2569 /* Compute the size of the data to copy from the string. */
2570 tree copy_size
2571 = fold (build (MIN_EXPR, sizetype,
2572 size_binop (CEIL_DIV_EXPR,
2573 TYPE_SIZE (TREE_TYPE (exp)),
2574 size_int (BITS_PER_UNIT)),
2575 convert (sizetype,
2576 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2577 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2578 VOIDmode, 0);
2579 rtx label = 0;
2580
2581 /* Copy that much. */
2582 emit_block_move (target, temp, copy_size_rtx,
2583 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2584
2585 /* Figure out how much is left in TARGET
2586 that we have to clear. */
2587 if (GET_CODE (copy_size_rtx) == CONST_INT)
2588 {
2589 temp = plus_constant (XEXP (target, 0),
2590 TREE_STRING_LENGTH (exp));
2591 size = plus_constant (size,
2592 - TREE_STRING_LENGTH (exp));
2593 }
2594 else
2595 {
2596 enum machine_mode size_mode = Pmode;
2597
2598 temp = force_reg (Pmode, XEXP (target, 0));
2599 temp = expand_binop (size_mode, add_optab, temp,
2600 copy_size_rtx, NULL_RTX, 0,
2601 OPTAB_LIB_WIDEN);
2602
2603 size = expand_binop (size_mode, sub_optab, size,
2604 copy_size_rtx, NULL_RTX, 0,
2605 OPTAB_LIB_WIDEN);
2606
2607 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2608 GET_MODE (size), 0, 0);
2609 label = gen_label_rtx ();
2610 emit_jump_insn (gen_blt (label));
2611 }
2612
2613 if (size != const0_rtx)
2614 {
2615 #ifdef TARGET_MEM_FUNCTIONS
2616 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2617 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2618 #else
2619 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2620 temp, Pmode, size, Pmode);
2621 #endif
2622 }
2623 if (label)
2624 emit_label (label);
2625 }
2626 }
2627 else if (GET_MODE (temp) == BLKmode)
2628 emit_block_move (target, temp, expr_size (exp),
2629 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2630 else
2631 emit_move_insn (target, temp);
2632 }
2633 if (dont_return_target)
2634 return temp;
2635 return target;
2636 }
2637 \f
2638 /* Store the value of constructor EXP into the rtx TARGET.
2639 TARGET is either a REG or a MEM. */
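/* A sketch: for `struct { int a, b; } v = { 1, 2 };' each element of
the CONSTRUCTOR is stored into the matching field of TARGET via
store_field below, after any needed clearing of the whole object.  */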
2640
2641 static void
2642 store_constructor (exp, target)
2643 tree exp;
2644 rtx target;
2645 {
2646 tree type = TREE_TYPE (exp);
2647
2648 /* We know our target cannot conflict, since safe_from_p has been called. */
2649 #if 0
2650 /* Don't try copying piece by piece into a hard register
2651 since that is vulnerable to being clobbered by EXP.
2652 Instead, construct in a pseudo register and then copy it all. */
2653 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2654 {
2655 rtx temp = gen_reg_rtx (GET_MODE (target));
2656 store_constructor (exp, temp);
2657 emit_move_insn (target, temp);
2658 return;
2659 }
2660 #endif
2661
2662 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2663 {
2664 register tree elt;
2665
2666 /* Inform later passes that the whole union value is dead. */
2667 if (TREE_CODE (type) == UNION_TYPE)
2668 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2669
2670 /* If we are building a static constructor into a register,
2671 set the initial value as zero so we can fold the value into
2672 a constant. */
2673 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2674 emit_move_insn (target, const0_rtx);
2675
2676 /* If the constructor has fewer fields than the structure,
2677 clear the whole structure first. */
2678 else if (list_length (CONSTRUCTOR_ELTS (exp))
2679 != list_length (TYPE_FIELDS (type)))
2680 clear_storage (target, int_size_in_bytes (type));
2681 else
2682 /* Inform later passes that the old value is dead. */
2683 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2684
2685 /* Store each element of the constructor into
2686 the corresponding field of TARGET. */
2687
2688 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2689 {
2690 register tree field = TREE_PURPOSE (elt);
2691 register enum machine_mode mode;
2692 int bitsize;
2693 int bitpos;
2694 int unsignedp;
2695
2696 /* Just ignore missing fields.
2697 We cleared the whole structure, above,
2698 if any fields are missing. */
2699 if (field == 0)
2700 continue;
2701
2702 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2703 unsignedp = TREE_UNSIGNED (field);
2704 mode = DECL_MODE (field);
2705 if (DECL_BIT_FIELD (field))
2706 mode = VOIDmode;
2707
2708 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2709 /* ??? This case remains to be written. */
2710 abort ();
2711
2712 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2713
2714 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2715 /* The alignment of TARGET is
2716 at least what its type requires. */
2717 VOIDmode, 0,
2718 TYPE_ALIGN (type) / BITS_PER_UNIT,
2719 int_size_in_bytes (type));
2720 }
2721 }
2722 else if (TREE_CODE (type) == ARRAY_TYPE)
2723 {
2724 register tree elt;
2725 register int i;
2726 tree domain = TYPE_DOMAIN (type);
2727 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2728 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2729 tree elttype = TREE_TYPE (type);
2730
2731 /* If the constructor has fewer fields than the structure,
2732 clear the whole structure first. Similarly if this is a
2733 static constructor of a non-BLKmode object. */
2734
2735 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2736 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2737 clear_storage (target, maxelt - minelt + 1);
2738 else
2739 /* Inform later passes that the old value is dead. */
2740 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2741
2742 /* Store each element of the constructor into
2743 the corresponding element of TARGET, determined
2744 by counting the elements. */
2745 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2746 elt;
2747 elt = TREE_CHAIN (elt), i++)
2748 {
2749 register enum machine_mode mode;
2750 int bitsize;
2751 int bitpos;
2752 int unsignedp;
2753
2754 mode = TYPE_MODE (elttype);
2755 bitsize = GET_MODE_BITSIZE (mode);
2756 unsignedp = TREE_UNSIGNED (elttype);
2757
2758 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2759
2760 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2761 /* The alignment of TARGET is
2762 at least what its type requires. */
2763 VOIDmode, 0,
2764 TYPE_ALIGN (type) / BITS_PER_UNIT,
2765 int_size_in_bytes (type));
2766 }
2767 }
2768
2769 else
2770 abort ();
2771 }
2772
2773 /* Store the value of EXP (an expression tree)
2774 into a subfield of TARGET which has mode MODE and occupies
2775 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2776 If MODE is VOIDmode, it means that we are storing into a bit-field.
2777
2778 If VALUE_MODE is VOIDmode, return nothing in particular.
2779 UNSIGNEDP is not used in this case.
2780
2781 Otherwise, return an rtx for the value stored. This rtx
2782 has mode VALUE_MODE if that is convenient to do.
2783 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2784
2785 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2786 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
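/* An illustrative call: storing EXP into a 3-bit bit-field that
starts 8 bits into TARGET, with no value wanted back, would be

store_field (target, 3, 8, VOIDmode, exp, VOIDmode, 0,
align, total_size);

where MODE is VOIDmode because the destination is a bit-field.  */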
2787
2788 static rtx
2789 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2790 unsignedp, align, total_size)
2791 rtx target;
2792 int bitsize, bitpos;
2793 enum machine_mode mode;
2794 tree exp;
2795 enum machine_mode value_mode;
2796 int unsignedp;
2797 int align;
2798 int total_size;
2799 {
2800 HOST_WIDE_INT width_mask = 0;
2801
2802 if (bitsize < HOST_BITS_PER_WIDE_INT)
2803 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
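/* (E.g., BITSIZE == 5 gives WIDTH_MASK == 0x1f.)  */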
2804
2805 /* If we are storing into an unaligned field of an aligned union that is
2806 in a register, we may have the mode of TARGET being an integer mode but
2807 MODE == BLKmode. In that case, get an aligned object whose size and
2808 alignment are the same as TARGET and store TARGET into it (we can avoid
2809 the store if the field being stored is the entire width of TARGET). Then
2810 call ourselves recursively to store the field into a BLKmode version of
2811 that object. Finally, load from the object into TARGET. This is not
2812 very efficient in general, but should only be slightly more expensive
2813 than the otherwise-required unaligned accesses. Perhaps this can be
2814 cleaned up later. */
2815
2816 if (mode == BLKmode
2817 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2818 {
2819 rtx object = assign_stack_temp (GET_MODE (target),
2820 GET_MODE_SIZE (GET_MODE (target)), 0);
2821 rtx blk_object = copy_rtx (object);
2822
2823 PUT_MODE (blk_object, BLKmode);
2824
2825 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2826 emit_move_insn (object, target);
2827
2828 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2829 align, total_size);
2830
2831 emit_move_insn (target, object);
2832
2833 return target;
2834 }
2835
2836 /* If the structure is in a register or if the component
2837 is a bit field, we cannot use addressing to access it.
2838 Use bit-field techniques or SUBREG to store in it. */
2839
2840 if (mode == VOIDmode
2841 || (mode != BLKmode && ! direct_store[(int) mode])
2842 || GET_CODE (target) == REG
2843 || GET_CODE (target) == SUBREG)
2844 {
2845 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2846 /* Store the value in the bitfield. */
2847 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2848 if (value_mode != VOIDmode)
2849 {
2850 /* The caller wants an rtx for the value. */
2851 /* If possible, avoid refetching from the bitfield itself. */
2852 if (width_mask != 0
2853 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2854 {
2855 tree count;
2856 enum machine_mode tmode;
2857
2858 if (unsignedp)
2859 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2860 tmode = GET_MODE (temp);
2861 if (tmode == VOIDmode)
2862 tmode = value_mode;
2863 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2864 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2865 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2866 }
2867 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2868 NULL_RTX, value_mode, 0, align,
2869 total_size);
2870 }
2871 return const0_rtx;
2872 }
2873 else
2874 {
2875 rtx addr = XEXP (target, 0);
2876 rtx to_rtx;
2877
2878 /* If a value is wanted, it must be the lhs;
2879 so make the address stable for multiple use. */
2880
2881 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2882 && ! CONSTANT_ADDRESS_P (addr)
2883 /* A frame-pointer reference is already stable. */
2884 && ! (GET_CODE (addr) == PLUS
2885 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2886 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2887 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2888 addr = copy_to_reg (addr);
2889
2890 /* Now build a reference to just the desired component. */
2891
2892 to_rtx = change_address (target, mode,
2893 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2894 MEM_IN_STRUCT_P (to_rtx) = 1;
2895
2896 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2897 }
2898 }
2899 \f
2900 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2901 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2902 ARRAY_REFs at constant positions and find the ultimate containing object,
2903 which we return.
2904
2905 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2906 bit position, and *PUNSIGNEDP to the signedness of the field.
2907 If the position of the field is variable, we store a tree
2908 giving the variable offset (in units) in *POFFSET.
2909 This offset is in addition to the bit position.
2910 If the position is not variable, we store 0 in *POFFSET.
2911
2912 If any of the extraction expressions is volatile,
2913 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2914
2915 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2916 is a mode that can be used to access the field. In that case, *PBITSIZE
2917 is redundant.
2918
2919 If the field describes a variable-sized object, *PMODE is set to
2920 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2921 this case, but the address of the object can be found. */
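/* A sketch: for a reference `s.f' where F is a 5-bit bit-field placed
16 bits into S, this returns the tree for S and sets *PBITSIZE = 5,
*PBITPOS = 16, *POFFSET = 0 and *PMODE = VOIDmode.  */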
2922
2923 tree
2924 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2925 tree exp;
2926 int *pbitsize;
2927 int *pbitpos;
2928 tree *poffset;
2929 enum machine_mode *pmode;
2930 int *punsignedp;
2931 int *pvolatilep;
2932 {
2933 tree size_tree = 0;
2934 enum machine_mode mode = VOIDmode;
2935 tree offset = 0;
2936
2937 if (TREE_CODE (exp) == COMPONENT_REF)
2938 {
2939 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2940 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2941 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2942 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2943 }
2944 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2945 {
2946 size_tree = TREE_OPERAND (exp, 1);
2947 *punsignedp = TREE_UNSIGNED (exp);
2948 }
2949 else
2950 {
2951 mode = TYPE_MODE (TREE_TYPE (exp));
2952 *pbitsize = GET_MODE_BITSIZE (mode);
2953 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2954 }
2955
2956 if (size_tree)
2957 {
2958 if (TREE_CODE (size_tree) != INTEGER_CST)
2959 mode = BLKmode, *pbitsize = -1;
2960 else
2961 *pbitsize = TREE_INT_CST_LOW (size_tree);
2962 }
2963
2964 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2965 and find the ultimate containing object. */
2966
2967 *pbitpos = 0;
2968
2969 while (1)
2970 {
2971 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2972 {
2973 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2974 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2975 : TREE_OPERAND (exp, 2));
2976
2977 if (TREE_CODE (pos) == PLUS_EXPR)
2978 {
2979 tree constant, var;
2980 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2981 {
2982 constant = TREE_OPERAND (pos, 0);
2983 var = TREE_OPERAND (pos, 1);
2984 }
2985 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2986 {
2987 constant = TREE_OPERAND (pos, 1);
2988 var = TREE_OPERAND (pos, 0);
2989 }
2990 else
2991 abort ();
2992 *pbitpos += TREE_INT_CST_LOW (constant);
2993 if (offset)
2994 offset = size_binop (PLUS_EXPR, offset,
2995 size_binop (FLOOR_DIV_EXPR, var,
2996 size_int (BITS_PER_UNIT)));
2997 else
2998 offset = size_binop (FLOOR_DIV_EXPR, var,
2999 size_int (BITS_PER_UNIT));
3000 }
3001 else if (TREE_CODE (pos) == INTEGER_CST)
3002 *pbitpos += TREE_INT_CST_LOW (pos);
3003 else
3004 {
3005 /* Assume here that the offset is a multiple of a unit.
3006 If not, there should be an explicitly added constant. */
3007 if (offset)
3008 offset = size_binop (PLUS_EXPR, offset,
3009 size_binop (FLOOR_DIV_EXPR, pos,
3010 size_int (BITS_PER_UNIT)));
3011 else
3012 offset = size_binop (FLOOR_DIV_EXPR, pos,
3013 size_int (BITS_PER_UNIT));
3014 }
3015 }
3016
3017 else if (TREE_CODE (exp) == ARRAY_REF
3018 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3019 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
3020 {
3021 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3022 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
3023 }
3024 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3025 && ! ((TREE_CODE (exp) == NOP_EXPR
3026 || TREE_CODE (exp) == CONVERT_EXPR)
3027 && (TYPE_MODE (TREE_TYPE (exp))
3028 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3029 break;
3030
3031 /* If any reference in the chain is volatile, the effect is volatile. */
3032 if (TREE_THIS_VOLATILE (exp))
3033 *pvolatilep = 1;
3034 exp = TREE_OPERAND (exp, 0);
3035 }
3036
3037 /* If this was a bit-field, see if there is a mode that allows direct
3038 access in case EXP is in memory. */
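/* (For instance, a 16-bit field whose bit position is a multiple of
its size could be accessed directly in HImode on typical machines;
mode widths are target-dependent.)  */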
3039 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
3040 {
3041 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3042 if (mode == BLKmode)
3043 mode = VOIDmode;
3044 }
3045
3046 *pmode = mode;
3047 *poffset = offset;
3048 #if 0
3049 /* We aren't finished fixing the callers to really handle nonzero offset. */
3050 if (offset != 0)
3051 abort ();
3052 #endif
3053
3054 return exp;
3055 }
3056 \f
3057 /* Given an rtx VALUE that may contain additions and multiplications,
3058 return an equivalent value that just refers to a register or memory.
3059 This is done by generating instructions to perform the arithmetic
3060 and returning a pseudo-register containing the value.
3061
3062 The returned value may be a REG, SUBREG, MEM or constant. */
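/* A sketch: given VALUE == (plus (reg R) (const_int 4)), this emits
an add insn and returns a pseudo (or TARGET, if TARGET is suitable)
holding R + 4.  */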
3063
3064 rtx
3065 force_operand (value, target)
3066 rtx value, target;
3067 {
3068 register optab binoptab = 0;
3069 /* Use a temporary to force order of execution of calls to
3070 `force_operand'. */
3071 rtx tmp;
3072 register rtx op2;
3073 /* Use subtarget as the target for operand 0 of a binary operation. */
3074 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3075
3076 if (GET_CODE (value) == PLUS)
3077 binoptab = add_optab;
3078 else if (GET_CODE (value) == MINUS)
3079 binoptab = sub_optab;
3080 else if (GET_CODE (value) == MULT)
3081 {
3082 op2 = XEXP (value, 1);
3083 if (!CONSTANT_P (op2)
3084 && !(GET_CODE (op2) == REG && op2 != subtarget))
3085 subtarget = 0;
3086 tmp = force_operand (XEXP (value, 0), subtarget);
3087 return expand_mult (GET_MODE (value), tmp,
3088 force_operand (op2, NULL_RTX),
3089 target, 0);
3090 }
3091
3092 if (binoptab)
3093 {
3094 op2 = XEXP (value, 1);
3095 if (!CONSTANT_P (op2)
3096 && !(GET_CODE (op2) == REG && op2 != subtarget))
3097 subtarget = 0;
3098 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3099 {
3100 binoptab = add_optab;
3101 op2 = negate_rtx (GET_MODE (value), op2);
3102 }
3103
3104 /* Check for an addition with OP2 a constant integer and our first
3105 operand a PLUS of a virtual register and something else. In that
3106 case, we want to emit the sum of the virtual register and the
3107 constant first and then add the other value. This allows virtual
3108 register instantiation to simply modify the constant rather than
3109 creating another one around this addition. */
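/* (Concretely: for (plus (plus VIRT X) (const_int 4)), where VIRT is
a virtual register, we first form VIRT + 4 -- which instantiation can
later rewrite as a single register-plus-offset address -- and only
then add X.)  */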
3110 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3111 && GET_CODE (XEXP (value, 0)) == PLUS
3112 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3113 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3114 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3115 {
3116 rtx temp = expand_binop (GET_MODE (value), binoptab,
3117 XEXP (XEXP (value, 0), 0), op2,
3118 subtarget, 0, OPTAB_LIB_WIDEN);
3119 return expand_binop (GET_MODE (value), binoptab, temp,
3120 force_operand (XEXP (XEXP (value, 0), 1), 0),
3121 target, 0, OPTAB_LIB_WIDEN);
3122 }
3123
3124 tmp = force_operand (XEXP (value, 0), subtarget);
3125 return expand_binop (GET_MODE (value), binoptab, tmp,
3126 force_operand (op2, NULL_RTX),
3127 target, 0, OPTAB_LIB_WIDEN);
3128 /* We give UNSIGNEDP = 0 to expand_binop
3129 because the only operations we are expanding here are signed ones. */
3130 }
3131 return value;
3132 }
3133 \f
3134 /* Subroutine of expand_expr:
3135 save the non-copied parts (LIST) of an expr (LHS), and return a list
3136 which can restore these values to their previous values,
3137 should something modify their storage. */
3138
3139 static tree
3140 save_noncopied_parts (lhs, list)
3141 tree lhs;
3142 tree list;
3143 {
3144 tree tail;
3145 tree parts = 0;
3146
3147 for (tail = list; tail; tail = TREE_CHAIN (tail))
3148 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3149 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3150 else
3151 {
3152 tree part = TREE_VALUE (tail);
3153 tree part_type = TREE_TYPE (part);
3154 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3155 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3156 int_size_in_bytes (part_type), 0);
3157 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3158 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3159 parts = tree_cons (to_be_saved,
3160 build (RTL_EXPR, part_type, NULL_TREE,
3161 (tree) target),
3162 parts);
3163 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3164 }
3165 return parts;
3166 }
3167
3168 /* Subroutine of expand_expr:
3169 record the non-copied parts (LIST) of an expr (LHS), and return a list
3170 which specifies the initial values of these parts. */
3171
3172 static tree
3173 init_noncopied_parts (lhs, list)
3174 tree lhs;
3175 tree list;
3176 {
3177 tree tail;
3178 tree parts = 0;
3179
3180 for (tail = list; tail; tail = TREE_CHAIN (tail))
3181 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3182 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3183 else
3184 {
3185 tree part = TREE_VALUE (tail);
3186 tree part_type = TREE_TYPE (part);
3187 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3188 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3189 }
3190 return parts;
3191 }
3192
3193 /* Subroutine of expand_expr: return nonzero iff there is no way that
3194 EXP can reference X, which is being modified. */
3195
3196 static int
3197 safe_from_p (x, exp)
3198 rtx x;
3199 tree exp;
3200 {
3201 rtx exp_rtl = 0;
3202 int i, nops;
3203
3204 if (x == 0)
3205 return 1;
3206
3207 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3208 find the underlying pseudo. */
3209 if (GET_CODE (x) == SUBREG)
3210 {
3211 x = SUBREG_REG (x);
3212 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3213 return 0;
3214 }
3215
3216 /* If X is a location in the outgoing argument area, it is always safe. */
3217 if (GET_CODE (x) == MEM
3218 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3219 || (GET_CODE (XEXP (x, 0)) == PLUS
3220 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3221 return 1;
3222
3223 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3224 {
3225 case 'd':
3226 exp_rtl = DECL_RTL (exp);
3227 break;
3228
3229 case 'c':
3230 return 1;
3231
3232 case 'x':
3233 if (TREE_CODE (exp) == TREE_LIST)
3234 return ((TREE_VALUE (exp) == 0
3235 || safe_from_p (x, TREE_VALUE (exp)))
3236 && (TREE_CHAIN (exp) == 0
3237 || safe_from_p (x, TREE_CHAIN (exp))));
3238 else
3239 return 0;
3240
3241 case '1':
3242 return safe_from_p (x, TREE_OPERAND (exp, 0));
3243
3244 case '2':
3245 case '<':
3246 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3247 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3248
3249 case 'e':
3250 case 'r':
3251 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3252 the expression. If it is set, we conflict iff we are that rtx or
3253 both are in memory. Otherwise, we check all operands of the
3254 expression recursively. */
3255
3256 switch (TREE_CODE (exp))
3257 {
3258 case ADDR_EXPR:
3259 return staticp (TREE_OPERAND (exp, 0));
3260
3261 case INDIRECT_REF:
3262 if (GET_CODE (x) == MEM)
3263 return 0;
3264 break;
3265
3266 case CALL_EXPR:
3267 exp_rtl = CALL_EXPR_RTL (exp);
3268 if (exp_rtl == 0)
3269 {
3270 /* Assume that the call will clobber all hard registers and
3271 all of memory. */
3272 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3273 || GET_CODE (x) == MEM)
3274 return 0;
3275 }
3276
3277 break;
3278
3279 case RTL_EXPR:
3280 exp_rtl = RTL_EXPR_RTL (exp);
3281 if (exp_rtl == 0)
3282 /* We don't know what this can modify. */
3283 return 0;
3284
3285 break;
3286
3287 case WITH_CLEANUP_EXPR:
3288 exp_rtl = RTL_EXPR_RTL (exp);
3289 break;
3290
3291 case SAVE_EXPR:
3292 exp_rtl = SAVE_EXPR_RTL (exp);
3293 break;
3294
3295 case BIND_EXPR:
3296 /* The only operand we look at is operand 1. The rest aren't
3297 part of the expression. */
3298 return safe_from_p (x, TREE_OPERAND (exp, 1));
3299
3300 case METHOD_CALL_EXPR:
3301 /* This takes an rtx argument, but shouldn't appear here. */
3302 abort ();
3303 }
3304
3305 /* If we have an rtx, we do not need to scan our operands. */
3306 if (exp_rtl)
3307 break;
3308
3309 nops = tree_code_length[(int) TREE_CODE (exp)];
3310 for (i = 0; i < nops; i++)
3311 if (TREE_OPERAND (exp, i) != 0
3312 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3313 return 0;
3314 }
3315
3316 /* If we have an rtx, find any enclosed object. Then see if we conflict
3317 with it. */
3318 if (exp_rtl)
3319 {
3320 if (GET_CODE (exp_rtl) == SUBREG)
3321 {
3322 exp_rtl = SUBREG_REG (exp_rtl);
3323 if (GET_CODE (exp_rtl) == REG
3324 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3325 return 0;
3326 }
3327
3328 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3329 are memory and EXP is not readonly. */
3330 return ! (rtx_equal_p (x, exp_rtl)
3331 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3332 && ! TREE_READONLY (exp)));
3333 }
3334
3335 /* If we reach here, it is safe. */
3336 return 1;
3337 }
3338
3339 /* Subroutine of expand_expr: return nonzero iff EXP is an
3340 expression whose type is statically determinable. */
3341
3342 static int
3343 fixed_type_p (exp)
3344 tree exp;
3345 {
3346 if (TREE_CODE (exp) == PARM_DECL
3347 || TREE_CODE (exp) == VAR_DECL
3348 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3349 || TREE_CODE (exp) == COMPONENT_REF
3350 || TREE_CODE (exp) == ARRAY_REF)
3351 return 1;
3352 return 0;
3353 }
3354 \f
3355 /* expand_expr: generate code for computing expression EXP.
3356 An rtx for the computed value is returned. The value is never null.
3357 In the case of a void EXP, const0_rtx is returned.
3358
3359 The value may be stored in TARGET if TARGET is nonzero.
3360 TARGET is just a suggestion; callers must assume that
3361 the rtx returned may not be the same as TARGET.
3362
3363 If TARGET is CONST0_RTX, it means that the value will be ignored.
3364
3365 If TMODE is not VOIDmode, it suggests generating the
3366 result in mode TMODE. But this is done only when convenient.
3367 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3368 TMODE is just a suggestion; callers must assume that
3369 the rtx returned may not have mode TMODE.
3370
3371 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3372 with a constant address even if that address is not normally legitimate.
3373 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3374
3375 If MODIFIER is EXPAND_SUM then when EXP is an addition
3376 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3377 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3378 products as above, or REG or MEM, or constant.
3379 Ordinarily in such cases we would output mul or add instructions
3380 and then return a pseudo reg containing the sum.
3381
3382 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3383 it also marks a label as absolutely required (it can't be dead).
3384 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3385 This is used for outputting expressions used in initializers. */
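/* Typical uses, as seen throughout this file:
expand_expr (exp, NULL_RTX, VOIDmode, 0)
computes EXP with no preference about where or in what mode the
result appears, while under EXPAND_SUM an expression such as `a + 3'
may come back simply as (plus (reg) (const_int 3)) with no add insn
emitted.  */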
3386
3387 rtx
3388 expand_expr (exp, target, tmode, modifier)
3389 register tree exp;
3390 rtx target;
3391 enum machine_mode tmode;
3392 enum expand_modifier modifier;
3393 {
3394 register rtx op0, op1, temp;
3395 tree type = TREE_TYPE (exp);
3396 int unsignedp = TREE_UNSIGNED (type);
3397 register enum machine_mode mode = TYPE_MODE (type);
3398 register enum tree_code code = TREE_CODE (exp);
3399 optab this_optab;
3400 /* Use subtarget as the target for operand 0 of a binary operation. */
3401 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3402 rtx original_target = target;
3403 int ignore = target == const0_rtx;
3404 tree context;
3405
3406 /* Don't use hard regs as subtargets, because the combiner
3407 can only handle pseudo regs. */
3408 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3409 subtarget = 0;
3410 /* Avoid subtargets inside loops,
3411 since they hide some invariant expressions. */
3412 if (preserve_subexpressions_p ())
3413 subtarget = 0;
3414
3415 if (ignore) target = 0, original_target = 0;
3416
3417 /* If will do cse, generate all results into pseudo registers
3418 since 1) that allows cse to find more things
3419 and 2) otherwise cse could produce an insn the machine
3420 cannot support. */
3421
3422 if (! cse_not_expected && mode != BLKmode && target
3423 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3424 target = subtarget;
3425
3426 /* Ensure we reference a volatile object even if value is ignored. */
3427 if (ignore && TREE_THIS_VOLATILE (exp)
3428 && mode != VOIDmode && mode != BLKmode)
3429 {
3430 target = gen_reg_rtx (mode);
3431 temp = expand_expr (exp, target, VOIDmode, modifier);
3432 if (temp != target)
3433 emit_move_insn (target, temp);
3434 return target;
3435 }
3436
3437 switch (code)
3438 {
3439 case LABEL_DECL:
3440 {
3441 tree function = decl_function_context (exp);
3442 /* Handle using a label in a containing function. */
3443 if (function != current_function_decl && function != 0)
3444 {
3445 struct function *p = find_function_data (function);
3446 /* Allocate in the memory associated with the function
3447 that the label is in. */
3448 push_obstacks (p->function_obstack,
3449 p->function_maybepermanent_obstack);
3450
3451 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3452 label_rtx (exp), p->forced_labels);
3453 pop_obstacks ();
3454 }
3455 else if (modifier == EXPAND_INITIALIZER)
3456 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3457 label_rtx (exp), forced_labels);
3458 temp = gen_rtx (MEM, FUNCTION_MODE,
3459 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3460 if (function != current_function_decl && function != 0)
3461 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3462 return temp;
3463 }
3464
3465 case PARM_DECL:
3466 if (DECL_RTL (exp) == 0)
3467 {
3468 error_with_decl (exp, "prior parameter's size depends on `%s'");
3469 return CONST0_RTX (mode);
3470 }
3471
3472 case FUNCTION_DECL:
3473 case VAR_DECL:
3474 case RESULT_DECL:
3475 if (DECL_RTL (exp) == 0)
3476 abort ();
3477 /* Ensure variable marked as used
3478 even if it doesn't go through a parser. */
3479 TREE_USED (exp) = 1;
3480 /* Handle variables inherited from containing functions. */
3481 context = decl_function_context (exp);
3482
3483 /* We treat inline_function_decl as an alias for the current function
3484 because that is the inline function whose vars, types, etc.
3485 are being merged into the current function.
3486 See expand_inline_function. */
3487 if (context != 0 && context != current_function_decl
3488 && context != inline_function_decl
3489 /* If var is static, we don't need a static chain to access it. */
3490 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3491 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3492 {
3493 rtx addr;
3494
3495 /* Mark as non-local and addressable. */
3496 DECL_NONLOCAL (exp) = 1;
3497 mark_addressable (exp);
3498 if (GET_CODE (DECL_RTL (exp)) != MEM)
3499 abort ();
3500 addr = XEXP (DECL_RTL (exp), 0);
3501 if (GET_CODE (addr) == MEM)
3502 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3503 else
3504 addr = fix_lexical_addr (addr, exp);
3505 return change_address (DECL_RTL (exp), mode, addr);
3506 }
3507
3508 /* This is the case of an array whose size is to be determined
3509 from its initializer, while the initializer is still being parsed.
3510 See expand_decl. */
3511 if (GET_CODE (DECL_RTL (exp)) == MEM
3512 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3513 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3514 XEXP (DECL_RTL (exp), 0));
3515 if (GET_CODE (DECL_RTL (exp)) == MEM
3516 && modifier != EXPAND_CONST_ADDRESS
3517 && modifier != EXPAND_SUM
3518 && modifier != EXPAND_INITIALIZER)
3519 {
3520 /* DECL_RTL probably contains a constant address.
3521 On RISC machines where a constant address isn't valid,
3522 make some insns to get that address into a register. */
3523 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3524 || (flag_force_addr
3525 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3526 return change_address (DECL_RTL (exp), VOIDmode,
3527 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3528 }
3529
3530 /* If the mode of DECL_RTL does not match that of the decl, it
3531 must be a promoted value. We return a SUBREG of the wanted mode,
3532 but mark it so that we know that it was already extended. */
3533
3534 if (GET_CODE (DECL_RTL (exp)) == REG
3535 && GET_MODE (DECL_RTL (exp)) != mode)
3536 {
3537 enum machine_mode decl_mode = DECL_MODE (exp);
3538
3539 /* Get the signedness used for this variable. Ensure we get the
3540 same mode we got when the variable was declared. */
3541
3542 PROMOTE_MODE (decl_mode, unsignedp, type);
3543
3544 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3545 abort ();
3546
3547 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3548 SUBREG_PROMOTED_VAR_P (temp) = 1;
3549 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3550 return temp;
3551 }
3552
3553 return DECL_RTL (exp);
3554
3555 case INTEGER_CST:
3556 return immed_double_const (TREE_INT_CST_LOW (exp),
3557 TREE_INT_CST_HIGH (exp),
3558 mode);
3559
3560 case CONST_DECL:
3561 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3562
3563 case REAL_CST:
3564 /* If optimized, generate immediate CONST_DOUBLE
3565 which will be turned into memory by reload if necessary.
3566
3567 We used to force a register so that loop.c could see it. But
3568 this does not allow gen_* patterns to perform optimizations with
3569 the constants. It also produces two insns in cases like "x = 1.0;".
3570 On most machines, floating-point constants are not permitted in
3571 many insns, so we'd end up copying it to a register in any case.
3572
3573 Now, we do the copying in expand_binop, if appropriate. */
3574 return immed_real_const (exp);
3575
3576 case COMPLEX_CST:
3577 case STRING_CST:
3578 if (! TREE_CST_RTL (exp))
3579 output_constant_def (exp);
3580
3581 /* TREE_CST_RTL probably contains a constant address.
3582 On RISC machines where a constant address isn't valid,
3583 make some insns to get that address into a register. */
3584 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3585 && modifier != EXPAND_CONST_ADDRESS
3586 && modifier != EXPAND_INITIALIZER
3587 && modifier != EXPAND_SUM
3588 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3589 return change_address (TREE_CST_RTL (exp), VOIDmode,
3590 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3591 return TREE_CST_RTL (exp);
3592
3593 case SAVE_EXPR:
3594 context = decl_function_context (exp);
3595 /* We treat inline_function_decl as an alias for the current function
3596 because that is the inline function whose vars, types, etc.
3597 are being merged into the current function.
3598 See expand_inline_function. */
3599 if (context == current_function_decl || context == inline_function_decl)
3600 context = 0;
3601
3602 /* If this is non-local, handle it. */
3603 if (context)
3604 {
3605 temp = SAVE_EXPR_RTL (exp);
3606 if (temp && GET_CODE (temp) == REG)
3607 {
3608 put_var_into_stack (exp);
3609 temp = SAVE_EXPR_RTL (exp);
3610 }
3611 if (temp == 0 || GET_CODE (temp) != MEM)
3612 abort ();
3613 return change_address (temp, mode,
3614 fix_lexical_addr (XEXP (temp, 0), exp));
3615 }
3616 if (SAVE_EXPR_RTL (exp) == 0)
3617 {
3618 if (mode == BLKmode)
3619 temp
3620 = assign_stack_temp (mode,
3621 int_size_in_bytes (TREE_TYPE (exp)), 0);
3622 else
3623 {
3624 enum machine_mode var_mode = mode;
3625
3626 if (TREE_CODE (type) == INTEGER_TYPE
3627 || TREE_CODE (type) == ENUMERAL_TYPE
3628 || TREE_CODE (type) == BOOLEAN_TYPE
3629 || TREE_CODE (type) == CHAR_TYPE
3630 || TREE_CODE (type) == REAL_TYPE
3631 || TREE_CODE (type) == POINTER_TYPE
3632 || TREE_CODE (type) == OFFSET_TYPE)
3633 {
3634 PROMOTE_MODE (var_mode, unsignedp, type);
3635 }
3636
3637 temp = gen_reg_rtx (var_mode);
3638 }
3639
3640 SAVE_EXPR_RTL (exp) = temp;
3641 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3642 if (!optimize && GET_CODE (temp) == REG)
3643 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3644 save_expr_regs);
3645 }
3646
3647 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3648 must be a promoted value. We return a SUBREG of the wanted mode,
3649 but mark it so that we know that it was already extended. Note
3650 that `unsignedp' was modified above in this case. */
3651
3652 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3653 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3654 {
3655 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3656 SUBREG_PROMOTED_VAR_P (temp) = 1;
3657 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3658 return temp;
3659 }
3660
3661 return SAVE_EXPR_RTL (exp);
3662
3663 case EXIT_EXPR:
3664 /* Exit the current loop if the body-expression is true. */
3665 {
3666 rtx label = gen_label_rtx ();
3667 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3668 expand_exit_loop (NULL_PTR);
3669 emit_label (label);
3670 }
3671 return const0_rtx;
3672
3673 case LOOP_EXPR:
3674 expand_start_loop (1);
3675 expand_expr_stmt (TREE_OPERAND (exp, 0));
3676 expand_end_loop ();
3677
3678 return const0_rtx;
3679
3680 case BIND_EXPR:
3681 {
3682 tree vars = TREE_OPERAND (exp, 0);
3683 int vars_need_expansion = 0;
3684
3685 /* Need to open a binding contour here because
3686 if there are any cleanups they must be contained here. */
3687 expand_start_bindings (0);
3688
3689 /* Mark the corresponding BLOCK for output in its proper place. */
3690 if (TREE_OPERAND (exp, 2) != 0
3691 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3692 insert_block (TREE_OPERAND (exp, 2));
3693
3694 /* If VARS have not yet been expanded, expand them now. */
3695 while (vars)
3696 {
3697 if (DECL_RTL (vars) == 0)
3698 {
3699 vars_need_expansion = 1;
3700 expand_decl (vars);
3701 }
3702 expand_decl_init (vars);
3703 vars = TREE_CHAIN (vars);
3704 }
3705
3706 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3707
3708 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3709
3710 return temp;
3711 }
3712
3713 case RTL_EXPR:
3714 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3715 abort ();
3716 emit_insns (RTL_EXPR_SEQUENCE (exp));
3717 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3718 return RTL_EXPR_RTL (exp);
3719
3720 case CONSTRUCTOR:
3721 /* All elts simple constants => refer to a constant in memory. But
3722 if this is a non-BLKmode mode, let it store a field at a time
3723 since that should make a CONST_INT or CONST_DOUBLE when we
3724 fold. */
3725 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3726 {
3727 rtx constructor = output_constant_def (exp);
3728 if (modifier != EXPAND_CONST_ADDRESS
3729 && modifier != EXPAND_INITIALIZER
3730 && modifier != EXPAND_SUM
3731 && !memory_address_p (GET_MODE (constructor),
3732 XEXP (constructor, 0)))
3733 constructor = change_address (constructor, VOIDmode,
3734 XEXP (constructor, 0));
3735 return constructor;
3736 }
3737
3738 if (ignore)
3739 {
3740 tree elt;
3741 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3742 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3743 return const0_rtx;
3744 }
3745 else
3746 {
3747 if (target == 0 || ! safe_from_p (target, exp))
3748 {
3749 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3750 target = gen_reg_rtx (mode);
3751 else
3752 {
3753 enum tree_code c = TREE_CODE (type);
3754 target
3755 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3756 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
3757 MEM_IN_STRUCT_P (target) = 1;
3758 }
3759 }
3760 store_constructor (exp, target);
3761 return target;
3762 }
3763
3764 case INDIRECT_REF:
3765 {
3766 tree exp1 = TREE_OPERAND (exp, 0);
3767 tree exp2;
3768
3769 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3770 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3771 This code has the same general effect as simply doing
3772 expand_expr on the save expr, except that the expression PTR
3773 is computed for use as a memory address. This means different
3774 code, suitable for indexing, may be generated. */
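/* For instance, for *p += 1 the front end wraps `p' in a SAVE_EXPR so
   that it is evaluated only once; here its value is expanded directly
   as a memory address, shared by both the read and the write of *p.  */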
3775 if (TREE_CODE (exp1) == SAVE_EXPR
3776 && SAVE_EXPR_RTL (exp1) == 0
3777 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3778 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3779 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3780 {
3781 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3782 VOIDmode, EXPAND_SUM);
3783 op0 = memory_address (mode, temp);
3784 op0 = copy_all_regs (op0);
3785 SAVE_EXPR_RTL (exp1) = op0;
3786 }
3787 else
3788 {
3789 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3790 op0 = memory_address (mode, op0);
3791 }
3792
3793 temp = gen_rtx (MEM, mode, op0);
3794 /* If address was computed by addition,
3795 mark this as an element of an aggregate. */
3796 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3797 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3798 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3799 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3800 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3801 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3802 || (TREE_CODE (exp1) == ADDR_EXPR
3803 && (exp2 = TREE_OPERAND (exp1, 0))
3804 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3805 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3806 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3807 MEM_IN_STRUCT_P (temp) = 1;
3808 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3809 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3810 a location is accessed through a pointer to const does not mean
3811 that the value there can never change. */
3812 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3813 #endif
3814 return temp;
3815 }
3816
3817 case ARRAY_REF:
3818 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3819 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3820 {
3821 /* Nonconstant array index or nonconstant element size.
3822 Generate the tree for *(&array+index) and expand that,
3823 except do it in a language-independent way
3824 and don't complain about non-lvalue arrays.
3825 `mark_addressable' should already have been called
3826 for any array for which this case will be reached. */
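/* Concretely, a[i] is rewritten here as *(&a + i * sizeof (a[0])),
   with the multiply and add built as language-independent trees and
   folded where possible.  */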
3827
3828 /* Don't forget the const or volatile flag from the array element. */
3829 tree variant_type = build_type_variant (type,
3830 TREE_READONLY (exp),
3831 TREE_THIS_VOLATILE (exp));
3832 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3833 TREE_OPERAND (exp, 0));
3834 tree index = TREE_OPERAND (exp, 1);
3835 tree elt;
3836
3837 /* Convert the integer argument to a type the same size as a pointer
3838 so the multiply won't overflow spuriously. */
3839 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3840 index = convert (type_for_size (POINTER_SIZE, 0), index);
3841
3842 /* Don't think the address has side effects
3843 just because the array does.
3844 (In some cases the address might have side effects,
3845 and we fail to record that fact here. However, it should not
3846 matter, since expand_expr should not care.) */
3847 TREE_SIDE_EFFECTS (array_adr) = 0;
3848
3849 elt = build1 (INDIRECT_REF, type,
3850 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3851 array_adr,
3852 fold (build (MULT_EXPR,
3853 TYPE_POINTER_TO (variant_type),
3854 index, size_in_bytes (type))))));
3855
3856 /* Volatility, etc., of new expression is same as old expression. */
3857 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3858 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3859 TREE_READONLY (elt) = TREE_READONLY (exp);
3860
3861 return expand_expr (elt, target, tmode, modifier);
3862 }
3863
3864 /* Fold an expression like: "foo"[2].
3865 This is not done in fold so it won't happen inside &. */
3866 {
3867 int i;
3868 tree arg0 = TREE_OPERAND (exp, 0);
3869 tree arg1 = TREE_OPERAND (exp, 1);
3870
3871 if (TREE_CODE (arg0) == STRING_CST
3872 && TREE_CODE (arg1) == INTEGER_CST
3873 && !TREE_INT_CST_HIGH (arg1)
3874 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3875 {
3876 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3877 {
3878 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3879 TREE_TYPE (exp) = integer_type_node;
3880 return expand_expr (exp, target, tmode, modifier);
3881 }
3882 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3883 {
3884 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3885 TREE_TYPE (exp) = integer_type_node;
3886 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3887 }
3888 }
3889 }
3890
3891 /* If this is a constant index into a constant array,
3892 just get the value from the array. Handle both the cases when
3893 we have an explicit constructor and when our operand is a variable
3894 that was declared const. */
3895
3896 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3897 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3898 {
3899 tree index = fold (TREE_OPERAND (exp, 1));
3900 if (TREE_CODE (index) == INTEGER_CST
3901 && TREE_INT_CST_HIGH (index) == 0)
3902 {
3903 int i = TREE_INT_CST_LOW (index);
3904 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3905
3906 while (elem && i--)
3907 elem = TREE_CHAIN (elem);
3908 if (elem)
3909 return expand_expr (fold (TREE_VALUE (elem)), target,
3910 tmode, modifier);
3911 }
3912 }
3913
3914 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3915 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3916 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3917 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3918 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3919 && optimize >= 1
3920 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3921 != ERROR_MARK))
3922 {
3923 tree index = fold (TREE_OPERAND (exp, 1));
3924 if (TREE_CODE (index) == INTEGER_CST
3925 && TREE_INT_CST_HIGH (index) == 0)
3926 {
3927 int i = TREE_INT_CST_LOW (index);
3928 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3929
3930 if (TREE_CODE (init) == CONSTRUCTOR)
3931 {
3932 tree elem = CONSTRUCTOR_ELTS (init);
3933
3934 while (elem && i--)
3935 elem = TREE_CHAIN (elem);
3936 if (elem)
3937 return expand_expr (fold (TREE_VALUE (elem)), target,
3938 tmode, modifier);
3939 }
3940 else if (TREE_CODE (init) == STRING_CST
3941 && i < TREE_STRING_LENGTH (init))
3942 {
3943 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3944 return convert_to_mode (mode, temp, 0);
3945 }
3946 }
3947 }
3948 /* Treat array-ref with constant index as a component-ref. */
3949
3950 case COMPONENT_REF:
3951 case BIT_FIELD_REF:
3952 /* If the operand is a CONSTRUCTOR, we can just extract the
3953 appropriate field if it is present. */
3954 if (code != ARRAY_REF
3955 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3956 {
3957 tree elt;
3958
3959 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3960 elt = TREE_CHAIN (elt))
3961 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3962 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3963 }
3964
3965 {
3966 enum machine_mode mode1;
3967 int bitsize;
3968 int bitpos;
3969 tree offset;
3970 int volatilep = 0;
3971 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3972 &mode1, &unsignedp, &volatilep);
3973
3974 /* In some cases, we will be offsetting OP0's address by a constant.
3975 So get it as a sum, if possible. If we will be using it
3976 directly in an insn, we validate it. */
3977 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3978
3979 /* If this is a constant, put it into a register if it is a
3980 legitimate constant and memory if it isn't. */
3981 if (CONSTANT_P (op0))
3982 {
3983 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3984 if (LEGITIMATE_CONSTANT_P (op0))
3985 op0 = force_reg (mode, op0);
3986 else
3987 op0 = validize_mem (force_const_mem (mode, op0));
3988 }
3989
3990 if (offset != 0)
3991 {
3992 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3993
3994 if (GET_CODE (op0) != MEM)
3995 abort ();
3996 op0 = change_address (op0, VOIDmode,
3997 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3998 force_reg (Pmode, offset_rtx)));
3999 }
4000
4001 /* Don't forget about volatility even if this is a bitfield. */
4002 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4003 {
4004 op0 = copy_rtx (op0);
4005 MEM_VOLATILE_P (op0) = 1;
4006 }
4007
4008 if (mode1 == VOIDmode
4009 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4010 && modifier != EXPAND_CONST_ADDRESS
4011 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4012 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
4013 {
4014 /* In cases where an aligned union has an unaligned object
4015 as a field, we might be extracting a BLKmode value from
4016 an integer-mode (e.g., SImode) object. Handle this case
4017 by doing the extract into an object as wide as the field
4018 (which we know to be the width of a basic mode), then
4019 storing into memory, and changing the mode to BLKmode. */
4020 enum machine_mode ext_mode = mode;
4021
4022 if (ext_mode == BLKmode)
4023 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4024
4025 if (ext_mode == BLKmode)
4026 abort ();
4027
4028 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4029 unsignedp, target, ext_mode, ext_mode,
4030 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
4031 int_size_in_bytes (TREE_TYPE (tem)));
4032 if (mode == BLKmode)
4033 {
4034 rtx new = assign_stack_temp (ext_mode,
4035 bitsize / BITS_PER_UNIT, 0);
4036
4037 emit_move_insn (new, op0);
4038 op0 = copy_rtx (new);
4039 PUT_MODE (op0, BLKmode);
4040 }
4041
4042 return op0;
4043 }
4044
4045 /* Get a reference to just this component. */
4046 if (modifier == EXPAND_CONST_ADDRESS
4047 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4048 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4049 (bitpos / BITS_PER_UNIT)));
4050 else
4051 op0 = change_address (op0, mode1,
4052 plus_constant (XEXP (op0, 0),
4053 (bitpos / BITS_PER_UNIT)));
4054 MEM_IN_STRUCT_P (op0) = 1;
4055 MEM_VOLATILE_P (op0) |= volatilep;
4056 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4057 return op0;
4058 if (target == 0)
4059 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4060 convert_move (target, op0, unsignedp);
4061 return target;
4062 }
4063
4064 case OFFSET_REF:
4065 {
4066 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
4067 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4068 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4069 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4070 MEM_IN_STRUCT_P (temp) = 1;
4071 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4072 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4073 a location is accessed through a pointer to const does not mean
4074 that the value there can never change. */
4075 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4076 #endif
4077 return temp;
4078 }
4079
4080 /* Intended for a reference to a buffer of a file-object in Pascal.
4081 But it's not certain that a special tree code will really be
4082 necessary for these. INDIRECT_REF might work for them. */
4083 case BUFFER_REF:
4084 abort ();
4085
4086 /* IN_EXPR: Inlined pascal set IN expression.
4087
4088 Algorithm:
4089 rlo = set_low - (set_low % bits_per_word);
4090 the_word = set [ (index - rlo)/bits_per_word ];
4091 bit_index = index % bits_per_word;
4092 bitmask = 1 << bit_index;
4093 return !!(the_word & bitmask); */
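/* A minimal C sketch of that algorithm, assuming the set is stored as
   an array of bytes with bits_per_word == BITS_PER_UNIT, and ignoring
   the range checks (in_set is a hypothetical helper, shown only to
   make the bit arithmetic concrete):

     int in_set (unsigned char *set, int set_low, int index)
     {
       int rlo = set_low & ~(BITS_PER_UNIT - 1);
       unsigned char the_word = set[(index - rlo) / BITS_PER_UNIT];
       return (the_word >> (index % BITS_PER_UNIT)) & 1;
     }

   The RTL generated below additionally compares INDEX against the set
   bounds first and yields 0 when it is out of range.  */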
4094 case IN_EXPR:
4095 preexpand_calls (exp);
4096 {
4097 tree set = TREE_OPERAND (exp, 0);
4098 tree index = TREE_OPERAND (exp, 1);
4099 tree set_type = TREE_TYPE (set);
4100
4101 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4102 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4103
4104 rtx index_val;
4105 rtx lo_r;
4106 rtx hi_r;
4107 rtx rlow;
4108 rtx diff, quo, rem, addr, bit, result;
4109 rtx setval, setaddr;
4110 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4111
4112 if (target == 0)
4113 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4114
4115 /* If domain is empty, answer is no. */
4116 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4117 return const0_rtx;
4118
4119 index_val = expand_expr (index, NULL_RTX, VOIDmode, 0);
4120 lo_r = expand_expr (set_low_bound, NULL_RTX, VOIDmode, 0);
4121 hi_r = expand_expr (set_high_bound, NULL_RTX, VOIDmode, 0);
4122 setval = expand_expr (set, NULL_RTX, VOIDmode, 0);
4123 setaddr = XEXP (setval, 0);
4124
4125 /* Compare index against bounds, if they are constant. */
4126 if (GET_CODE (index_val) == CONST_INT
4127 && GET_CODE (lo_r) == CONST_INT)
4128 {
4129 if (INTVAL (index_val) < INTVAL (lo_r))
4130 return const0_rtx;
4131 }
4132
4133 if (GET_CODE (index_val) == CONST_INT
4134 && GET_CODE (hi_r) == CONST_INT)
4135 {
4136 if (INTVAL (hi_r) < INTVAL (index_val))
4137 return const0_rtx;
4138 }
4139
4140 /* If we get here, we have to generate the code for both cases
4141 (in range and out of range). */
4142
4143 op0 = gen_label_rtx ();
4144 op1 = gen_label_rtx ();
4145
4146 if (! (GET_CODE (index_val) == CONST_INT
4147 && GET_CODE (lo_r) == CONST_INT))
4148 {
4149 emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
4150 emit_jump_insn (gen_blt (op1));
4151 }
4152
4153 if (! (GET_CODE (index_val) == CONST_INT
4154 && GET_CODE (hi_r) == CONST_INT))
4155 {
4156 emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
4157 emit_jump_insn (gen_bgt (op1));
4158 }
4159
4160 /* Calculate the element number of bit zero in the first word
4161 of the set. */
4162 if (GET_CODE (lo_r) == CONST_INT)
4163 rlow = gen_rtx (CONST_INT, VOIDmode,
4164 INTVAL (lo_r) & ~ (BITS_PER_UNIT - 1));
4165 else
4166 rlow = expand_binop (index_mode, and_optab,
4167 lo_r, gen_rtx (CONST_INT, VOIDmode,
4168 ~ (BITS_PER_UNIT - 1)),
4169 0, 0, OPTAB_LIB_WIDEN);
4170
4171 diff = expand_binop (index_mode, sub_optab,
4172 index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
4173
4174 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4175 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4176 0, 0);
4177 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4178 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4179 0, 0);
4180 addr = memory_address (byte_mode,
4181 expand_binop (index_mode, add_optab,
4182 quo, setaddr, NULL_RTX, 0, OPTAB_LIB_WIDEN));
4183 /* Extract the bit we want to examine.  */
4184 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4185 gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
4186 result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
4187 1, OPTAB_LIB_WIDEN);
4188 emit_move_insn (target, result);
4189
4190 /* Output the code to handle the out-of-range case. */
4191 emit_jump (op0);
4192 emit_label (op1);
4193 emit_move_insn (target, const0_rtx);
4194 emit_label (op0);
4195 return target;
4196 }
4197
4198 case WITH_CLEANUP_EXPR:
4199 if (RTL_EXPR_RTL (exp) == 0)
4200 {
4201 RTL_EXPR_RTL (exp)
4202 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4203 cleanups_this_call
4204 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4205 /* That's it for this cleanup. */
4206 TREE_OPERAND (exp, 2) = 0;
4207 }
4208 return RTL_EXPR_RTL (exp);
4209
4210 case CALL_EXPR:
4211 /* Check for a built-in function. */
4212 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4213 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4214 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4215 return expand_builtin (exp, target, subtarget, tmode, ignore);
4216 /* If this call was expanded already by preexpand_calls,
4217 just return the result we got. */
4218 if (CALL_EXPR_RTL (exp) != 0)
4219 return CALL_EXPR_RTL (exp);
4220 return expand_call (exp, target, ignore);
4221
4222 case NON_LVALUE_EXPR:
4223 case NOP_EXPR:
4224 case CONVERT_EXPR:
4225 case REFERENCE_EXPR:
4226 if (TREE_CODE (type) == VOID_TYPE || ignore)
4227 {
4228 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4229 return const0_rtx;
4230 }
4231 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4232 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4233 if (TREE_CODE (type) == UNION_TYPE)
4234 {
4235 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4236 if (target == 0)
4237 {
4238 if (mode == BLKmode)
4239 {
4240 if (TYPE_SIZE (type) == 0
4241 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4242 abort ();
4243 target = assign_stack_temp (BLKmode,
4244 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4245 + BITS_PER_UNIT - 1)
4246 / BITS_PER_UNIT, 0);
4247 }
4248 else
4249 target = gen_reg_rtx (mode);
4250 }
4251 if (GET_CODE (target) == MEM)
4252 /* Store data into beginning of memory target. */
4253 store_expr (TREE_OPERAND (exp, 0),
4254 change_address (target, TYPE_MODE (valtype), 0), 0);
4255
4256 else if (GET_CODE (target) == REG)
4257 /* Store this field into a union of the proper type. */
4258 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4259 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4260 VOIDmode, 0, 1,
4261 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4262 else
4263 abort ();
4264
4265 /* Return the entire union. */
4266 return target;
4267 }
4268 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4269 if (GET_MODE (op0) == mode)
4270 return op0;
4271 /* If arg is a constant integer being extended from a narrower mode,
4272 we must really truncate to get the extended bits right. Otherwise
4273 (unsigned long) (unsigned char) ("\377"[0])
4274 would come out as ffffffff. */
4275 if (GET_MODE (op0) == VOIDmode
4276 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4277 < GET_MODE_BITSIZE (mode)))
4278 {
4279 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
4280 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4281
4282 if (width < HOST_BITS_PER_WIDE_INT)
4283 {
4284 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4285 : CONST_DOUBLE_LOW (op0));
4286 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4287 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4288 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4289 else
4290 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4291
4292 op0 = GEN_INT (val);
4293 }
4294 else
4295 {
4296 op0 = (simplify_unary_operation
4297 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4298 ? ZERO_EXTEND : SIGN_EXTEND),
4299 mode, op0,
4300 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4301 if (op0 == 0)
4302 abort ();
4303 }
4304 }
4305 if (GET_MODE (op0) == VOIDmode)
4306 return op0;
4307 if (modifier == EXPAND_INITIALIZER)
4308 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4309 if (flag_force_mem && GET_CODE (op0) == MEM)
4310 op0 = copy_to_reg (op0);
4311
4312 if (target == 0)
4313 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4314 else
4315 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4316 return target;
4317
4318 case PLUS_EXPR:
4319 /* We come here from MINUS_EXPR when the second operand is a constant. */
4320 plus_expr:
4321 this_optab = add_optab;
4322
4323 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4324 something else, make sure we add the register to the constant and
4325 then to the other thing. This case can occur during strength
4326 reduction and doing it this way will produce better code if the
4327 frame pointer or argument pointer is eliminated.
4328
4329 fold-const.c will ensure that the constant is always in the inner
4330 PLUS_EXPR, so the only case we need to do anything about is if
4331 sp, ap, or fp is our second argument, in which case we must swap
4332 the innermost first argument and our second argument. */
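/* For example, (x + 4) + fp is rearranged here into (fp + 4) + x, so
   that after frame-pointer elimination fp + 4 can fold into a single
   constant-offset address.  */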
4333
4334 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4335 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4336 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4337 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4338 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4339 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4340 {
4341 tree t = TREE_OPERAND (exp, 1);
4342
4343 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4344 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4345 }
4346
4347 /* If the result is to be Pmode and we are adding an integer to
4348 something, we might be forming a constant. So try to use
4349 plus_constant. If it produces a sum and we can't accept it,
4350 use force_operand. This allows P = &ARR[const] to generate
4351 efficient code on machines where a SYMBOL_REF is not a valid
4352 address.
4353
4354 If this is an EXPAND_SUM call, always return the sum. */
4355 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4356 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4357 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4358 || mode == Pmode))
4359 {
4360 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4361 EXPAND_SUM);
4362 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4363 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4364 op1 = force_operand (op1, target);
4365 return op1;
4366 }
4367
4368 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4369 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4370 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4371 || mode == Pmode))
4372 {
4373 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4374 EXPAND_SUM);
4375 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4376 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4377 op0 = force_operand (op0, target);
4378 return op0;
4379 }
4380
4381 /* No sense saving up arithmetic to be done
4382 if it's all in the wrong mode to form part of an address.
4383 And force_operand won't know whether to sign-extend or
4384 zero-extend. */
4385 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4386 || mode != Pmode) goto binop;
4387
4388 preexpand_calls (exp);
4389 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4390 subtarget = 0;
4391
4392 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4393 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4394
4395 /* Make sure any term that's a sum with a constant comes last. */
4396 if (GET_CODE (op0) == PLUS
4397 && CONSTANT_P (XEXP (op0, 1)))
4398 {
4399 temp = op0;
4400 op0 = op1;
4401 op1 = temp;
4402 }
4403 /* If adding to a sum including a constant,
4404 associate it to put the constant outside. */
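/* For example, OP0 + (x + 7) becomes, in effect, (OP0 + x) + 7; any
   constant term already inside OP0 is then pulled out and folded into
   the 7 as well.  */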
4405 if (GET_CODE (op1) == PLUS
4406 && CONSTANT_P (XEXP (op1, 1)))
4407 {
4408 rtx constant_term = const0_rtx;
4409
4410 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4411 if (temp != 0)
4412 op0 = temp;
4413 /* Ensure that MULT comes first if there is one. */
4414 else if (GET_CODE (op0) == MULT)
4415 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4416 else
4417 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4418
4419 /* Let's also eliminate constants from op0 if possible. */
4420 op0 = eliminate_constant_term (op0, &constant_term);
4421
4422 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4423 their sum should be a constant. Form it into OP1, since the
4424 result we want will then be OP0 + OP1. */
4425
4426 temp = simplify_binary_operation (PLUS, mode, constant_term,
4427 XEXP (op1, 1));
4428 if (temp != 0)
4429 op1 = temp;
4430 else
4431 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4432 }
4433
4434 /* Put a constant term last and put a multiplication first. */
4435 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4436 temp = op1, op1 = op0, op0 = temp;
4437
4438 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4439 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4440
4441 case MINUS_EXPR:
4442 /* Handle difference of two symbolic constants,
4443 for the sake of an initializer. */
4444 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4445 && really_constant_p (TREE_OPERAND (exp, 0))
4446 && really_constant_p (TREE_OPERAND (exp, 1)))
4447 {
4448 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4449 VOIDmode, modifier);
4450 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4451 VOIDmode, modifier);
4452 return gen_rtx (MINUS, mode, op0, op1);
4453 }
4454 /* Convert A - const to A + (-const). */
4455 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4456 {
4457 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4458 fold (build1 (NEGATE_EXPR, type,
4459 TREE_OPERAND (exp, 1))));
4460 goto plus_expr;
4461 }
4462 this_optab = sub_optab;
4463 goto binop;
4464
4465 case MULT_EXPR:
4466 preexpand_calls (exp);
4467 /* If first operand is constant, swap them.
4468 Thus the following special case checks need only
4469 check the second operand. */
4470 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4471 {
4472 register tree t1 = TREE_OPERAND (exp, 0);
4473 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4474 TREE_OPERAND (exp, 1) = t1;
4475 }
4476
4477 /* Attempt to return something suitable for generating an
4478 indexed address, for machines that support that. */
4479
4480 if (modifier == EXPAND_SUM && mode == Pmode
4481 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4482 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4483 {
4484 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4485
4486 /* Apply distributive law if OP0 is x+c. */
4487 if (GET_CODE (op0) == PLUS
4488 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4489 return gen_rtx (PLUS, mode,
4490 gen_rtx (MULT, mode, XEXP (op0, 0),
4491 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4492 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4493 * INTVAL (XEXP (op0, 1))));
4494
4495 if (GET_CODE (op0) != REG)
4496 op0 = force_operand (op0, NULL_RTX);
4497 if (GET_CODE (op0) != REG)
4498 op0 = copy_to_mode_reg (mode, op0);
4499
4500 return gen_rtx (MULT, mode, op0,
4501 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4502 }
4503
4504 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4505 subtarget = 0;
4506
4507 /* Check for multiplying things that have been extended
4508 from a narrower type. If this machine supports multiplying
4509 in that narrower type with a result in the desired type,
4510 do it that way, and avoid the explicit type-conversion. */
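/* For example, if SImode is twice as wide as HImode and the machine
   has an smul_widen_optab pattern (e.g. mulhisi3), then
   (SImode) a * (SImode) b with HImode A and B becomes one widening
   multiply instead of two extensions and a full SImode multiply.  */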
4511 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4512 && TREE_CODE (type) == INTEGER_TYPE
4513 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4514 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4515 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4516 && int_fits_type_p (TREE_OPERAND (exp, 1),
4517 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4518 /* Don't use a widening multiply if a shift will do. */
4519 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4520 > HOST_BITS_PER_WIDE_INT)
4521 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4522 ||
4523 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4524 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4525 ==
4526 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4527 /* If both operands are extended, they must either both
4528 be zero-extended or both be sign-extended. */
4529 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4530 ==
4531 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4532 {
4533 enum machine_mode innermode
4534 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4535 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4536 ? umul_widen_optab : smul_widen_optab);
4537 if (mode == GET_MODE_WIDER_MODE (innermode)
4538 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4539 {
4540 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4541 NULL_RTX, VOIDmode, 0);
4542 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4543 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4544 VOIDmode, 0);
4545 else
4546 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4547 NULL_RTX, VOIDmode, 0);
4548 goto binop2;
4549 }
4550 }
4551 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4552 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4553 return expand_mult (mode, op0, op1, target, unsignedp);
4554
4555 case TRUNC_DIV_EXPR:
4556 case FLOOR_DIV_EXPR:
4557 case CEIL_DIV_EXPR:
4558 case ROUND_DIV_EXPR:
4559 case EXACT_DIV_EXPR:
4560 preexpand_calls (exp);
4561 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4562 subtarget = 0;
4563 /* Possible optimization: compute the dividend with EXPAND_SUM
4564 then if the divisor is constant can optimize the case
4565 where some terms of the dividend have coeffs divisible by it. */
4566 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4567 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4568 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4569
4570 case RDIV_EXPR:
4571 this_optab = flodiv_optab;
4572 goto binop;
4573
4574 case TRUNC_MOD_EXPR:
4575 case FLOOR_MOD_EXPR:
4576 case CEIL_MOD_EXPR:
4577 case ROUND_MOD_EXPR:
4578 preexpand_calls (exp);
4579 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4580 subtarget = 0;
4581 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4582 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4583 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4584
4585 case FIX_ROUND_EXPR:
4586 case FIX_FLOOR_EXPR:
4587 case FIX_CEIL_EXPR:
4588 abort (); /* Not used for C. */
4589
4590 case FIX_TRUNC_EXPR:
4591 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4592 if (target == 0)
4593 target = gen_reg_rtx (mode);
4594 expand_fix (target, op0, unsignedp);
4595 return target;
4596
4597 case FLOAT_EXPR:
4598 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4599 if (target == 0)
4600 target = gen_reg_rtx (mode);
4601 /* expand_float can't figure out what to do if FROM has VOIDmode.
4602 So give it the correct mode. With -O, cse will optimize this. */
4603 if (GET_MODE (op0) == VOIDmode)
4604 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4605 op0);
4606 expand_float (target, op0,
4607 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4608 return target;
4609
4610 case NEGATE_EXPR:
4611 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4612 temp = expand_unop (mode, neg_optab, op0, target, 0);
4613 if (temp == 0)
4614 abort ();
4615 return temp;
4616
4617 case ABS_EXPR:
4618 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4619
4620 /* Handle complex values specially. */
4621 {
4622 enum machine_mode opmode
4623 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4624
4625 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4626 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4627 return expand_complex_abs (opmode, op0, target, unsignedp);
4628 }
4629
4630 /* Unsigned abs is simply the operand. Testing here means we don't
4631 risk generating incorrect code below. */
4632 if (TREE_UNSIGNED (type))
4633 return op0;
4634
4635 /* First try to do it with a special abs instruction. */
4636 temp = expand_unop (mode, abs_optab, op0, target, 0);
4637 if (temp != 0)
4638 return temp;
4639
4640 /* If this machine has expensive jumps, we can do integer absolute
4641 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4642 where W is the width of MODE. */
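/* A worked example with W == 32: for x == -5, x >> 31 is -1 (all one
   bits), (-1 ^ -5) is 4, and 4 - (-1) is 5; for x == 5 the shift
   gives 0 and the value passes through unchanged.  As a branch-free
   C sketch, assuming 32-bit int and an arithmetic right shift:

     int iabs (int x)
     {
       int s = x >> 31;
       return (s ^ x) - s;
     }  */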
4643
4644 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4645 {
4646 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4647 size_int (GET_MODE_BITSIZE (mode) - 1),
4648 NULL_RTX, 0);
4649
4650 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4651 OPTAB_LIB_WIDEN);
4652 if (temp != 0)
4653 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4654 OPTAB_LIB_WIDEN);
4655
4656 if (temp != 0)
4657 return temp;
4658 }
4659
4660 /* If that does not win, use conditional jump and negate. */
4661 target = original_target;
4662 temp = gen_label_rtx ();
4663 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4664 || (GET_CODE (target) == REG
4665 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4666 target = gen_reg_rtx (mode);
4667 emit_move_insn (target, op0);
4668 emit_cmp_insn (target,
4669 expand_expr (convert (type, integer_zero_node),
4670 NULL_RTX, VOIDmode, 0),
4671 GE, NULL_RTX, mode, 0, 0);
4672 NO_DEFER_POP;
4673 emit_jump_insn (gen_bge (temp));
4674 op0 = expand_unop (mode, neg_optab, target, target, 0);
4675 if (op0 != target)
4676 emit_move_insn (target, op0);
4677 emit_label (temp);
4678 OK_DEFER_POP;
4679 return target;
4680
4681 case MAX_EXPR:
4682 case MIN_EXPR:
4683 target = original_target;
4684 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4685 || (GET_CODE (target) == REG
4686 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4687 target = gen_reg_rtx (mode);
4688 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4689 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4690
4691 /* First try to do it with a special MIN or MAX instruction.
4692 If that does not win, use a conditional jump to select the proper
4693 value. */
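/* Without such an instruction, the fallback below is, in effect,
   target = op0; if (target >= op1) goto lab; target = op1; lab:;
   for MAX_EXPR (<= for MIN_EXPR), with the unsigned comparisons
   GEU/LEU used for unsigned types.  */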
4694 this_optab = (TREE_UNSIGNED (type)
4695 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4696 : (code == MIN_EXPR ? smin_optab : smax_optab));
4697
4698 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4699 OPTAB_WIDEN);
4700 if (temp != 0)
4701 return temp;
4702
4703 if (target != op0)
4704 emit_move_insn (target, op0);
4705 op0 = gen_label_rtx ();
4706 if (code == MAX_EXPR)
4707 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4708 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4709 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4710 else
4711 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4712 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4713 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4714 if (temp == const0_rtx)
4715 emit_move_insn (target, op1);
4716 else if (temp != const_true_rtx)
4717 {
4718 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4719 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4720 else
4721 abort ();
4722 emit_move_insn (target, op1);
4723 }
4724 emit_label (op0);
4725 return target;
4726
4727 /* ??? Can optimize when the operand of this is a bitwise operation,
4728 by using a different bitwise operation. */
4729 case BIT_NOT_EXPR:
4730 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4731 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4732 if (temp == 0)
4733 abort ();
4734 return temp;
4735
4736 case FFS_EXPR:
4737 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4738 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4739 if (temp == 0)
4740 abort ();
4741 return temp;
4742
4743 /* ??? Can optimize bitwise operations with one arg constant.
4744 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4745 and (a bitwise1 b) bitwise2 b (etc)
4746 but that is probably not worthwhile. */
4747
4748 /* BIT_AND_EXPR is for bitwise anding.
4749 TRUTH_AND_EXPR is for anding two boolean values
4750 when we want in all cases to compute both of them.
4751 In general it is fastest to do TRUTH_AND_EXPR by
4752 computing both operands as actual zero-or-1 values
4753 and then bitwise anding. In cases where there cannot
4754 be any side effects, better code would be made by
4755 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4756 but the question is how to recognize those cases. */
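/* Concretely, for a && b (TRUTH_ANDIF_EXPR) the second operand must
   be skipped when the first is zero, whereas a TRUTH_AND_EXPR may
   expand as straight-line code, in effect
   t1 = (a != 0); t2 = (b != 0); result = t1 & t2;  */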
4757
4758 case TRUTH_AND_EXPR:
4759 case BIT_AND_EXPR:
4760 this_optab = and_optab;
4761 goto binop;
4762
4763 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4764 case TRUTH_OR_EXPR:
4765 case BIT_IOR_EXPR:
4766 this_optab = ior_optab;
4767 goto binop;
4768
4769 case TRUTH_XOR_EXPR:
4770 case BIT_XOR_EXPR:
4771 this_optab = xor_optab;
4772 goto binop;
4773
4774 case LSHIFT_EXPR:
4775 case RSHIFT_EXPR:
4776 case LROTATE_EXPR:
4777 case RROTATE_EXPR:
4778 preexpand_calls (exp);
4779 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4780 subtarget = 0;
4781 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4782 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4783 unsignedp);
4784
4785 /* Could determine the answer when only additive constants differ.
4786 Also, the addition of one can be handled by changing the condition. */
4787 case LT_EXPR:
4788 case LE_EXPR:
4789 case GT_EXPR:
4790 case GE_EXPR:
4791 case EQ_EXPR:
4792 case NE_EXPR:
4793 preexpand_calls (exp);
4794 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4795 if (temp != 0)
4796 return temp;
4797 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4798 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4799 && original_target
4800 && GET_CODE (original_target) == REG
4801 && (GET_MODE (original_target)
4802 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4803 {
4804 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4805 if (temp != original_target)
4806 temp = copy_to_reg (temp);
4807 op1 = gen_label_rtx ();
4808 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4809 GET_MODE (temp), unsignedp, 0);
4810 emit_jump_insn (gen_beq (op1));
4811 emit_move_insn (temp, const1_rtx);
4812 emit_label (op1);
4813 return temp;
4814 }
4815 /* If no set-flag instruction, must generate a conditional
4816 store into a temporary variable. Drop through
4817 and handle this like && and ||. */
4818
4819 case TRUTH_ANDIF_EXPR:
4820 case TRUTH_ORIF_EXPR:
4821 if (target == 0 || ! safe_from_p (target, exp)
4822 /* Make sure we don't have a hard reg (such as function's return
4823 value) live across basic blocks, if not optimizing. */
4824 || (!optimize && GET_CODE (target) == REG
4825 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4826 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4827 emit_clr_insn (target);
4828 op1 = gen_label_rtx ();
4829 jumpifnot (exp, op1);
4830 emit_0_to_1_insn (target);
4831 emit_label (op1);
4832 return target;
4833
4834 case TRUTH_NOT_EXPR:
4835 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4836 /* The parser is careful to generate TRUTH_NOT_EXPR
4837 only with operands that are always zero or one. */
4838 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4839 target, 1, OPTAB_LIB_WIDEN);
4840 if (temp == 0)
4841 abort ();
4842 return temp;
4843
4844 case COMPOUND_EXPR:
4845 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4846 emit_queue ();
4847 return expand_expr (TREE_OPERAND (exp, 1),
4848 (ignore ? const0_rtx : target),
4849 VOIDmode, 0);
4850
4851 case COND_EXPR:
4852 {
4853 /* Note that COND_EXPRs whose type is a structure or union
4854 are required to be constructed to contain assignments of
4855 a temporary variable, so that we can evaluate them here
4856 for side effect only. If type is void, we must do likewise. */
4857
4858 /* If an arm of the branch requires a cleanup,
4859 only that cleanup is performed. */
4860
4861 tree singleton = 0;
4862 tree binary_op = 0, unary_op = 0;
4863 tree old_cleanups = cleanups_this_call;
4864 cleanups_this_call = 0;
4865
4866 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4867 convert it to our mode, if necessary. */
4868 if (integer_onep (TREE_OPERAND (exp, 1))
4869 && integer_zerop (TREE_OPERAND (exp, 2))
4870 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4871 {
4872 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4873 if (GET_MODE (op0) == mode)
4874 return op0;
4875 if (target == 0)
4876 target = gen_reg_rtx (mode);
4877 convert_move (target, op0, unsignedp);
4878 return target;
4879 }
4880
4881 /* If we are not to produce a result, we have no target. Otherwise,
4882 if a target was specified use it; it will not be used as an
4883 intermediate target unless it is safe. If no target, use a
4884 temporary. */
4885
4886 if (mode == VOIDmode || ignore)
4887 temp = 0;
4888 else if (original_target
4889 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4890 temp = original_target;
4891 else if (mode == BLKmode)
4892 {
4893 if (TYPE_SIZE (type) == 0
4894 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4895 abort ();
4896 temp = assign_stack_temp (BLKmode,
4897 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4898 + BITS_PER_UNIT - 1)
4899 / BITS_PER_UNIT, 0);
4900 }
4901 else
4902 temp = gen_reg_rtx (mode);
4903
4904 /* Check for X ? A + B : A. If we have this, we can copy
4905 A to the output and conditionally add B. Similarly for unary
4906 operations. Don't do this if X has side-effects because
4907 those side effects might affect A or B and the "?" operation is
4908 a sequence point in ANSI. (We test for side effects later.) */
4909
4910 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4911 && operand_equal_p (TREE_OPERAND (exp, 2),
4912 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4913 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4914 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4915 && operand_equal_p (TREE_OPERAND (exp, 1),
4916 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4917 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4918 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4919 && operand_equal_p (TREE_OPERAND (exp, 2),
4920 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4921 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4922 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4923 && operand_equal_p (TREE_OPERAND (exp, 1),
4924 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4925 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4926
4927 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4928 operation, do this as A + (X != 0). Similarly for other simple
4929 binary operators. */
4930 if (singleton && binary_op
4931 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4932 && (TREE_CODE (binary_op) == PLUS_EXPR
4933 || TREE_CODE (binary_op) == MINUS_EXPR
4934 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4935 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4936 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4937 && integer_onep (TREE_OPERAND (binary_op, 1))
4938 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4939 {
4940 rtx result;
4941 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4942 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4943 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4944 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4945 : and_optab);
4946
4947 /* If we had X ? A : A + 1, do this as A + (X == 0).
4948
4949 We have to invert the truth value here and then put it
4950 back later if do_store_flag fails. We cannot simply copy
4951 TREE_OPERAND (exp, 0) to another variable and modify that
4952 because invert_truthvalue can modify the tree pointed to
4953 by its argument. */
4954 if (singleton == TREE_OPERAND (exp, 1))
4955 TREE_OPERAND (exp, 0)
4956 = invert_truthvalue (TREE_OPERAND (exp, 0));
4957
4958 result = do_store_flag (TREE_OPERAND (exp, 0),
4959 (safe_from_p (temp, singleton)
4960 ? temp : NULL_RTX),
4961 mode, BRANCH_COST <= 1);
4962
4963 if (result)
4964 {
4965 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4966 return expand_binop (mode, boptab, op1, result, temp,
4967 unsignedp, OPTAB_LIB_WIDEN);
4968 }
4969 else if (singleton == TREE_OPERAND (exp, 1))
4970 TREE_OPERAND (exp, 0)
4971 = invert_truthvalue (TREE_OPERAND (exp, 0));
4972 }
4973
4974 NO_DEFER_POP;
4975 op0 = gen_label_rtx ();
4976
4977 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4978 {
4979 if (temp != 0)
4980 {
4981 /* If the target conflicts with the other operand of the
4982 binary op, we can't use it. Also, we can't use the target
4983 if it is a hard register, because evaluating the condition
4984 might clobber it. */
4985 if ((binary_op
4986 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4987 || (GET_CODE (temp) == REG
4988 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4989 temp = gen_reg_rtx (mode);
4990 store_expr (singleton, temp, 0);
4991 }
4992 else
4993 expand_expr (singleton,
4994 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4995 if (cleanups_this_call)
4996 {
4997 sorry ("aggregate value in COND_EXPR");
4998 cleanups_this_call = 0;
4999 }
5000 if (singleton == TREE_OPERAND (exp, 1))
5001 jumpif (TREE_OPERAND (exp, 0), op0);
5002 else
5003 jumpifnot (TREE_OPERAND (exp, 0), op0);
5004
5005 if (binary_op && temp == 0)
5006 /* Just touch the other operand. */
5007 expand_expr (TREE_OPERAND (binary_op, 1),
5008 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5009 else if (binary_op)
5010 store_expr (build (TREE_CODE (binary_op), type,
5011 make_tree (type, temp),
5012 TREE_OPERAND (binary_op, 1)),
5013 temp, 0);
5014 else
5015 store_expr (build1 (TREE_CODE (unary_op), type,
5016 make_tree (type, temp)),
5017 temp, 0);
5018 op1 = op0;
5019 }
5020 #if 0
5021 /* This is now done in jump.c and is better done there because it
5022 produces shorter register lifetimes. */
5023
5024 /* Check for both possibilities either constants or variables
5025 in registers (but not the same as the target!). If so, can
5026 save branches by assigning one, branching, and assigning the
5027 other. */
5028 else if (temp && GET_MODE (temp) != BLKmode
5029 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5030 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5031 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5032 && DECL_RTL (TREE_OPERAND (exp, 1))
5033 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5034 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5035 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5036 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5037 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5038 && DECL_RTL (TREE_OPERAND (exp, 2))
5039 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5040 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5041 {
5042 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5043 temp = gen_reg_rtx (mode);
5044 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5045 jumpifnot (TREE_OPERAND (exp, 0), op0);
5046 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5047 op1 = op0;
5048 }
5049 #endif
5050 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5051 comparison operator. If we have one of these cases, set the
5052 output to A, branch on A (cse will merge these two references),
5053 then set the output to FOO. */
5054 else if (temp
5055 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5056 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5057 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5058 TREE_OPERAND (exp, 1), 0)
5059 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5060 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5061 {
5062 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5063 temp = gen_reg_rtx (mode);
5064 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5065 jumpif (TREE_OPERAND (exp, 0), op0);
5066 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5067 op1 = op0;
5068 }
5069 else if (temp
5070 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5071 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5072 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5073 TREE_OPERAND (exp, 2), 0)
5074 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5075 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5076 {
5077 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5078 temp = gen_reg_rtx (mode);
5079 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5080 jumpifnot (TREE_OPERAND (exp, 0), op0);
5081 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5082 op1 = op0;
5083 }
5084 else
5085 {
5086 op1 = gen_label_rtx ();
5087 jumpifnot (TREE_OPERAND (exp, 0), op0);
5088 if (temp != 0)
5089 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5090 else
5091 expand_expr (TREE_OPERAND (exp, 1),
5092 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5093 if (cleanups_this_call)
5094 {
5095 sorry ("aggregate value in COND_EXPR");
5096 cleanups_this_call = 0;
5097 }
5098
5099 emit_queue ();
5100 emit_jump_insn (gen_jump (op1));
5101 emit_barrier ();
5102 emit_label (op0);
5103 if (temp != 0)
5104 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5105 else
5106 expand_expr (TREE_OPERAND (exp, 2),
5107 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5108 }
5109
5110 if (cleanups_this_call)
5111 {
5112 sorry ("aggregate value in COND_EXPR");
5113 cleanups_this_call = 0;
5114 }
5115
5116 emit_queue ();
5117 emit_label (op1);
5118 OK_DEFER_POP;
5119 cleanups_this_call = old_cleanups;
5120 return temp;
5121 }
5122
5123 case TARGET_EXPR:
5124 {
5125 /* Something needs to be initialized, but we didn't know
5126 where that thing was when building the tree. For example,
5127 it could be the return value of a function, or a parameter
5128 to a function which is laid out on the stack, or a temporary
5129 variable which must be passed by reference.
5130
5131 We guarantee that the expression will either be constructed
5132 or copied into our original target. */
5133
5134 tree slot = TREE_OPERAND (exp, 0);
5135 tree exp1;
5136
5137 if (TREE_CODE (slot) != VAR_DECL)
5138 abort ();
5139
5140 if (target == 0)
5141 {
5142 if (DECL_RTL (slot) != 0)
5143 {
5144 target = DECL_RTL (slot);
5145 /* If we have already expanded the slot, don't do
5146 it again. (mrs) */
5147 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5148 return target;
5149 }
5150 else
5151 {
5152 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5153 /* All temp slots at this level must not conflict. */
5154 preserve_temp_slots (target);
5155 DECL_RTL (slot) = target;
5156 }
5157
5158 #if 0
5159 /* I bet this needs to be done, and I bet that it needs to
5160 be above, inside the else clause. The reason is
5161 simple: how else is it going to get cleaned up? (mrs)
5162
5163 The reason it probably did not work before, and was
5164 commented out, is that it was re-expanding already
5165 expanded target_exprs (target == 0 and DECL_RTL (slot)
5166 != 0), also cleaning them up many times as well. :-( */
5167
5168 /* Since SLOT is not known to the called function
5169 to belong to its stack frame, we must build an explicit
5170 cleanup. This case occurs when we must build up a reference
5171 to pass the reference as an argument. In this case,
5172 it is very likely that such a reference need not be
5173 built here. */
5174
5175 if (TREE_OPERAND (exp, 2) == 0)
5176 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5177 if (TREE_OPERAND (exp, 2))
5178 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5179 cleanups_this_call);
5180 #endif
5181 }
5182 else
5183 {
5184 /* This case does occur, when expanding a parameter which
5185 needs to be constructed on the stack. The target
5186 is the actual stack address that we want to initialize.
5187 The function we call will perform the cleanup in this case. */
5188
5189 DECL_RTL (slot) = target;
5190 }
5191
5192 exp1 = TREE_OPERAND (exp, 1);
5193 /* Mark it as expanded. */
5194 TREE_OPERAND (exp, 1) = NULL_TREE;
5195
5196 return expand_expr (exp1, target, tmode, modifier);
5197 }
5198
5199 case INIT_EXPR:
5200 {
5201 tree lhs = TREE_OPERAND (exp, 0);
5202 tree rhs = TREE_OPERAND (exp, 1);
5203 tree noncopied_parts = 0;
5204 tree lhs_type = TREE_TYPE (lhs);
5205
5206 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5207 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5208 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5209 TYPE_NONCOPIED_PARTS (lhs_type));
5210 while (noncopied_parts != 0)
5211 {
5212 expand_assignment (TREE_VALUE (noncopied_parts),
5213 TREE_PURPOSE (noncopied_parts), 0, 0);
5214 noncopied_parts = TREE_CHAIN (noncopied_parts);
5215 }
5216 return temp;
5217 }
5218
5219 case MODIFY_EXPR:
5220 {
5221 /* If lhs is complex, expand calls in rhs before computing it.
5222 That's so we don't compute a pointer and save it over a call.
5223 If lhs is simple, compute it first so we can give it as a
5224 target if the rhs is just a call.  This avoids an extra temp and copy,
5225 and prevents a partial subsumption which makes bad code.
5226 Actually we could treat component_ref's of vars like vars. */
5227
5228 tree lhs = TREE_OPERAND (exp, 0);
5229 tree rhs = TREE_OPERAND (exp, 1);
5230 tree noncopied_parts = 0;
5231 tree lhs_type = TREE_TYPE (lhs);
5232
5233 temp = 0;
5234
5235 if (TREE_CODE (lhs) != VAR_DECL
5236 && TREE_CODE (lhs) != RESULT_DECL
5237 && TREE_CODE (lhs) != PARM_DECL)
5238 preexpand_calls (exp);
5239
5240 /* Check for |= or &= of a bitfield of size one into another bitfield
5241 of size one.  In this case, (unless we need the result of the
5242 assignment) we can do this more efficiently with a
5243 test followed by an assignment, if necessary.
5244
5245 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5246 things change so we do, this code should be enhanced to
5247 support it. */
5248 if (ignore
5249 && TREE_CODE (lhs) == COMPONENT_REF
5250 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5251 || TREE_CODE (rhs) == BIT_AND_EXPR)
5252 && TREE_OPERAND (rhs, 0) == lhs
5253 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5254 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5255 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5256 {
5257 rtx label = gen_label_rtx ();
5258
5259 do_jump (TREE_OPERAND (rhs, 1),
5260 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5261 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5262 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5263 (TREE_CODE (rhs) == BIT_IOR_EXPR
5264 ? integer_one_node
5265 : integer_zero_node)),
5266 0, 0);
5267 do_pending_stack_adjust ();
5268 emit_label (label);
5269 return const0_rtx;
5270 }
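/* Worked example of the special case above (editorial sketch): given

       struct s { unsigned a : 1, b : 1; } x;

   the statement `x.a |= x.b;', with its value unused, is emitted as
   the equivalent of `if (x.b) x.a = 1;', and `x.a &= x.b;' as
   `if (! x.b) x.a = 0;' -- a jump on the source bit plus a constant
   store, instead of a read-modify-write of both bitfields.  */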
5271
5272 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5273 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5274 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5275 TYPE_NONCOPIED_PARTS (lhs_type));
5276
5277 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5278 while (noncopied_parts != 0)
5279 {
5280 expand_assignment (TREE_PURPOSE (noncopied_parts),
5281 TREE_VALUE (noncopied_parts), 0, 0);
5282 noncopied_parts = TREE_CHAIN (noncopied_parts);
5283 }
5284 return temp;
5285 }
5286
5287 case PREINCREMENT_EXPR:
5288 case PREDECREMENT_EXPR:
5289 return expand_increment (exp, 0);
5290
5291 case POSTINCREMENT_EXPR:
5292 case POSTDECREMENT_EXPR:
5293 /* Faster to treat as pre-increment if result is not used. */
5294 return expand_increment (exp, ! ignore);
5295
5296 case ADDR_EXPR:
5297 /* Are we taking the address of a nested function? */
5298 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5299 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5300 {
5301 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5302 op0 = force_operand (op0, target);
5303 }
5304 else
5305 {
5306 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5307 (modifier == EXPAND_INITIALIZER
5308 ? modifier : EXPAND_CONST_ADDRESS));
5309 if (GET_CODE (op0) != MEM)
5310 abort ();
5311
5312 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5313 return XEXP (op0, 0);
5314 op0 = force_operand (XEXP (op0, 0), target);
5315 }
5316 if (flag_force_addr && GET_CODE (op0) != REG)
5317 return force_reg (Pmode, op0);
5318 return op0;
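/* Editorial note on the nested-function branch above: in GNU C,

       int outer (int n)
       {
         int inner (int m) { return m + n; }
         int (*fp) (int) = inner;
         return (*fp) (1);
       }

   taking the address of `inner' yields the address of a trampoline,
   a small run-time stub that loads the static chain (so that `n' is
   reachable) before jumping to the real code of `inner'.  */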
5319
5320 case ENTRY_VALUE_EXPR:
5321 abort ();
5322
5323 /* COMPLEX type for Extended Pascal & Fortran */
5324 case COMPLEX_EXPR:
5325 {
5326 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5327
5328 rtx prev;
5329
5330 /* Expand the operands into rtx.  */
5331 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5332 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5333
5334 if (! target)
5335 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5336
5337 prev = get_last_insn ();
5338
5339 /* Tell flow that the whole of the destination is being set. */
5340 if (GET_CODE (target) == REG)
5341 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5342
5343 /* Move the real (op0) and imaginary (op1) parts to their location. */
5344 emit_move_insn (gen_realpart (mode, target), op0);
5345 emit_move_insn (gen_imagpart (mode, target), op1);
5346
5347 /* Complex construction should appear as a single unit. */
5348 group_insns (prev);
5349
5350 return target;
5351 }
5352
5353 case REALPART_EXPR:
5354 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5355 return gen_realpart (mode, op0);
5356
5357 case IMAGPART_EXPR:
5358 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5359 return gen_imagpart (mode, op0);
5360
5361 case CONJ_EXPR:
5362 {
5363 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5364 rtx imag_t;
5365 rtx prev;
5366
5367 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5368
5369 if (! target)
5370 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5371
5372 prev = get_last_insn ();
5373
5374 /* Tell flow that the whole of the destination is being set. */
5375 if (GET_CODE (target) == REG)
5376 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5377
5378 /* Store the realpart and the negated imagpart to target. */
5379 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5380
5381 imag_t = gen_imagpart (mode, target);
5382 temp = expand_unop (mode, neg_optab,
5383 gen_imagpart (mode, op0), imag_t, 0);
5384 if (temp != imag_t)
5385 emit_move_insn (imag_t, temp);
5386
5387 /* Conjugate should appear as a single unit */
5388 group_insns (prev);
5389
5390 return target;
5391 }
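/* Editorial sketch of the CONJ_EXPR case above: conjugation keeps the
   real part and negates the imaginary part, i.e. in source terms
   (with a hypothetical struct standing in for a complex type):

       struct cplx { double re, im; };

       struct cplx
       my_conj (z)
            struct cplx z;
       {
         z.im = -z.im;
         return z;
       }
*/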
5392
5393 case ERROR_MARK:
5394 return const0_rtx;
5395
5396 default:
5397 return (*lang_expand_expr) (exp, target, tmode, modifier);
5398 }
5399
5400 /* Here to do an ordinary binary operator, generating an instruction
5401 from the optab already placed in `this_optab'. */
5402 binop:
5403 preexpand_calls (exp);
5404 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5405 subtarget = 0;
5406 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5407 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5408 binop2:
5409 temp = expand_binop (mode, this_optab, op0, op1, target,
5410 unsignedp, OPTAB_LIB_WIDEN);
5411 if (temp == 0)
5412 abort ();
5413 return temp;
5414 }
5415 \f
5416 /* Return the alignment in bits of EXP, a pointer valued expression.
5417 But don't return more than MAX_ALIGN no matter what.
5418 The alignment returned is, by default, the alignment of the thing that
5419 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5420
5421 Otherwise, look at the expression to see if we can do better, i.e., if the
5422 expression is actually pointing at an object whose alignment is tighter. */
5423
5424 static int
5425 get_pointer_alignment (exp, max_align)
5426 tree exp;
5427 unsigned max_align;
5428 {
5429 unsigned align, inner;
5430
5431 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5432 return 0;
5433
5434 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5435 align = MIN (align, max_align);
5436
5437 while (1)
5438 {
5439 switch (TREE_CODE (exp))
5440 {
5441 case NOP_EXPR:
5442 case CONVERT_EXPR:
5443 case NON_LVALUE_EXPR:
5444 exp = TREE_OPERAND (exp, 0);
5445 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5446 return align;
5447 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5448 inner = MIN (inner, max_align);
5449 align = MAX (align, inner);
5450 break;
5451
5452 case PLUS_EXPR:
5453 /* If sum of pointer + int, restrict our maximum alignment to that
5454 imposed by the integer. If not, we can't do any better than
5455 ALIGN. */
5456 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5457 return align;
5458
5459 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5460 & (max_align - 1))
5461 != 0)
5462 max_align >>= 1;
5463
5464 exp = TREE_OPERAND (exp, 0);
5465 break;
5466
5467 case ADDR_EXPR:
5468 /* See what we are pointing at and look at its alignment. */
5469 exp = TREE_OPERAND (exp, 0);
5470 if (TREE_CODE (exp) == FUNCTION_DECL)
5471 align = MAX (align, FUNCTION_BOUNDARY);
5472 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5473 align = MAX (align, DECL_ALIGN (exp));
5474 #ifdef CONSTANT_ALIGNMENT
5475 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5476 align = CONSTANT_ALIGNMENT (exp, align);
5477 #endif
5478 return MIN (align, max_align);
5479
5480 default:
5481 return align;
5482 }
5483 }
5484 }
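/* Worked example for the PLUS_EXPR case above (editorial): with
   MAX_ALIGN == 64 bits, the address `(char *) &d + 6' (d a double
   aligned to 64 bits) has a byte offset of 6, i.e. 48 bits;
   48 & 63 != 0 halves max_align to 32, 48 & 31 != 0 halves it to 16,
   and 48 & 15 == 0 stops the loop, so the sum is known to be at most
   16-bit (2-byte) aligned.  */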
5485 \f
5486 /* Return the tree node and offset if a given argument corresponds to
5487 a string constant. */
5488
5489 static tree
5490 string_constant (arg, ptr_offset)
5491 tree arg;
5492 tree *ptr_offset;
5493 {
5494 STRIP_NOPS (arg);
5495
5496 if (TREE_CODE (arg) == ADDR_EXPR
5497 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5498 {
5499 *ptr_offset = integer_zero_node;
5500 return TREE_OPERAND (arg, 0);
5501 }
5502 else if (TREE_CODE (arg) == PLUS_EXPR)
5503 {
5504 tree arg0 = TREE_OPERAND (arg, 0);
5505 tree arg1 = TREE_OPERAND (arg, 1);
5506
5507 STRIP_NOPS (arg0);
5508 STRIP_NOPS (arg1);
5509
5510 if (TREE_CODE (arg0) == ADDR_EXPR
5511 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5512 {
5513 *ptr_offset = arg1;
5514 return TREE_OPERAND (arg0, 0);
5515 }
5516 else if (TREE_CODE (arg1) == ADDR_EXPR
5517 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5518 {
5519 *ptr_offset = arg0;
5520 return TREE_OPERAND (arg1, 0);
5521 }
5522 }
5523
5524 return 0;
5525 }
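/* Editorial examples: for an argument tree of the form `&"hello"',
   string_constant yields the STRING_CST "hello" with *PTR_OFFSET set
   to integer zero; for `&"hello" + 2' (the ADDR_EXPR may be either
   operand of the PLUS_EXPR) it yields the same STRING_CST with
   *PTR_OFFSET set to the constant 2.  For anything else it yields 0.  */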
5526
5527 /* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
5528    answer, because the string could contain a zero byte in the middle;
5529    TREE_STRING_LENGTH is the size of the character array, not the string.
5530
5531    Unfortunately, string_constant can't access the values of const char
5532    arrays with initializers, so neither can we here.  */
5533
5534 static tree
5535 c_strlen (src)
5536 tree src;
5537 {
5538 tree offset_node;
5539 int offset, max;
5540 char *ptr;
5541
5542 src = string_constant (src, &offset_node);
5543 if (src == 0)
5544 return 0;
5545 max = TREE_STRING_LENGTH (src);
5546 ptr = TREE_STRING_POINTER (src);
5547 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5548 {
5549 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5550 compute the offset to the following null if we don't know where to
5551 start searching for it. */
5552 int i;
5553 for (i = 0; i < max; i++)
5554 if (ptr[i] == 0)
5555 return 0;
5556 /* We don't know the starting offset, but we do know that the string
5557 has no internal zero bytes. We can assume that the offset falls
5558 within the bounds of the string; otherwise, the programmer deserves
5559 what he gets. Subtract the offset from the length of the string,
5560 and return that. */
5561 /* This would perhaps not be valid if we were dealing with named
5562 arrays in addition to literal string constants. */
5563 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5564 }
5565
5566 /* We have a known offset into the string. Start searching there for
5567 a null character. */
5568 if (offset_node == 0)
5569 offset = 0;
5570 else
5571 {
5572 /* Did we get a long long offset? If so, punt. */
5573 if (TREE_INT_CST_HIGH (offset_node) != 0)
5574 return 0;
5575 offset = TREE_INT_CST_LOW (offset_node);
5576 }
5577 /* If the offset is known to be out of bounds, warn, and call strlen at
5578 runtime. */
5579 if (offset < 0 || offset > max)
5580 {
5581 warning ("offset outside bounds of constant string");
5582 return 0;
5583 }
5584 /* Use strlen to search for the first zero byte. Since any strings
5585 constructed with build_string will have nulls appended, we win even
5586 if we get handed something like (char[4])"abcd".
5587
5588 Since OFFSET is our starting index into the string, no further
5589 calculation is needed. */
5590 return size_int (strlen (ptr + offset));
5591 }
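/* Worked examples (editorial): c_strlen of `"foo"' yields
   size_int (3); `"foo\0bar" + 5' (constant offset) yields
   strlen ("ar"), i.e. 2; and an offset not known until run time makes
   c_strlen return 0 as soon as any zero byte is found within the
   array, forcing the caller to fall back on a run-time strlen.  */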
5592 \f
5593 /* Expand an expression EXP that calls a built-in function,
5594 with result going to TARGET if that's convenient
5595 (and in mode MODE if that's convenient).
5596 SUBTARGET may be used as the target for computing one of EXP's operands.
5597 IGNORE is nonzero if the value is to be ignored. */
5598
5599 static rtx
5600 expand_builtin (exp, target, subtarget, mode, ignore)
5601 tree exp;
5602 rtx target;
5603 rtx subtarget;
5604 enum machine_mode mode;
5605 int ignore;
5606 {
5607 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5608 tree arglist = TREE_OPERAND (exp, 1);
5609 rtx op0;
5610 rtx lab1, insns;
5611 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5612 optab builtin_optab;
5613
5614 switch (DECL_FUNCTION_CODE (fndecl))
5615 {
5616 case BUILT_IN_ABS:
5617 case BUILT_IN_LABS:
5618 case BUILT_IN_FABS:
5619 /* build_function_call changes these into ABS_EXPR. */
5620 abort ();
5621
5622 case BUILT_IN_SIN:
5623 case BUILT_IN_COS:
5624 case BUILT_IN_FSQRT:
5625 /* If not optimizing, call the library function. */
5626 if (! optimize)
5627 break;
5628
5629 if (arglist == 0
5630 /* Arg could be wrong type if user redeclared this fcn wrong. */
5631 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5632 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5633
5634 /* Stabilize and compute the argument. */
5635 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5636 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5637 {
5638 exp = copy_node (exp);
5639 arglist = copy_node (arglist);
5640 TREE_OPERAND (exp, 1) = arglist;
5641 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5642 }
5643 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5644
5645 /* Make a suitable register to place result in. */
5646 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5647
5648 emit_queue ();
5649 start_sequence ();
5650
5651 switch (DECL_FUNCTION_CODE (fndecl))
5652 {
5653 case BUILT_IN_SIN:
5654 builtin_optab = sin_optab; break;
5655 case BUILT_IN_COS:
5656 builtin_optab = cos_optab; break;
5657 case BUILT_IN_FSQRT:
5658 builtin_optab = sqrt_optab; break;
5659 default:
5660 abort ();
5661 }
5662
5663 /* Compute into TARGET.
5664 Set TARGET to wherever the result comes back. */
5665 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5666 builtin_optab, op0, target, 0);
5667
5668 /* If we were unable to expand via the builtin, stop the
5669 sequence (without outputting the insns) and break, causing
5670 a call to the library function.  */
5671 if (target == 0)
5672 {
5673 end_sequence ();
5674 break;
5675 }
5676
5677 /* Check the results by default. But if flag_fast_math is turned on,
5678 then assume sqrt will always be called with valid arguments. */
5679
5680 if (! flag_fast_math)
5681 {
5682 /* Don't define the builtin FP instructions
5683 if your machine is not IEEE. */
5684 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5685 abort ();
5686
5687 lab1 = gen_label_rtx ();
5688
5689 /* Test the result; if it is NaN, set errno=EDOM because
5690 the argument was not in the domain. */
5691 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5692 emit_jump_insn (gen_beq (lab1));
5693
5694 #if TARGET_EDOM
5695 {
5696 #ifdef GEN_ERRNO_RTX
5697 rtx errno_rtx = GEN_ERRNO_RTX;
5698 #else
5699 rtx errno_rtx
5700 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5701 #endif
5702
5703 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5704 }
5705 #else
5706 /* We can't set errno=EDOM directly; let the library call do it.
5707 Pop the arguments right away in case the call gets deleted. */
5708 NO_DEFER_POP;
5709 expand_call (exp, target, 0);
5710 OK_DEFER_POP;
5711 #endif
5712
5713 emit_label (lab1);
5714 }
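      /* The !flag_fast_math code above amounts to (editorial pseudo-C):

             target = sqrt_insn (op0);
             if (target == target)
               goto lab1;                on a non-NaN the EQ test succeeds
             errno = EDOM;               or re-call the library function
           lab1: ;

         relying on the IEEE rule that a NaN compares unequal to
         itself.  */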
5715
5716 /* Output the entire sequence. */
5717 insns = get_insns ();
5718 end_sequence ();
5719 emit_insns (insns);
5720
5721 return target;
5722
5723 case BUILT_IN_SAVEREGS:
5724 /* Don't do __builtin_saveregs more than once in a function.
5725 Save the result of the first call and reuse it. */
5726 if (saveregs_value != 0)
5727 return saveregs_value;
5728 {
5729 /* When this function is called, it means that registers must be
5730 saved on entry to this function. So we migrate the
5731 call to the first insn of this function. */
5732 rtx temp;
5733 rtx seq;
5734 rtx valreg, saved_valreg;
5735
5736 /* Now really call the function. `expand_call' does not call
5737 expand_builtin, so there is no danger of infinite recursion here. */
5738 start_sequence ();
5739
5740 #ifdef EXPAND_BUILTIN_SAVEREGS
5741 /* Do whatever the machine needs done in this case. */
5742 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5743 #else
5744 /* The register where the function returns its value
5745 is likely to have something else in it, such as an argument.
5746 So preserve that register around the call. */
5747 if (value_mode != VOIDmode)
5748 {
5749 valreg = hard_libcall_value (value_mode);
5750 saved_valreg = gen_reg_rtx (value_mode);
5751 emit_move_insn (saved_valreg, valreg);
5752 }
5753
5754 /* Generate the call, putting the value in a pseudo. */
5755 temp = expand_call (exp, target, ignore);
5756
5757 if (value_mode != VOIDmode)
5758 emit_move_insn (valreg, saved_valreg);
5759 #endif
5760
5761 seq = get_insns ();
5762 end_sequence ();
5763
5764 saveregs_value = temp;
5765
5766 /* This won't work inside a SEQUENCE--it really has to be
5767 at the start of the function. */
5768 if (in_sequence_p ())
5769 {
5770 /* Better to do this than to crash. */
5771 error ("`va_start' used within `({...})'");
5772 return temp;
5773 }
5774
5775 /* Put the sequence after the NOTE that starts the function. */
5776 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5777 return temp;
5778 }
5779
5780 /* __builtin_args_info (N) returns word N of the arg space info
5781 for the current function. The number and meanings of words
5782 are controlled by the definition of CUMULATIVE_ARGS.  */
5783 case BUILT_IN_ARGS_INFO:
5784 {
5785 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5786 int i;
5787 int *word_ptr = (int *) &current_function_args_info;
5788 tree type, elts, result;
5789
5790 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5791 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5792 __FILE__, __LINE__);
5793
5794 if (arglist != 0)
5795 {
5796 tree arg = TREE_VALUE (arglist);
5797 if (TREE_CODE (arg) != INTEGER_CST)
5798 error ("argument of __builtin_args_info must be constant");
5799 else
5800 {
5801 int wordnum = TREE_INT_CST_LOW (arg);
5802
5803 if (wordnum < 0 || wordnum >= nwords)
5804 error ("argument of __builtin_args_info out of range");
5805 else
5806 return GEN_INT (word_ptr[wordnum]);
5807 }
5808 }
5809 else
5810 error ("missing argument in __builtin_args_info");
5811
5812 return const0_rtx;
5813
5814 #if 0
5815 for (i = 0; i < nwords; i++)
5816 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5817
5818 type = build_array_type (integer_type_node,
5819 build_index_type (build_int_2 (nwords, 0)));
5820 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5821 TREE_CONSTANT (result) = 1;
5822 TREE_STATIC (result) = 1;
5823 result = build (INDIRECT_REF, build_pointer_type (type), result);
5824 TREE_CONSTANT (result) = 1;
5825 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5826 #endif
5827 }
5828
5829 /* Return the address of the first anonymous stack arg. */
5830 case BUILT_IN_NEXT_ARG:
5831 {
5832 tree fntype = TREE_TYPE (current_function_decl);
5833 if (!(TYPE_ARG_TYPES (fntype) != 0
5834 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5835 != void_type_node)))
5836 {
5837 error ("`va_start' used in function with fixed args");
5838 return const0_rtx;
5839 }
5840 }
5841
5842 return expand_binop (Pmode, add_optab,
5843 current_function_internal_arg_pointer,
5844 current_function_arg_offset_rtx,
5845 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5846
5847 case BUILT_IN_CLASSIFY_TYPE:
5848 if (arglist != 0)
5849 {
5850 tree type = TREE_TYPE (TREE_VALUE (arglist));
5851 enum tree_code code = TREE_CODE (type);
5852 if (code == VOID_TYPE)
5853 return GEN_INT (void_type_class);
5854 if (code == INTEGER_TYPE)
5855 return GEN_INT (integer_type_class);
5856 if (code == CHAR_TYPE)
5857 return GEN_INT (char_type_class);
5858 if (code == ENUMERAL_TYPE)
5859 return GEN_INT (enumeral_type_class);
5860 if (code == BOOLEAN_TYPE)
5861 return GEN_INT (boolean_type_class);
5862 if (code == POINTER_TYPE)
5863 return GEN_INT (pointer_type_class);
5864 if (code == REFERENCE_TYPE)
5865 return GEN_INT (reference_type_class);
5866 if (code == OFFSET_TYPE)
5867 return GEN_INT (offset_type_class);
5868 if (code == REAL_TYPE)
5869 return GEN_INT (real_type_class);
5870 if (code == COMPLEX_TYPE)
5871 return GEN_INT (complex_type_class);
5872 if (code == FUNCTION_TYPE)
5873 return GEN_INT (function_type_class);
5874 if (code == METHOD_TYPE)
5875 return GEN_INT (method_type_class);
5876 if (code == RECORD_TYPE)
5877 return GEN_INT (record_type_class);
5878 if (code == UNION_TYPE)
5879 return GEN_INT (union_type_class);
5880 if (code == ARRAY_TYPE)
5881 return GEN_INT (array_type_class);
5882 if (code == STRING_TYPE)
5883 return GEN_INT (string_type_class);
5884 if (code == SET_TYPE)
5885 return GEN_INT (set_type_class);
5886 if (code == FILE_TYPE)
5887 return GEN_INT (file_type_class);
5888 if (code == LANG_TYPE)
5889 return GEN_INT (lang_type_class);
5890 }
5891 return GEN_INT (no_type_class);
5892
5893 case BUILT_IN_CONSTANT_P:
5894 if (arglist == 0)
5895 return const0_rtx;
5896 else
5897 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5898 ? const1_rtx : const0_rtx);
5899
5900 case BUILT_IN_FRAME_ADDRESS:
5901 /* The argument must be a nonnegative integer constant.
5902 It counts the number of frames to scan up the stack.
5903 The value is the address of that frame. */
5904 case BUILT_IN_RETURN_ADDRESS:
5905 /* The argument must be a nonnegative integer constant.
5906 It counts the number of frames to scan up the stack.
5907 The value is the return address saved in that frame. */
5908 if (arglist == 0)
5909 /* Warning about missing arg was already issued. */
5910 return const0_rtx;
5911 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5912 {
5913 error ("invalid arg to __builtin_return_address");
5914 return const0_rtx;
5915 }
5916 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5917 {
5918 error ("invalid arg to __builtin_return_address");
5919 return const0_rtx;
5920 }
5921 else
5922 {
5923 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5924 rtx tem = frame_pointer_rtx;
5925 int i;
5926
5927 /* Scan back COUNT frames to the specified frame. */
5928 for (i = 0; i < count; i++)
5929 {
5930 /* Assume the dynamic chain pointer is in the word that
5931 the frame address points to, unless otherwise specified. */
5932 #ifdef DYNAMIC_CHAIN_ADDRESS
5933 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5934 #endif
5935 tem = memory_address (Pmode, tem);
5936 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5937 }
5938
5939 /* For __builtin_frame_address, return what we've got. */
5940 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5941 return tem;
5942
5943 /* For __builtin_return_address,
5944 get the return address from that frame.  */
5945 #ifdef RETURN_ADDR_RTX
5946 return RETURN_ADDR_RTX (count, tem);
5947 #else
5948 tem = memory_address (Pmode,
5949 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5950 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5951 #endif
5952 }
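      /* Editorial sketch: with the default definitions (no
         DYNAMIC_CHAIN_ADDRESS, no RETURN_ADDR_RTX), the loop above
         acts like the hypothetical

             char *fp = current frame pointer;
             while (count-- > 0)
               fp = *(char **) fp;                      follow saved FPs
             return *(char **) (fp + sizeof (char *));  saved return addr

         i.e. the word a frame address points to holds the caller's
         frame pointer, and the word after it the return address.  */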
5953
5954 case BUILT_IN_ALLOCA:
5955 if (arglist == 0
5956 /* Arg could be non-integer if user redeclared this fcn wrong. */
5957 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5958 return const0_rtx;
5959 current_function_calls_alloca = 1;
5960 /* Compute the argument. */
5961 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5962
5963 /* Allocate the desired space. */
5964 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5965
5966 /* Record the new stack level for nonlocal gotos. */
5967 if (nonlocal_goto_handler_slot != 0)
5968 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5969 return target;
5970
5971 case BUILT_IN_FFS:
5972 /* If not optimizing, call the library function. */
5973 if (!optimize)
5974 break;
5975
5976 if (arglist == 0
5977 /* Arg could be non-integer if user redeclared this fcn wrong. */
5978 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5979 return const0_rtx;
5980
5981 /* Compute the argument. */
5982 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5983 /* Compute ffs, into TARGET if possible.
5984 Set TARGET to wherever the result comes back. */
5985 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5986 ffs_optab, op0, target, 1);
5987 if (target == 0)
5988 abort ();
5989 return target;
5990
5991 case BUILT_IN_STRLEN:
5992 /* If not optimizing, call the library function. */
5993 if (!optimize)
5994 break;
5995
5996 if (arglist == 0
5997 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5998 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5999 return const0_rtx;
6000 else
6001 {
6002 tree src = TREE_VALUE (arglist);
6003 tree len = c_strlen (src);
6004
6005 int align
6006 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6007
6008 rtx result, src_rtx, char_rtx;
6009 enum machine_mode insn_mode = value_mode, char_mode;
6010 enum insn_code icode;
6011
6012 /* If the length is known, just return it. */
6013 if (len != 0)
6014 return expand_expr (len, target, mode, 0);
6015
6016 /* If SRC is not a pointer type, don't do this operation inline. */
6017 if (align == 0)
6018 break;
6019
6020 /* Call a function if we can't compute strlen in the right mode. */
6021
6022 while (insn_mode != VOIDmode)
6023 {
6024 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6025 if (icode != CODE_FOR_nothing)
6026 break;
6027
6028 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6029 }
6030 if (insn_mode == VOIDmode)
6031 break;
6032
6033 /* Make a place to write the result of the instruction. */
6034 result = target;
6035 if (! (result != 0
6036 && GET_CODE (result) == REG
6037 && GET_MODE (result) == insn_mode
6038 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6039 result = gen_reg_rtx (insn_mode);
6040
6041 /* Make sure the operands are acceptable to the predicates. */
6042
6043 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6044 result = gen_reg_rtx (insn_mode);
6045
6046 src_rtx = memory_address (BLKmode,
6047 expand_expr (src, NULL_RTX, Pmode,
6048 EXPAND_NORMAL));
6049 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6050 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6051
6052 char_rtx = const0_rtx;
6053 char_mode = insn_operand_mode[(int)icode][2];
6054 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6055 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6056
6057 emit_insn (GEN_FCN (icode) (result,
6058 gen_rtx (MEM, BLKmode, src_rtx),
6059 char_rtx, GEN_INT (align)));
6060
6061 /* Return the value in the proper mode for this function. */
6062 if (GET_MODE (result) == value_mode)
6063 return result;
6064 else if (target != 0)
6065 {
6066 convert_move (target, result, 0);
6067 return target;
6068 }
6069 else
6070 return convert_to_mode (value_mode, result, 0);
6071 }
6072
6073 case BUILT_IN_STRCPY:
6074 /* If not optimizing, call the library function. */
6075 if (!optimize)
6076 break;
6077
6078 if (arglist == 0
6079 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6080 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6081 || TREE_CHAIN (arglist) == 0
6082 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6083 return const0_rtx;
6084 else
6085 {
6086 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6087
6088 if (len == 0)
6089 break;
6090
6091 len = size_binop (PLUS_EXPR, len, integer_one_node);
6092
6093 chainon (arglist, build_tree_list (NULL_TREE, len));
6094 }
6095
6096 /* Falls through to the memcpy case.  */
6097 case BUILT_IN_MEMCPY:
6098 /* If not optimizing, call the library function. */
6099 if (!optimize)
6100 break;
6101
6102 if (arglist == 0
6103 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6104 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6105 || TREE_CHAIN (arglist) == 0
6106 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6107 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6108 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6109 return const0_rtx;
6110 else
6111 {
6112 tree dest = TREE_VALUE (arglist);
6113 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6114 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6115
6116 int src_align
6117 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6118 int dest_align
6119 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6120 rtx dest_rtx;
6121
6122 /* If either SRC or DEST is not a pointer type, don't do
6123 this operation in-line. */
6124 if (src_align == 0 || dest_align == 0)
6125 {
6126 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6127 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6128 break;
6129 }
6130
6131 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6132
6133 /* Copy the block of memory most expediently.  */
6134 emit_block_move (gen_rtx (MEM, BLKmode,
6135 memory_address (BLKmode, dest_rtx)),
6136 gen_rtx (MEM, BLKmode,
6137 memory_address (BLKmode,
6138 expand_expr (src, NULL_RTX,
6139 Pmode,
6140 EXPAND_NORMAL))),
6141 expand_expr (len, NULL_RTX, VOIDmode, 0),
6142 MIN (src_align, dest_align));
6143 return dest_rtx;
6144 }
6145
6146 /* These comparison functions need an instruction that returns an actual
6147 index. An ordinary compare that just sets the condition codes
6148 is not enough. */
6149 #ifdef HAVE_cmpstrsi
6150 case BUILT_IN_STRCMP:
6151 /* If not optimizing, call the library function. */
6152 if (!optimize)
6153 break;
6154
6155 if (arglist == 0
6156 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6157 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6158 || TREE_CHAIN (arglist) == 0
6159 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6160 return const0_rtx;
6161 else if (!HAVE_cmpstrsi)
6162 break;
6163 {
6164 tree arg1 = TREE_VALUE (arglist);
6165 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6166 tree offset;
6167 tree len, len2;
6168
6169 len = c_strlen (arg1);
6170 if (len)
6171 len = size_binop (PLUS_EXPR, integer_one_node, len);
6172 len2 = c_strlen (arg2);
6173 if (len2)
6174 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6175
6176 /* If we don't have a constant length for the first, use the length
6177 of the second, if we know it. We don't require a constant for
6178 this case; some cost analysis could be done if both are available
6179 but neither is constant. For now, assume they're equally cheap.
6180
6181 If both strings have constant lengths, use the smaller. This
6182 could arise if optimization results in strcmp being called with
6183 two fixed strings, or if the code was machine-generated. We should
6184 add some code to the `memcmp' handler below to deal with such
6185 situations, someday. */
6186 if (!len || TREE_CODE (len) != INTEGER_CST)
6187 {
6188 if (len2)
6189 len = len2;
6190 else if (len == 0)
6191 break;
6192 }
6193 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6194 {
6195 if (tree_int_cst_lt (len2, len))
6196 len = len2;
6197 }
6198
6199 chainon (arglist, build_tree_list (NULL_TREE, len));
6200 }
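      /* Worked examples of the length choice above (editorial):
         strcmp (s, "abc") compares strlen ("abc") + 1 == 4 bytes;
         strcmp ("ab", "abcd") compares MIN (3, 5) == 3 bytes, which
         suffices because the first string's terminating null decides
         the result within those bytes.  */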
6201
6202 /* Falls through to the memcmp case.  */
6203 case BUILT_IN_MEMCMP:
6204 /* If not optimizing, call the library function. */
6205 if (!optimize)
6206 break;
6207
6208 if (arglist == 0
6209 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6210 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6211 || TREE_CHAIN (arglist) == 0
6212 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6213 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6214 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6215 return const0_rtx;
6216 else if (!HAVE_cmpstrsi)
6217 break;
6218 {
6219 tree arg1 = TREE_VALUE (arglist);
6220 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6221 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6222 rtx result;
6223
6224 int arg1_align
6225 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6226 int arg2_align
6227 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6228 enum machine_mode insn_mode
6229 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6230
6231 /* If either arg is not a pointer type, just call the function.  */
6232 if (arg1_align == 0 || arg2_align == 0)
6233 {
6234 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6235 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6236 break;
6237 }
6238
6239 /* Make a place to write the result of the instruction. */
6240 result = target;
6241 if (! (result != 0
6242 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6243 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6244 result = gen_reg_rtx (insn_mode);
6245
6246 emit_insn (gen_cmpstrsi (result,
6247 gen_rtx (MEM, BLKmode,
6248 expand_expr (arg1, NULL_RTX, Pmode,
6249 EXPAND_NORMAL)),
6250 gen_rtx (MEM, BLKmode,
6251 expand_expr (arg2, NULL_RTX, Pmode,
6252 EXPAND_NORMAL)),
6253 expand_expr (len, NULL_RTX, VOIDmode, 0),
6254 GEN_INT (MIN (arg1_align, arg2_align))));
6255
6256 /* Return the value in the proper mode for this function. */
6257 mode = TYPE_MODE (TREE_TYPE (exp));
6258 if (GET_MODE (result) == mode)
6259 return result;
6260 else if (target != 0)
6261 {
6262 convert_move (target, result, 0);
6263 return target;
6264 }
6265 else
6266 return convert_to_mode (mode, result, 0);
6267 }
6268 #else
6269 case BUILT_IN_STRCMP:
6270 case BUILT_IN_MEMCMP:
6271 break;
6272 #endif
6273
6274 default: /* just do library call, if unknown builtin */
6275 error ("built-in function %s not currently supported",
6276 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6277 }
6278
6279 /* The switch statement above can drop through to cause the function
6280 to be called normally. */
6281
6282 return expand_call (exp, target, ignore);
6283 }
6284 \f
6285 /* Expand code for a post- or pre- increment or decrement
6286 and return the RTX for the result.
6287 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6288
6289 static rtx
6290 expand_increment (exp, post)
6291 register tree exp;
6292 int post;
6293 {
6294 register rtx op0, op1;
6295 register rtx temp, value;
6296 register tree incremented = TREE_OPERAND (exp, 0);
6297 optab this_optab = add_optab;
6298 int icode;
6299 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6300 int op0_is_copy = 0;
6301
6302 /* Stabilize any component ref that might need to be
6303 evaluated more than once below. */
6304 if (!post
6305 || TREE_CODE (incremented) == BIT_FIELD_REF
6306 || (TREE_CODE (incremented) == COMPONENT_REF
6307 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6308 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6309 incremented = stabilize_reference (incremented);
6310
6311 /* Compute the operands as RTX.
6312 Note whether OP0 is the actual lvalue or a copy of it:
6313 I believe it is a copy iff it is a register or subreg
6314 and insns were generated in computing it. */
6315
6316 temp = get_last_insn ();
6317 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6318
6319 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6320 in place but instead must do sign- or zero-extension during assignment,
6321 so we copy it into a new register and let the code below use it as
6322 a copy.
6323
6324 Note that we can safely modify this SUBREG since it is known not to be
6325 shared (it was made by the expand_expr call above). */
6326
6327 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6328 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6329
6330 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6331 && temp != get_last_insn ());
6332 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6333
6334 /* Decide whether incrementing or decrementing. */
6335 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6336 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6337 this_optab = sub_optab;
6338
6339 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6340 then we cannot just increment OP0. We must therefore contrive to
6341 increment the original value. Then, for postincrement, we can return
6342 OP0 since it is a copy of the old value. For preincrement, we want
6343 to always expand here, since this generates better or equivalent code. */
6344 if (!post || op0_is_copy)
6345 {
6346 /* This is the easiest way to increment the value wherever it is.
6347 Problems with multiple evaluation of INCREMENTED are prevented
6348 because either (1) it is a component_ref or preincrement,
6349 in which case it was stabilized above, or (2) it is an array_ref
6350 with constant index in an array in a register, which is
6351 safe to reevaluate. */
6352 tree newexp = build ((this_optab == add_optab
6353 ? PLUS_EXPR : MINUS_EXPR),
6354 TREE_TYPE (exp),
6355 incremented,
6356 TREE_OPERAND (exp, 1));
6357 temp = expand_assignment (incremented, newexp, ! post, 0);
6358 return post ? op0 : temp;
6359 }
6360
6361 /* Convert decrement by a constant into a negative increment. */
6362 if (this_optab == sub_optab
6363 && GET_CODE (op1) == CONST_INT)
6364 {
6365 op1 = GEN_INT (- INTVAL (op1));
6366 this_optab = add_optab;
6367 }
6368
6369 if (post)
6370 {
6371 /* We have a true reference to the value in OP0.
6372 If there is an insn to add or subtract in this mode, queue it. */
6373
6374 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6375 op0 = stabilize (op0);
6376 #endif
6377
6378 icode = (int) this_optab->handlers[(int) mode].insn_code;
6379 if (icode != (int) CODE_FOR_nothing
6380 /* Make sure that OP0 is valid for operands 0 and 1
6381 of the insn we want to queue. */
6382 && (*insn_operand_predicate[icode][0]) (op0, mode)
6383 && (*insn_operand_predicate[icode][1]) (op0, mode))
6384 {
6385 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6386 op1 = force_reg (mode, op1);
6387
6388 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6389 }
6390 }
6391
6392 /* Preincrement, or we can't increment with one simple insn. */
6393 if (post)
6394 /* Save a copy of the value before inc or dec, to return it later. */
6395 temp = value = copy_to_reg (op0);
6396 else
6397 /* Arrange to return the incremented value. */
6398 /* Copy the rtx because expand_binop will protect from the queue,
6399 and the results of that would be invalid for us to return
6400 if our caller does emit_queue before using our result. */
6401 temp = copy_rtx (value = op0);
6402
6403 /* Increment however we can. */
6404 op1 = expand_binop (mode, this_optab, value, op1, op0,
6405 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6406 /* Make sure the value is stored into OP0. */
6407 if (op1 != op0)
6408 emit_move_insn (op0, op1);
6409
6410 return temp;
6411 }
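/* Editorial example: for `b = a++;', when no add insn can simply be
   queued, the code above produces the equivalent of

       temp = a;      copy_to_reg saves the old value
       a = a + 1;
       b = temp;

   while `++a;', or an `a++;' whose value is ignored, is expanded
   directly as the assignment `a = a + 1'.  */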
6412 \f
6413 /* Expand all function calls contained within EXP, innermost ones first.
6414 But don't look within expressions that have sequence points.
6415 For each CALL_EXPR, record the rtx for its value
6416 in the CALL_EXPR_RTL field. */
6417
6418 static void
6419 preexpand_calls (exp)
6420 tree exp;
6421 {
6422 register int nops, i;
6423 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6424
6425 if (! do_preexpand_calls)
6426 return;
6427
6428 /* Only expressions and references can contain calls. */
6429
6430 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6431 return;
6432
6433 switch (TREE_CODE (exp))
6434 {
6435 case CALL_EXPR:
6436 /* Do nothing if already expanded. */
6437 if (CALL_EXPR_RTL (exp) != 0)
6438 return;
6439
6440 /* Do nothing to built-in functions. */
6441 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6442 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6443 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6444 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6445 return;
6446
6447 case COMPOUND_EXPR:
6448 case COND_EXPR:
6449 case TRUTH_ANDIF_EXPR:
6450 case TRUTH_ORIF_EXPR:
6451 /* If we find one of these, then we can be sure
6452 the adjust will be done for it (since it makes jumps).
6453 Do it now, so that if this is inside an argument
6454 of a function, we don't get the stack adjustment
6455 after some other args have already been pushed. */
6456 do_pending_stack_adjust ();
6457 return;
6458
6459 case BLOCK:
6460 case RTL_EXPR:
6461 case WITH_CLEANUP_EXPR:
6462 return;
6463
6464 case SAVE_EXPR:
6465 if (SAVE_EXPR_RTL (exp) != 0)
6466 return;
6467 }
6468
6469 nops = tree_code_length[(int) TREE_CODE (exp)];
6470 for (i = 0; i < nops; i++)
6471 if (TREE_OPERAND (exp, i) != 0)
6472 {
6473 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6474 if (type == 'e' || type == '<' || type == '1' || type == '2'
6475 || type == 'r')
6476 preexpand_calls (TREE_OPERAND (exp, i));
6477 }
6478 }
6479 \f
6480 /* At the start of a function, record that we have no previously-pushed
6481 arguments waiting to be popped. */
6482
6483 void
6484 init_pending_stack_adjust ()
6485 {
6486 pending_stack_adjust = 0;
6487 }
6488
6489 /* When exiting from function, if safe, clear out any pending stack adjust
6490 so the adjustment won't get done. */
6491
6492 void
6493 clear_pending_stack_adjust ()
6494 {
6495 #ifdef EXIT_IGNORE_STACK
6496 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6497 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6498 && ! flag_inline_functions)
6499 pending_stack_adjust = 0;
6500 #endif
6501 }
6502
6503 /* Pop any previously-pushed arguments that have not been popped yet. */
6504
6505 void
6506 do_pending_stack_adjust ()
6507 {
6508 if (inhibit_defer_pop == 0)
6509 {
6510 if (pending_stack_adjust != 0)
6511 adjust_stack (GEN_INT (pending_stack_adjust));
6512 pending_stack_adjust = 0;
6513 }
6514 }
6515
6516 /* Expand all cleanups up to OLD_CLEANUPS.
6517 Needed here, and also for language-dependent calls. */
6518
6519 void
6520 expand_cleanups_to (old_cleanups)
6521 tree old_cleanups;
6522 {
6523 while (cleanups_this_call != old_cleanups)
6524 {
6525 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6526 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6527 }
6528 }
6529 \f
6530 /* Expand conditional expressions. */
6531
6532 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6533 LABEL is an rtx of code CODE_LABEL, in this function and all the
6534 functions here. */
6535
6536 void
6537 jumpifnot (exp, label)
6538 tree exp;
6539 rtx label;
6540 {
6541 do_jump (exp, label, NULL_RTX);
6542 }
6543
6544 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6545
6546 void
6547 jumpif (exp, label)
6548 tree exp;
6549 rtx label;
6550 {
6551 do_jump (exp, NULL_RTX, label);
6552 }
6553
6554 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6555 the result is zero, or IF_TRUE_LABEL if the result is one.
6556 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6557 meaning fall through in that case.
6558
6559 do_jump always does any pending stack adjust except when it does not
6560 actually perform a jump. An example where there is no jump
6561 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6562
6563 This function is responsible for optimizing cases such as
6564 &&, || and comparison operators in EXP. */
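/* Editorial example: `jumpifnot (a && b, label)' reaches the
   TRUTH_ANDIF_EXPR case below and emits the equivalent of

       if (a == 0) goto label;     second operand short-circuited
       if (b == 0) goto label;

   so B is never evaluated when A is zero.  */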
6565
6566 void
6567 do_jump (exp, if_false_label, if_true_label)
6568 tree exp;
6569 rtx if_false_label, if_true_label;
6570 {
6571 register enum tree_code code = TREE_CODE (exp);
6572 /* Some cases need to create a label to jump to
6573 in order to properly fall through.
6574 These cases set DROP_THROUGH_LABEL nonzero. */
6575 rtx drop_through_label = 0;
6576 rtx temp;
6577 rtx comparison = 0;
6578 int i;
6579 tree type;
6580
6581 emit_queue ();
6582
6583 switch (code)
6584 {
6585 case ERROR_MARK:
6586 break;
6587
6588 case INTEGER_CST:
6589 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6590 if (temp)
6591 emit_jump (temp);
6592 break;
6593
6594 #if 0
6595 /* This is not true with #pragma weak */
6596 case ADDR_EXPR:
6597 /* The address of something can never be zero. */
6598 if (if_true_label)
6599 emit_jump (if_true_label);
6600 break;
6601 #endif
6602
6603 case NOP_EXPR:
6604 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6605 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6606 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6607 goto normal;
6608 case CONVERT_EXPR:
6609 /* If we are narrowing the operand, we have to do the compare in the
6610 narrower mode. */
6611 if ((TYPE_PRECISION (TREE_TYPE (exp))
6612 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6613 goto normal;
6614 case NON_LVALUE_EXPR:
6615 case REFERENCE_EXPR:
6616 case ABS_EXPR:
6617 case NEGATE_EXPR:
6618 case LROTATE_EXPR:
6619 case RROTATE_EXPR:
6620 /* These cannot change zero->non-zero or vice versa. */
6621 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6622 break;
6623
6624 #if 0
6625 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
6626 a test and can be longer if the test is eliminated. */
6627 case PLUS_EXPR:
6628 /* Reduce to minus. */
6629 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6630 TREE_OPERAND (exp, 0),
6631 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6632 TREE_OPERAND (exp, 1))));
6633 /* Process as MINUS. */
6634 #endif
6635
6636 case MINUS_EXPR:
6637 /* Non-zero iff operands of minus differ. */
6638 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6639 TREE_OPERAND (exp, 0),
6640 TREE_OPERAND (exp, 1)),
6641 NE, NE);
6642 break;
6643
6644 case BIT_AND_EXPR:
6645 /* If we are AND'ing with a small constant, do this comparison in the
6646 smallest type that fits. If the machine doesn't have comparisons
6647 that small, it will be converted back to the wider comparison.
6648 This helps if we are testing the sign bit of a narrower object.
6649 combine can't do this for us because it can't know whether a
6650 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
6651
6652 if (! SLOW_BYTE_ACCESS
6653 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6654 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6655 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6656 && (type = type_for_size (i + 1, 1)) != 0
6657 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6658 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6659 != CODE_FOR_nothing))
6660 {
6661 do_jump (convert (type, exp), if_false_label, if_true_label);
6662 break;
6663 }
6664 goto normal;
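      /* Editorial example: for `int x; ... if (x & 0x80) ...', the
         constant fits in 8 bits, so the test is recast as
         `if ((unsigned char) (x & 0x80))' and compared in QImode,
         which is cheaper on machines with byte compare insns.  */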
6665
6666 case TRUTH_NOT_EXPR:
6667 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6668 break;
6669
6670 case TRUTH_ANDIF_EXPR:
6671 if (if_false_label == 0)
6672 if_false_label = drop_through_label = gen_label_rtx ();
6673 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6674 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6675 break;
6676
6677 case TRUTH_ORIF_EXPR:
6678 if (if_true_label == 0)
6679 if_true_label = drop_through_label = gen_label_rtx ();
6680 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6681 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6682 break;
6683
6684 case COMPOUND_EXPR:
6685 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6686 free_temp_slots ();
6687 emit_queue ();
6688 do_pending_stack_adjust ();
6689 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6690 break;
6691
6692 case COMPONENT_REF:
6693 case BIT_FIELD_REF:
6694 case ARRAY_REF:
6695 {
6696 int bitsize, bitpos, unsignedp;
6697 enum machine_mode mode;
6698 tree type;
6699 tree offset;
6700 int volatilep = 0;
6701
6702 /* Get description of this reference. We don't actually care
6703 about the underlying object here. */
6704 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6705 &mode, &unsignedp, &volatilep);
6706
6707 type = type_for_size (bitsize, unsignedp);
6708 if (! SLOW_BYTE_ACCESS
6709 && type != 0 && bitsize >= 0
6710 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6711 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6712 != CODE_FOR_nothing))
6713 {
6714 do_jump (convert (type, exp), if_false_label, if_true_label);
6715 break;
6716 }
6717 goto normal;
6718 }
6719
6720 case COND_EXPR:
6721 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6722 if (integer_onep (TREE_OPERAND (exp, 1))
6723 && integer_zerop (TREE_OPERAND (exp, 2)))
6724 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6725
6726 else if (integer_zerop (TREE_OPERAND (exp, 1))
6727 && integer_onep (TREE_OPERAND (exp, 2)))
6728 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6729
6730 else
6731 {
6732 register rtx label1 = gen_label_rtx ();
6733 drop_through_label = gen_label_rtx ();
6734 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6735 /* Now the THEN-expression. */
6736 do_jump (TREE_OPERAND (exp, 1),
6737 if_false_label ? if_false_label : drop_through_label,
6738 if_true_label ? if_true_label : drop_through_label);
6739 /* In case the do_jump just above never jumps. */
6740 do_pending_stack_adjust ();
6741 emit_label (label1);
6742 /* Now the ELSE-expression. */
6743 do_jump (TREE_OPERAND (exp, 2),
6744 if_false_label ? if_false_label : drop_through_label,
6745 if_true_label ? if_true_label : drop_through_label);
6746 }
6747 break;
6748
6749 case EQ_EXPR:
6750 if (integer_zerop (TREE_OPERAND (exp, 1)))
6751 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6752 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6753 == MODE_INT)
6754 &&
6755 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6756 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6757 else
6758 comparison = compare (exp, EQ, EQ);
6759 break;
6760
6761 case NE_EXPR:
6762 if (integer_zerop (TREE_OPERAND (exp, 1)))
6763 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6764 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6765 == MODE_INT)
6766 &&
6767 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6768 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6769 else
6770 comparison = compare (exp, NE, NE);
6771 break;
6772
6773 case LT_EXPR:
6774 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6775 == MODE_INT)
6776 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6777 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6778 else
6779 comparison = compare (exp, LT, LTU);
6780 break;
6781
6782 case LE_EXPR:
6783 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6784 == MODE_INT)
6785 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6786 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6787 else
6788 comparison = compare (exp, LE, LEU);
6789 break;
6790
6791 case GT_EXPR:
6792 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6793 == MODE_INT)
6794 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6795 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6796 else
6797 comparison = compare (exp, GT, GTU);
6798 break;
6799
6800 case GE_EXPR:
6801 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6802 == MODE_INT)
6803 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6804 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6805 else
6806 comparison = compare (exp, GE, GEU);
6807 break;
6808
6809 default:
6810 normal:
6811 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6812 #if 0
6813 /* This is not needed any more and causes poor code since it causes
6814 comparisons and tests from non-SI objects to have different code
6815 sequences. */
6816 /* Copy to register to avoid generating bad insns by cse
6817 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6818 if (!cse_not_expected && GET_CODE (temp) == MEM)
6819 temp = copy_to_reg (temp);
6820 #endif
6821 do_pending_stack_adjust ();
6822 if (GET_CODE (temp) == CONST_INT)
6823 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6824 else if (GET_CODE (temp) == LABEL_REF)
6825 comparison = const_true_rtx;
6826 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6827 && !can_compare_p (GET_MODE (temp)))
6828 /* Note swapping the labels gives us not-equal. */
6829 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6830 else if (GET_MODE (temp) != VOIDmode)
6831 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6832 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6833 GET_MODE (temp), NULL_RTX, 0);
6834 else
6835 abort ();
6836 }
6837
6838 /* Do any postincrements in the expression that was tested. */
6839 emit_queue ();
6840
6841 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6842 straight into a conditional jump instruction as the jump condition.
6843 Otherwise, all the work has been done already. */
6844
6845 if (comparison == const_true_rtx)
6846 {
6847 if (if_true_label)
6848 emit_jump (if_true_label);
6849 }
6850 else if (comparison == const0_rtx)
6851 {
6852 if (if_false_label)
6853 emit_jump (if_false_label);
6854 }
6855 else if (comparison)
6856 do_jump_for_compare (comparison, if_false_label, if_true_label);
6857
6858 free_temp_slots ();
6859
6860 if (drop_through_label)
6861 {
6862 /* If do_jump produces code that might be jumped around,
6863 do any stack adjusts from that code, before the place
6864 where control merges in. */
6865 do_pending_stack_adjust ();
6866 emit_label (drop_through_label);
6867 }
6868 }
6869 \f
6870 /* Given a comparison expression EXP for values too wide to be compared
6871 with one insn, test the comparison and jump to the appropriate label.
6872 The code of EXP is ignored; we always test GT if SWAP is 0,
6873 and LT if SWAP is 1. */
6874
6875 static void
6876 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6877 tree exp;
6878 int swap;
6879 rtx if_false_label, if_true_label;
6880 {
6881 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6882 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6883 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6884 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6885 rtx drop_through_label = 0;
6886 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6887 int i;
6888
6889 if (! if_true_label || ! if_false_label)
6890 drop_through_label = gen_label_rtx ();
6891 if (! if_true_label)
6892 if_true_label = drop_through_label;
6893 if (! if_false_label)
6894 if_false_label = drop_through_label;
6895
6896 /* Compare a word at a time, high order first. */
6897 for (i = 0; i < nwords; i++)
6898 {
6899 rtx comp;
6900 rtx op0_word, op1_word;
6901
6902 if (WORDS_BIG_ENDIAN)
6903 {
6904 op0_word = operand_subword_force (op0, i, mode);
6905 op1_word = operand_subword_force (op1, i, mode);
6906 }
6907 else
6908 {
6909 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6910 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6911 }
6912
6913 /* All but the high-order word must be compared as unsigned.  */
6914 comp = compare_from_rtx (op0_word, op1_word,
6915 (unsignedp || i > 0) ? GTU : GT,
6916 unsignedp, word_mode, NULL_RTX, 0);
6917 if (comp == const_true_rtx)
6918 emit_jump (if_true_label);
6919 else if (comp != const0_rtx)
6920 do_jump_for_compare (comp, NULL_RTX, if_true_label);
6921
6922 /* Consider lower words only if these are equal. */
6923 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6924 NULL_RTX, 0);
6925 if (comp == const_true_rtx)
6926 emit_jump (if_false_label);
6927 else if (comp != const0_rtx)
6928 do_jump_for_compare (comp, NULL_RTX, if_false_label);
6929 }
6930
6931 if (if_false_label)
6932 emit_jump (if_false_label);
6933 if (drop_through_label)
6934 emit_label (drop_through_label);
6935 }
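/* Editorial walk-through: for a signed DImode `x > y' on a 32-bit
   target, the loop above emits, high-order word first,

       if (x.hi > y.hi) goto if_true;       signed compare, top word
       if (x.hi != y.hi) goto if_false;
       if (x.lo > y.lo) goto if_true;       unsigned compare
       if (x.lo != y.lo) goto if_false;
       goto if_false;

   where `.hi' and `.lo' stand for the operand subwords.  */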
6936
6937 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6938 with one insn, test the comparison and jump to the appropriate label. */
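
/* For example (an illustrative sketch): on a 32-bit target, an equality
   test of two DImode values A and B becomes

	if (A.low != B.low)    goto if_false_label;
	if (A.high != B.high)  goto if_false_label;
	goto if_true_label;

   Word order is irrelevant for equality, so the words are simply
   tested in storage order.  */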

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
				   word_mode, NULL_RTX, 0);
      /* A word known unequal decides the whole comparison; a word known
	 equal tells us nothing about the remaining words.  */
      if (comp == const0_rtx)
	emit_jump (if_false_label);
      else if (comp != const_true_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      /* As above, a word known nonzero decides the test; a word known
	 zero does not.  */
      if (comp == const0_rtx)
	emit_jump (if_false_label);
      else if (comp != const_true_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = PREV_INSN (get_last_insn ());
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label, and define the true label.  */
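
      /* Schematically (illustrative; the actual branch insns are
	 target-dependent): for an EQ comparison we first emit

		beq if_false_label

	 and then invert it to the desired

		bne if_false_label

	 If the inversion fails, we emit instead

		beq L_true
		jmp if_false_label
	 L_true:

	 where L_true is the true label made here.  */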

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();

      /* Here we get the insn before what was just emitted.
	 On some machines, emitting the branch can discard
	 the previous compare insn and emit a replacement.  */
      if (prev == 0)
	/* If there's only one preceding insn...  */
	insn = get_insns ();
      else
	insn = NEXT_INSN (prev);

      for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
\f
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */
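
/* A typical use (a sketch; the exact rtl is target-dependent):

	comparison = compare_from_rtx (x, y, GT, 0, SImode, NULL_RTX, 0);

   emits an SImode compare insn and returns (gt (cc0) (const_int 0)),
   which do_jump_for_compare can turn into a conditional branch.  If both
   operands are CONST_INTs, no insn is emitted and the result folds to
   const_true_rtx or const0_rtx.  */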

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */
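
  /* For instance, (GT (const_int 5) (reg)) becomes (LT (reg) (const_int 5));
     swap_condition adjusts the code to compensate for the exchange.  */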

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
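
/* For instance (an illustrative sketch): for `flag = (x < y);' on a
   machine with an slt-style insn, we emit one store-flag insn that
   leaves 0 or 1 in TARGET, rather than the branching sequence

	flag = 1;  if (x < y) goto L;  flag = 0;  L:;

   which is what the set/jump/set fallback at the end of this function
   produces.  */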

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
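
  /* Thus, for signed operands, `x < 1' becomes `x <= 0' and `x > -1'
     becomes `x >= 0', so that the cheap comparisons against zero found
     on most machines apply.  */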

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
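
  /* Worked example (illustrative): for `(x & 8) != 0' the bit number
     is 3, so we compute

	((unsigned) x >> 3) & 1

     and for `(x & 8) == 0' we additionally xor the result with 1.  */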

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
						    NULL_RTX, VOIDmode, 0)));

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), target, 1);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, 1);

      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, target);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
			    OPTAB_LIB_WIDEN);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
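  /* Schematically, the fallback emitted below is

	target = 1;		(0 if INVERT)
	compare op0, op1
	branch-if-CODE label
	target = 0;		(1 if INVERT)
     label:

     unless the comparison folds to a constant, in which case the
     result is returned directly.  */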
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
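
  /* For example (illustrative): for case values 2 through 10, INDEX
     arrives with 2 already subtracted and RANGE is 8, the span of the
     case values.  An original value of 1 becomes (unsigned) -1, which
     exceeds 8, so the single unsigned test catches values below the
     minimum as well as above the maximum.  */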

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
	      gen_rtx (MULT, Pmode, index,
		       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
	      gen_rtx (LABEL_REF, Pmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */