]> gcc.gnu.org Git - gcc.git/blob - gcc/expr.c
(move_block_from_reg): New argument SIZE.
[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include "config.h"
22 #include "rtl.h"
23 #include "tree.h"
24 #include "flags.h"
25 #include "function.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "expr.h"
29 #include "insn-config.h"
30 #include "recog.h"
31 #include "output.h"
32 #include "typeclass.h"
33
34 #define CEIL(x,y) (((x) + (y) - 1) / (y))
35
36 /* Decide whether a function's arguments should be processed
37 from first to last or from last to first.
38
39 They should if the stack and args grow in opposite directions, but
40 only if we have push insns. */
41
42 #ifdef PUSH_ROUNDING
43
44 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
45 #define PUSH_ARGS_REVERSED /* If it's last to first */
46 #endif
47
48 #endif
49
50 #ifndef STACK_PUSH_CODE
51 #ifdef STACK_GROWS_DOWNWARD
52 #define STACK_PUSH_CODE PRE_DEC
53 #else
54 #define STACK_PUSH_CODE PRE_INC
55 #endif
56 #endif
57
58 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
59 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
60
61 /* If this is nonzero, we do not bother generating VOLATILE
62 around volatile memory references, and we are willing to
63 output indirect addresses. If cse is to follow, we reject
64 indirect addresses so a useful potential cse is generated;
65 if it is used only once, instruction combination will produce
66 the same indirect address eventually. */
67 int cse_not_expected;
68
69 /* Nonzero to generate code for all the subroutines within an
70 expression before generating the upper levels of the expression.
71 Nowadays this is never zero. */
72 int do_preexpand_calls = 1;
73
74 /* Number of units that we should eventually pop off the stack.
75 These are the arguments to function calls that have already returned. */
76 int pending_stack_adjust;
77
78 /* Nonzero means stack pops must not be deferred, and deferred stack
79 pops must not be output. It is nonzero inside a function call,
80 inside a conditional expression, inside a statement expression,
81 and in other cases as well. */
82 int inhibit_defer_pop;
83
84 /* A list of all cleanups which belong to the arguments of
85 function calls being expanded by expand_call. */
86 tree cleanups_this_call;
87
88 /* Nonzero means __builtin_saveregs has already been done in this function.
89 The value is the pseudoreg containing the value __builtin_saveregs
90 returned. */
91 static rtx saveregs_value;
92
93 /* Similarly for __builtin_apply_args. */
94 static rtx apply_args_value;
95
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98
99 struct move_by_pieces
100 {
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 int len;
110 int offset;
111 int reverse;
112 };
113
114 static rtx enqueue_insn PROTO((rtx, rtx));
115 static int queued_subexp_p PROTO((rtx));
116 static void init_queue PROTO((void));
117 static void move_by_pieces PROTO((rtx, rtx, int, int));
118 static int move_by_pieces_ninsns PROTO((unsigned int, int));
119 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
120 struct move_by_pieces *));
121 static void group_insns PROTO((rtx));
122 static void store_constructor PROTO((tree, rtx));
123 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
124 enum machine_mode, int, int, int));
125 static tree save_noncopied_parts PROTO((tree, tree));
126 static tree init_noncopied_parts PROTO((tree, tree));
127 static int safe_from_p PROTO((rtx, tree));
128 static int fixed_type_p PROTO((tree));
129 static int get_pointer_alignment PROTO((tree, unsigned));
130 static tree string_constant PROTO((tree, tree *));
131 static tree c_strlen PROTO((tree));
132 static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
133 static int apply_args_size PROTO((void));
134 static int apply_result_size PROTO((void));
135 static rtx result_vector PROTO((int, rtx));
136 static rtx expand_builtin_apply_args PROTO((void));
137 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
138 static void expand_builtin_return PROTO((rtx));
139 static rtx expand_increment PROTO((tree, int));
140 static void preexpand_calls PROTO((tree));
141 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
142 static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
143 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
144 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
145 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
146 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
147 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
148
149 /* Record for each mode whether we can move a register directly to or
150 from an object of that mode in memory. If we can't, we won't try
151 to use that mode directly when accessing a field of that mode. */
152
153 static char direct_load[NUM_MACHINE_MODES];
154 static char direct_store[NUM_MACHINE_MODES];
155
156 /* MOVE_RATIO is the number of move instructions that is better than
157 a block move. */
158
159 #ifndef MOVE_RATIO
160 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
161 #define MOVE_RATIO 2
162 #else
163 /* A value of around 6 would minimize code size; infinity would minimize
164 execution time. */
165 #define MOVE_RATIO 15
166 #endif
167 #endif
168
169 /* This array records the insn_code of insns to perform block moves. */
170 enum insn_code movstr_optab[NUM_MACHINE_MODES];
171
172 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
173
174 #ifndef SLOW_UNALIGNED_ACCESS
175 #define SLOW_UNALIGNED_ACCESS 0
176 #endif
177
178 /* Register mappings for target machines without register windows. */
179 #ifndef INCOMING_REGNO
180 #define INCOMING_REGNO(OUT) (OUT)
181 #endif
182 #ifndef OUTGOING_REGNO
183 #define OUTGOING_REGNO(IN) (IN)
184 #endif
185 \f
186 /* This is run once per compilation to set up which modes can be used
187 directly in memory and to initialize the block move optab. */
188
189 void
190 init_expr_once ()
191 {
192 rtx insn, pat;
193 enum machine_mode mode;
194 /* Try indexing by frame ptr and try by stack ptr.
195 It is known that on the Convex the stack ptr isn't a valid index.
196 With luck, one or the other is valid on any machine. */
197 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
198 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
199
200 start_sequence ();
201 insn = emit_insn (gen_rtx (SET, 0, 0));
202 pat = PATTERN (insn);
203
204 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
205 mode = (enum machine_mode) ((int) mode + 1))
206 {
207 int regno;
208 rtx reg;
209 int num_clobbers;
210
211 direct_load[(int) mode] = direct_store[(int) mode] = 0;
212 PUT_MODE (mem, mode);
213 PUT_MODE (mem1, mode);
214
215 /* See if there is some register that can be used in this mode and
216 directly loaded or stored from memory. */
217
218 if (mode != VOIDmode && mode != BLKmode)
219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
220 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
221 regno++)
222 {
223 if (! HARD_REGNO_MODE_OK (regno, mode))
224 continue;
225
226 reg = gen_rtx (REG, mode, regno);
227
228 SET_SRC (pat) = mem;
229 SET_DEST (pat) = reg;
230 if (recog (pat, insn, &num_clobbers) >= 0)
231 direct_load[(int) mode] = 1;
232
233 SET_SRC (pat) = mem1;
234 SET_DEST (pat) = reg;
235 if (recog (pat, insn, &num_clobbers) >= 0)
236 direct_load[(int) mode] = 1;
237
238 SET_SRC (pat) = reg;
239 SET_DEST (pat) = mem;
240 if (recog (pat, insn, &num_clobbers) >= 0)
241 direct_store[(int) mode] = 1;
242
243 SET_SRC (pat) = reg;
244 SET_DEST (pat) = mem1;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_store[(int) mode] = 1;
247 }
248 }
249
250 end_sequence ();
251 }
252
253 /* This is run at the start of compiling a function. */
254
255 void
256 init_expr ()
257 {
258 init_queue ();
259
260 pending_stack_adjust = 0;
261 inhibit_defer_pop = 0;
262 cleanups_this_call = 0;
263 saveregs_value = 0;
264 apply_args_value = 0;
265 forced_labels = 0;
266 }
267
268 /* Save all variables describing the current status into the structure *P.
269 This is used before starting a nested function. */
270
271 void
272 save_expr_status (p)
273 struct function *p;
274 {
275 /* Instead of saving the postincrement queue, empty it. */
276 emit_queue ();
277
278 p->pending_stack_adjust = pending_stack_adjust;
279 p->inhibit_defer_pop = inhibit_defer_pop;
280 p->cleanups_this_call = cleanups_this_call;
281 p->saveregs_value = saveregs_value;
282 p->apply_args_value = apply_args_value;
283 p->forced_labels = forced_labels;
284
285 pending_stack_adjust = 0;
286 inhibit_defer_pop = 0;
287 cleanups_this_call = 0;
288 saveregs_value = 0;
289 apply_args_value = 0;
290 forced_labels = 0;
291 }
292
293 /* Restore all variables describing the current status from the structure *P.
294 This is used after a nested function. */
295
296 void
297 restore_expr_status (p)
298 struct function *p;
299 {
300 pending_stack_adjust = p->pending_stack_adjust;
301 inhibit_defer_pop = p->inhibit_defer_pop;
302 cleanups_this_call = p->cleanups_this_call;
303 saveregs_value = p->saveregs_value;
304 apply_args_value = p->apply_args_value;
305 forced_labels = p->forced_labels;
306 }
307 \f
308 /* Manage the queue of increment instructions to be output
309 for POSTINCREMENT_EXPR expressions, etc. */
310
311 static rtx pending_chain;
312
313 /* Queue up to increment (or change) VAR later. BODY says how:
314 BODY should be the same thing you would pass to emit_insn
315 to increment right away. It will go to emit_insn later on.
316
317 The value is a QUEUED expression to be used in place of VAR
318 where you want to guarantee the pre-incrementation value of VAR. */
319
320 static rtx
321 enqueue_insn (var, body)
322 rtx var, body;
323 {
324 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
325 var, NULL_RTX, NULL_RTX, body, pending_chain);
326 return pending_chain;
327 }
328
329 /* Use protect_from_queue to convert a QUEUED expression
330 into something that you can put immediately into an instruction.
331 If the queued incrementation has not happened yet,
332 protect_from_queue returns the variable itself.
333 If the incrementation has happened, protect_from_queue returns a temp
334 that contains a copy of the old value of the variable.
335
336 Any time an rtx which might possibly be a QUEUED is to be put
337 into an instruction, it must be passed through protect_from_queue first.
338 QUEUED expressions are not meaningful in instructions.
339
340 Do not pass a value through protect_from_queue and then hold
341 on to it for a while before putting it in an instruction!
342 If the queue is flushed in between, incorrect code will result. */
343
344 rtx
345 protect_from_queue (x, modify)
346 register rtx x;
347 int modify;
348 {
349 register RTX_CODE code = GET_CODE (x);
350
351 #if 0 /* A QUEUED can hang around after the queue is forced out. */
352 /* Shortcut for most common case. */
353 if (pending_chain == 0)
354 return x;
355 #endif
356
357 if (code != QUEUED)
358 {
359 /* A special hack for read access to (MEM (QUEUED ...))
360 to facilitate use of autoincrement.
361 Make a copy of the contents of the memory location
362 rather than a copy of the address, but not
363 if the value is of mode BLKmode. */
364 if (code == MEM && GET_MODE (x) != BLKmode
365 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
366 {
367 register rtx y = XEXP (x, 0);
368 XEXP (x, 0) = QUEUED_VAR (y);
369 if (QUEUED_INSN (y))
370 {
371 register rtx temp = gen_reg_rtx (GET_MODE (x));
372 emit_insn_before (gen_move_insn (temp, x),
373 QUEUED_INSN (y));
374 return temp;
375 }
376 return x;
377 }
378 /* Otherwise, recursively protect the subexpressions of all
379 the kinds of rtx's that can contain a QUEUED. */
380 if (code == MEM)
381 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
382 else if (code == PLUS || code == MULT)
383 {
384 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
385 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
386 }
387 return x;
388 }
389 /* If the increment has not happened, use the variable itself. */
390 if (QUEUED_INSN (x) == 0)
391 return QUEUED_VAR (x);
392 /* If the increment has happened and a pre-increment copy exists,
393 use that copy. */
394 if (QUEUED_COPY (x) != 0)
395 return QUEUED_COPY (x);
396 /* The increment has happened but we haven't set up a pre-increment copy.
397 Set one up now, and use it. */
398 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
399 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
400 QUEUED_INSN (x));
401 return QUEUED_COPY (x);
402 }
403
404 /* Return nonzero if X contains a QUEUED expression:
405 if it contains anything that will be altered by a queued increment.
406 We handle only combinations of MEM, PLUS, MINUS and MULT operators
407 since memory addresses generally contain only those. */
408
409 static int
410 queued_subexp_p (x)
411 rtx x;
412 {
413 register enum rtx_code code = GET_CODE (x);
414 switch (code)
415 {
416 case QUEUED:
417 return 1;
418 case MEM:
419 return queued_subexp_p (XEXP (x, 0));
420 case MULT:
421 case PLUS:
422 case MINUS:
423 return queued_subexp_p (XEXP (x, 0))
424 || queued_subexp_p (XEXP (x, 1));
425 }
426 return 0;
427 }
428
429 /* Perform all the pending incrementations. */
430
431 void
432 emit_queue ()
433 {
434 register rtx p;
435 while (p = pending_chain)
436 {
437 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
438 pending_chain = QUEUED_NEXT (p);
439 }
440 }
441
442 static void
443 init_queue ()
444 {
445 if (pending_chain)
446 abort ();
447 }
448 \f
449 /* Copy data from FROM to TO, where the machine modes are not the same.
450 Both modes may be integer, or both may be floating.
451 UNSIGNEDP should be nonzero if FROM is an unsigned type.
452 This causes zero-extension instead of sign-extension. */
453
454 void
455 convert_move (to, from, unsignedp)
456 register rtx to, from;
457 int unsignedp;
458 {
459 enum machine_mode to_mode = GET_MODE (to);
460 enum machine_mode from_mode = GET_MODE (from);
461 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
462 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
463 enum insn_code code;
464 rtx libcall;
465
466 /* rtx code for making an equivalent value. */
467 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
468
469 to = protect_from_queue (to, 1);
470 from = protect_from_queue (from, 0);
471
472 if (to_real != from_real)
473 abort ();
474
475 /* If FROM is a SUBREG that indicates that we have already done at least
476 the required extension, strip it. We don't handle such SUBREGs as
477 TO here. */
478
479 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
480 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
481 >= GET_MODE_SIZE (to_mode))
482 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
483 from = gen_lowpart (to_mode, from), from_mode = to_mode;
484
485 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
486 abort ();
487
488 if (to_mode == from_mode
489 || (from_mode == VOIDmode && CONSTANT_P (from)))
490 {
491 emit_move_insn (to, from);
492 return;
493 }
494
495 if (to_real)
496 {
497 #ifdef HAVE_extendqfhf2
498 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
499 {
500 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
501 return;
502 }
503 #endif
504 #ifdef HAVE_extendqfsf2
505 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
506 {
507 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
508 return;
509 }
510 #endif
511 #ifdef HAVE_extendqfdf2
512 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
513 {
514 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
515 return;
516 }
517 #endif
518 #ifdef HAVE_extendqfxf2
519 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
520 {
521 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
522 return;
523 }
524 #endif
525 #ifdef HAVE_extendqftf2
526 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
527 {
528 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
529 return;
530 }
531 #endif
532
533 #ifdef HAVE_extendhfsf2
534 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
535 {
536 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
537 return;
538 }
539 #endif
540 #ifdef HAVE_extendhfdf2
541 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
542 {
543 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
544 return;
545 }
546 #endif
547 #ifdef HAVE_extendhfxf2
548 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
549 {
550 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
551 return;
552 }
553 #endif
554 #ifdef HAVE_extendhftf2
555 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
556 {
557 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
558 return;
559 }
560 #endif
561
562 #ifdef HAVE_extendsfdf2
563 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
564 {
565 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
566 return;
567 }
568 #endif
569 #ifdef HAVE_extendsfxf2
570 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
571 {
572 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
573 return;
574 }
575 #endif
576 #ifdef HAVE_extendsftf2
577 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
578 {
579 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
580 return;
581 }
582 #endif
583 #ifdef HAVE_extenddfxf2
584 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
585 {
586 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
587 return;
588 }
589 #endif
590 #ifdef HAVE_extenddftf2
591 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
592 {
593 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
594 return;
595 }
596 #endif
597
598 #ifdef HAVE_trunchfqf2
599 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
600 {
601 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
602 return;
603 }
604 #endif
605 #ifdef HAVE_truncsfqf2
606 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
607 {
608 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
609 return;
610 }
611 #endif
612 #ifdef HAVE_truncdfqf2
613 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
614 {
615 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
616 return;
617 }
618 #endif
619 #ifdef HAVE_truncxfqf2
620 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
621 {
622 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
623 return;
624 }
625 #endif
626 #ifdef HAVE_trunctfqf2
627 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
628 {
629 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
630 return;
631 }
632 #endif
633 #ifdef HAVE_truncsfhf2
634 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
637 return;
638 }
639 #endif
640 #ifdef HAVE_truncdfhf2
641 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
644 return;
645 }
646 #endif
647 #ifdef HAVE_truncxfhf2
648 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
649 {
650 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
651 return;
652 }
653 #endif
654 #ifdef HAVE_trunctfhf2
655 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
656 {
657 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
658 return;
659 }
660 #endif
661 #ifdef HAVE_truncdfsf2
662 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
663 {
664 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
665 return;
666 }
667 #endif
668 #ifdef HAVE_truncxfsf2
669 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
670 {
671 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
672 return;
673 }
674 #endif
675 #ifdef HAVE_trunctfsf2
676 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
677 {
678 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
679 return;
680 }
681 #endif
682 #ifdef HAVE_truncxfdf2
683 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
684 {
685 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
686 return;
687 }
688 #endif
689 #ifdef HAVE_trunctfdf2
690 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
691 {
692 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
693 return;
694 }
695 #endif
696
697 libcall = (rtx) 0;
698 switch (from_mode)
699 {
700 case SFmode:
701 switch (to_mode)
702 {
703 case DFmode:
704 libcall = extendsfdf2_libfunc;
705 break;
706
707 case XFmode:
708 libcall = extendsfxf2_libfunc;
709 break;
710
711 case TFmode:
712 libcall = extendsftf2_libfunc;
713 break;
714 }
715 break;
716
717 case DFmode:
718 switch (to_mode)
719 {
720 case SFmode:
721 libcall = truncdfsf2_libfunc;
722 break;
723
724 case XFmode:
725 libcall = extenddfxf2_libfunc;
726 break;
727
728 case TFmode:
729 libcall = extenddftf2_libfunc;
730 break;
731 }
732 break;
733
734 case XFmode:
735 switch (to_mode)
736 {
737 case SFmode:
738 libcall = truncxfsf2_libfunc;
739 break;
740
741 case DFmode:
742 libcall = truncxfdf2_libfunc;
743 break;
744 }
745 break;
746
747 case TFmode:
748 switch (to_mode)
749 {
750 case SFmode:
751 libcall = trunctfsf2_libfunc;
752 break;
753
754 case DFmode:
755 libcall = trunctfdf2_libfunc;
756 break;
757 }
758 break;
759 }
760
761 if (libcall == (rtx) 0)
762 /* This conversion is not implemented yet. */
763 abort ();
764
765 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
766 emit_move_insn (to, hard_libcall_value (to_mode));
767 return;
768 }
769
770 /* Now both modes are integers. */
771
772 /* Handle expanding beyond a word. */
773 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
774 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
775 {
776 rtx insns;
777 rtx lowpart;
778 rtx fill_value;
779 rtx lowfrom;
780 int i;
781 enum machine_mode lowpart_mode;
782 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
783
784 /* Try converting directly if the insn is supported. */
785 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
786 != CODE_FOR_nothing)
787 {
788 /* If FROM is a SUBREG, put it into a register. Do this
789 so that we always generate the same set of insns for
790 better cse'ing; if an intermediate assignment occurred,
791 we won't be doing the operation directly on the SUBREG. */
792 if (optimize > 0 && GET_CODE (from) == SUBREG)
793 from = force_reg (from_mode, from);
794 emit_unop_insn (code, to, from, equiv_code);
795 return;
796 }
797 /* Next, try converting via full word. */
798 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
799 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
800 != CODE_FOR_nothing))
801 {
802 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
803 emit_unop_insn (code, to,
804 gen_lowpart (word_mode, to), equiv_code);
805 return;
806 }
807
808 /* No special multiword conversion insn; do it by hand. */
809 start_sequence ();
810
811 /* Get a copy of FROM widened to a word, if necessary. */
812 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
813 lowpart_mode = word_mode;
814 else
815 lowpart_mode = from_mode;
816
817 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
818
819 lowpart = gen_lowpart (lowpart_mode, to);
820 emit_move_insn (lowpart, lowfrom);
821
822 /* Compute the value to put in each remaining word. */
823 if (unsignedp)
824 fill_value = const0_rtx;
825 else
826 {
827 #ifdef HAVE_slt
828 if (HAVE_slt
829 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
830 && STORE_FLAG_VALUE == -1)
831 {
832 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
833 lowpart_mode, 0, 0);
834 fill_value = gen_reg_rtx (word_mode);
835 emit_insn (gen_slt (fill_value));
836 }
837 else
838 #endif
839 {
840 fill_value
841 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
842 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
843 NULL_RTX, 0);
844 fill_value = convert_to_mode (word_mode, fill_value, 1);
845 }
846 }
847
848 /* Fill the remaining words. */
849 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
850 {
851 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
852 rtx subword = operand_subword (to, index, 1, to_mode);
853
854 if (subword == 0)
855 abort ();
856
857 if (fill_value != subword)
858 emit_move_insn (subword, fill_value);
859 }
860
861 insns = get_insns ();
862 end_sequence ();
863
864 emit_no_conflict_block (insns, to, from, NULL_RTX,
865 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
866 return;
867 }
868
869 /* Truncating multi-word to a word or less. */
870 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
871 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
872 {
873 convert_move (to, gen_lowpart (word_mode, from), 0);
874 return;
875 }
876
877 /* Handle pointer conversion */ /* SPEE 900220 */
878 if (to_mode == PSImode)
879 {
880 if (from_mode != SImode)
881 from = convert_to_mode (SImode, from, unsignedp);
882
883 #ifdef HAVE_truncsipsi
884 if (HAVE_truncsipsi)
885 {
886 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
887 return;
888 }
889 #endif /* HAVE_truncsipsi */
890 abort ();
891 }
892
893 if (from_mode == PSImode)
894 {
895 if (to_mode != SImode)
896 {
897 from = convert_to_mode (SImode, from, unsignedp);
898 from_mode = SImode;
899 }
900 else
901 {
902 #ifdef HAVE_extendpsisi
903 if (HAVE_extendpsisi)
904 {
905 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
906 return;
907 }
908 #endif /* HAVE_extendpsisi */
909 abort ();
910 }
911 }
912
913 /* Now follow all the conversions between integers
914 no more than a word long. */
915
916 /* For truncation, usually we can just refer to FROM in a narrower mode. */
917 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
918 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
919 GET_MODE_BITSIZE (from_mode)))
920 {
921 if (!((GET_CODE (from) == MEM
922 && ! MEM_VOLATILE_P (from)
923 && direct_load[(int) to_mode]
924 && ! mode_dependent_address_p (XEXP (from, 0)))
925 || GET_CODE (from) == REG
926 || GET_CODE (from) == SUBREG))
927 from = force_reg (from_mode, from);
928 emit_move_insn (to, gen_lowpart (to_mode, from));
929 return;
930 }
931
932 /* Handle extension. */
933 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
934 {
935 /* Convert directly if that works. */
936 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
937 != CODE_FOR_nothing)
938 {
939 /* If FROM is a SUBREG, put it into a register. Do this
940 so that we always generate the same set of insns for
941 better cse'ing; if an intermediate assignment occurred,
942 we won't be doing the operation directly on the SUBREG. */
943 if (optimize > 0 && GET_CODE (from) == SUBREG)
944 from = force_reg (from_mode, from);
945 emit_unop_insn (code, to, from, equiv_code);
946 return;
947 }
948 else
949 {
950 enum machine_mode intermediate;
951
952 /* Search for a mode to convert via. */
953 for (intermediate = from_mode; intermediate != VOIDmode;
954 intermediate = GET_MODE_WIDER_MODE (intermediate))
955 if ((can_extend_p (to_mode, intermediate, unsignedp)
956 != CODE_FOR_nothing)
957 && (can_extend_p (intermediate, from_mode, unsignedp)
958 != CODE_FOR_nothing))
959 {
960 convert_move (to, convert_to_mode (intermediate, from,
961 unsignedp), unsignedp);
962 return;
963 }
964
965 /* No suitable intermediate mode. */
966 abort ();
967 }
968 }
969
970 /* Support special truncate insns for certain modes. */
971
972 if (from_mode == DImode && to_mode == SImode)
973 {
974 #ifdef HAVE_truncdisi2
975 if (HAVE_truncdisi2)
976 {
977 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
978 return;
979 }
980 #endif
981 convert_move (to, force_reg (from_mode, from), unsignedp);
982 return;
983 }
984
985 if (from_mode == DImode && to_mode == HImode)
986 {
987 #ifdef HAVE_truncdihi2
988 if (HAVE_truncdihi2)
989 {
990 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
991 return;
992 }
993 #endif
994 convert_move (to, force_reg (from_mode, from), unsignedp);
995 return;
996 }
997
998 if (from_mode == DImode && to_mode == QImode)
999 {
1000 #ifdef HAVE_truncdiqi2
1001 if (HAVE_truncdiqi2)
1002 {
1003 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1004 return;
1005 }
1006 #endif
1007 convert_move (to, force_reg (from_mode, from), unsignedp);
1008 return;
1009 }
1010
1011 if (from_mode == SImode && to_mode == HImode)
1012 {
1013 #ifdef HAVE_truncsihi2
1014 if (HAVE_truncsihi2)
1015 {
1016 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1017 return;
1018 }
1019 #endif
1020 convert_move (to, force_reg (from_mode, from), unsignedp);
1021 return;
1022 }
1023
1024 if (from_mode == SImode && to_mode == QImode)
1025 {
1026 #ifdef HAVE_truncsiqi2
1027 if (HAVE_truncsiqi2)
1028 {
1029 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1030 return;
1031 }
1032 #endif
1033 convert_move (to, force_reg (from_mode, from), unsignedp);
1034 return;
1035 }
1036
1037 if (from_mode == HImode && to_mode == QImode)
1038 {
1039 #ifdef HAVE_trunchiqi2
1040 if (HAVE_trunchiqi2)
1041 {
1042 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1043 return;
1044 }
1045 #endif
1046 convert_move (to, force_reg (from_mode, from), unsignedp);
1047 return;
1048 }
1049
1050 /* Handle truncation of volatile memrefs, and so on;
1051 the things that couldn't be truncated directly,
1052 and for which there was no special instruction. */
1053 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1054 {
1055 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1056 emit_move_insn (to, temp);
1057 return;
1058 }
1059
1060 /* Mode combination is not recognized. */
1061 abort ();
1062 }
1063
1064 /* Return an rtx for a value that would result
1065 from converting X to mode MODE.
1066 Both X and MODE may be floating, or both integer.
1067 UNSIGNEDP is nonzero if X is an unsigned value.
1068 This can be done by referring to a part of X in place
1069 or by copying to a new temporary with conversion.
1070
1071 This function *must not* call protect_from_queue
1072 except when putting X into an insn (in which case convert_move does it). */
1073
1074 rtx
1075 convert_to_mode (mode, x, unsignedp)
1076 enum machine_mode mode;
1077 rtx x;
1078 int unsignedp;
1079 {
1080 register rtx temp;
1081
1082 /* If FROM is a SUBREG that indicates that we have already done at least
1083 the required extension, strip it. */
1084
1085 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1086 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1087 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1088 x = gen_lowpart (mode, x);
1089
1090 if (mode == GET_MODE (x))
1091 return x;
1092
1093 /* There is one case that we must handle specially: If we are converting
1094 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1095 we are to interpret the constant as unsigned, gen_lowpart will do
1096 the wrong if the constant appears negative. What we want to do is
1097 make the high-order word of the constant zero, not all ones. */
1098
1099 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1100 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1101 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1102 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1103
1104 /* We can do this with a gen_lowpart if both desired and current modes
1105 are integer, and this is either a constant integer, a register, or a
1106 non-volatile MEM. Except for the constant case, we must be narrowing
1107 the operand. */
1108
1109 if (GET_CODE (x) == CONST_INT
1110 || (GET_MODE_CLASS (mode) == MODE_INT
1111 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1112 && (GET_CODE (x) == CONST_DOUBLE
1113 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1114 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1115 && direct_load[(int) mode])
1116 || GET_CODE (x) == REG)))))
1117 return gen_lowpart (mode, x);
1118
1119 temp = gen_reg_rtx (mode);
1120 convert_move (temp, x, unsignedp);
1121 return temp;
1122 }
1123 \f
1124 /* Generate several move instructions to copy LEN bytes
1125 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1126 The caller must pass FROM and TO
1127 through protect_from_queue before calling.
1128 ALIGN (in bytes) is maximum alignment we can assume. */
1129
1130 static void
1131 move_by_pieces (to, from, len, align)
1132 rtx to, from;
1133 int len, align;
1134 {
1135 struct move_by_pieces data;
1136 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1137 int max_size = MOVE_MAX + 1;
1138
1139 data.offset = 0;
1140 data.to_addr = to_addr;
1141 data.from_addr = from_addr;
1142 data.to = to;
1143 data.from = from;
1144 data.autinc_to
1145 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1146 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1147 data.autinc_from
1148 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1149 || GET_CODE (from_addr) == POST_INC
1150 || GET_CODE (from_addr) == POST_DEC);
1151
1152 data.explicit_inc_from = 0;
1153 data.explicit_inc_to = 0;
1154 data.reverse
1155 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1156 if (data.reverse) data.offset = len;
1157 data.len = len;
1158
1159 /* If copying requires more than two move insns,
1160 copy addresses to registers (to make displacements shorter)
1161 and use post-increment if available. */
1162 if (!(data.autinc_from && data.autinc_to)
1163 && move_by_pieces_ninsns (len, align) > 2)
1164 {
1165 #ifdef HAVE_PRE_DECREMENT
1166 if (data.reverse && ! data.autinc_from)
1167 {
1168 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1169 data.autinc_from = 1;
1170 data.explicit_inc_from = -1;
1171 }
1172 #endif
1173 #ifdef HAVE_POST_INCREMENT
1174 if (! data.autinc_from)
1175 {
1176 data.from_addr = copy_addr_to_reg (from_addr);
1177 data.autinc_from = 1;
1178 data.explicit_inc_from = 1;
1179 }
1180 #endif
1181 if (!data.autinc_from && CONSTANT_P (from_addr))
1182 data.from_addr = copy_addr_to_reg (from_addr);
1183 #ifdef HAVE_PRE_DECREMENT
1184 if (data.reverse && ! data.autinc_to)
1185 {
1186 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1187 data.autinc_to = 1;
1188 data.explicit_inc_to = -1;
1189 }
1190 #endif
1191 #ifdef HAVE_POST_INCREMENT
1192 if (! data.reverse && ! data.autinc_to)
1193 {
1194 data.to_addr = copy_addr_to_reg (to_addr);
1195 data.autinc_to = 1;
1196 data.explicit_inc_to = 1;
1197 }
1198 #endif
1199 if (!data.autinc_to && CONSTANT_P (to_addr))
1200 data.to_addr = copy_addr_to_reg (to_addr);
1201 }
1202
1203 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1204 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1205 align = MOVE_MAX;
1206
1207 /* First move what we can in the largest integer mode, then go to
1208 successively smaller modes. */
1209
1210 while (max_size > 1)
1211 {
1212 enum machine_mode mode = VOIDmode, tmode;
1213 enum insn_code icode;
1214
1215 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1216 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1217 if (GET_MODE_SIZE (tmode) < max_size)
1218 mode = tmode;
1219
1220 if (mode == VOIDmode)
1221 break;
1222
1223 icode = mov_optab->handlers[(int) mode].insn_code;
1224 if (icode != CODE_FOR_nothing
1225 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1226 GET_MODE_SIZE (mode)))
1227 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1228
1229 max_size = GET_MODE_SIZE (mode);
1230 }
1231
1232 /* The code above should have handled everything. */
1233 if (data.len != 0)
1234 abort ();
1235 }
1236
1237 /* Return number of insns required to move L bytes by pieces.
1238 ALIGN (in bytes) is maximum alignment we can assume. */
1239
1240 static int
1241 move_by_pieces_ninsns (l, align)
1242 unsigned int l;
1243 int align;
1244 {
1245 register int n_insns = 0;
1246 int max_size = MOVE_MAX + 1;
1247
1248 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1249 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1250 align = MOVE_MAX;
1251
1252 while (max_size > 1)
1253 {
1254 enum machine_mode mode = VOIDmode, tmode;
1255 enum insn_code icode;
1256
1257 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1258 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1259 if (GET_MODE_SIZE (tmode) < max_size)
1260 mode = tmode;
1261
1262 if (mode == VOIDmode)
1263 break;
1264
1265 icode = mov_optab->handlers[(int) mode].insn_code;
1266 if (icode != CODE_FOR_nothing
1267 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1268 GET_MODE_SIZE (mode)))
1269 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1270
1271 max_size = GET_MODE_SIZE (mode);
1272 }
1273
1274 return n_insns;
1275 }
1276
1277 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1278 with move instructions for mode MODE. GENFUN is the gen_... function
1279 to make a move insn for that mode. DATA has all the other info. */
1280
1281 static void
1282 move_by_pieces_1 (genfun, mode, data)
1283 rtx (*genfun) ();
1284 enum machine_mode mode;
1285 struct move_by_pieces *data;
1286 {
1287 register int size = GET_MODE_SIZE (mode);
1288 register rtx to1, from1;
1289
1290 while (data->len >= size)
1291 {
1292 if (data->reverse) data->offset -= size;
1293
1294 to1 = (data->autinc_to
1295 ? gen_rtx (MEM, mode, data->to_addr)
1296 : change_address (data->to, mode,
1297 plus_constant (data->to_addr, data->offset)));
1298 from1 =
1299 (data->autinc_from
1300 ? gen_rtx (MEM, mode, data->from_addr)
1301 : change_address (data->from, mode,
1302 plus_constant (data->from_addr, data->offset)));
1303
1304 #ifdef HAVE_PRE_DECREMENT
1305 if (data->explicit_inc_to < 0)
1306 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1307 if (data->explicit_inc_from < 0)
1308 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1309 #endif
1310
1311 emit_insn ((*genfun) (to1, from1));
1312 #ifdef HAVE_POST_INCREMENT
1313 if (data->explicit_inc_to > 0)
1314 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1315 if (data->explicit_inc_from > 0)
1316 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1317 #endif
1318
1319 if (! data->reverse) data->offset += size;
1320
1321 data->len -= size;
1322 }
1323 }
1324 \f
1325 /* Emit code to move a block Y to a block X.
1326 This may be done with string-move instructions,
1327 with multiple scalar move instructions, or with a library call.
1328
1329 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1330 with mode BLKmode.
1331 SIZE is an rtx that says how long they are.
1332 ALIGN is the maximum alignment we can assume they have,
1333 measured in bytes. */
1334
1335 void
1336 emit_block_move (x, y, size, align)
1337 rtx x, y;
1338 rtx size;
1339 int align;
1340 {
1341 if (GET_MODE (x) != BLKmode)
1342 abort ();
1343
1344 if (GET_MODE (y) != BLKmode)
1345 abort ();
1346
1347 x = protect_from_queue (x, 1);
1348 y = protect_from_queue (y, 0);
1349 size = protect_from_queue (size, 0);
1350
1351 if (GET_CODE (x) != MEM)
1352 abort ();
1353 if (GET_CODE (y) != MEM)
1354 abort ();
1355 if (size == 0)
1356 abort ();
1357
1358 if (GET_CODE (size) == CONST_INT
1359 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1360 move_by_pieces (x, y, INTVAL (size), align);
1361 else
1362 {
1363 /* Try the most limited insn first, because there's no point
1364 including more than one in the machine description unless
1365 the more limited one has some advantage. */
1366
1367 rtx opalign = GEN_INT (align);
1368 enum machine_mode mode;
1369
1370 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1371 mode = GET_MODE_WIDER_MODE (mode))
1372 {
1373 enum insn_code code = movstr_optab[(int) mode];
1374
1375 if (code != CODE_FOR_nothing
1376 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1377 here because if SIZE is less than the mode mask, as it is
1378 returned by the macro, it will definitely be less than the
1379 actual mode mask. */
1380 && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
1381 && (insn_operand_predicate[(int) code][0] == 0
1382 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1383 && (insn_operand_predicate[(int) code][1] == 0
1384 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1385 && (insn_operand_predicate[(int) code][3] == 0
1386 || (*insn_operand_predicate[(int) code][3]) (opalign,
1387 VOIDmode)))
1388 {
1389 rtx op2;
1390 rtx last = get_last_insn ();
1391 rtx pat;
1392
1393 op2 = convert_to_mode (mode, size, 1);
1394 if (insn_operand_predicate[(int) code][2] != 0
1395 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1396 op2 = copy_to_mode_reg (mode, op2);
1397
1398 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1399 if (pat)
1400 {
1401 emit_insn (pat);
1402 return;
1403 }
1404 else
1405 delete_insns_since (last);
1406 }
1407 }
1408
1409 #ifdef TARGET_MEM_FUNCTIONS
1410 emit_library_call (memcpy_libfunc, 0,
1411 VOIDmode, 3, XEXP (x, 0), Pmode,
1412 XEXP (y, 0), Pmode,
1413 convert_to_mode (TYPE_MODE (sizetype), size,
1414 TREE_UNSIGNED (sizetype)),
1415 TYPE_MODE (sizetype));
1416 #else
1417 emit_library_call (bcopy_libfunc, 0,
1418 VOIDmode, 3, XEXP (y, 0), Pmode,
1419 XEXP (x, 0), Pmode,
1420 convert_to_mode (TYPE_MODE (sizetype), size,
1421 TREE_UNSIGNED (sizetype)),
1422 TYPE_MODE (sizetype));
1423 #endif
1424 }
1425 }
1426 \f
1427 /* Copy all or part of a value X into registers starting at REGNO.
1428 The number of registers to be filled is NREGS. */
1429
1430 void
1431 move_block_to_reg (regno, x, nregs, mode)
1432 int regno;
1433 rtx x;
1434 int nregs;
1435 enum machine_mode mode;
1436 {
1437 int i;
1438 rtx pat, last;
1439
1440 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1441 x = validize_mem (force_const_mem (mode, x));
1442
1443 /* See if the machine can do this with a load multiple insn. */
1444 #ifdef HAVE_load_multiple
1445 last = get_last_insn ();
1446 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1447 GEN_INT (nregs));
1448 if (pat)
1449 {
1450 emit_insn (pat);
1451 return;
1452 }
1453 else
1454 delete_insns_since (last);
1455 #endif
1456
1457 for (i = 0; i < nregs; i++)
1458 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1459 operand_subword_force (x, i, mode));
1460 }
1461
1462 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1463 The number of registers to be filled is NREGS. SIZE indicates the number
1464 of bytes in the object X. */
1465
1466
1467 void
1468 move_block_from_reg (regno, x, nregs, size)
1469 int regno;
1470 rtx x;
1471 int nregs;
1472 int size;
1473 {
1474 int i;
1475 rtx pat, last;
1476
1477 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1478 to the left before storing to memory. */
1479 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1480 {
1481 rtx tem = operand_subword (x, 0, 1, BLKmode);
1482 rtx shift;
1483
1484 if (tem == 0)
1485 abort ();
1486
1487 shift = expand_shift (LSHIFT_EXPR, word_mode,
1488 gen_rtx (REG, word_mode, regno),
1489 build_int_2 ((UNITS_PER_WORD - size)
1490 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1491 emit_move_insn (tem, shift);
1492 return;
1493 }
1494
1495 /* See if the machine can do this with a store multiple insn. */
1496 #ifdef HAVE_store_multiple
1497 last = get_last_insn ();
1498 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1499 GEN_INT (nregs));
1500 if (pat)
1501 {
1502 emit_insn (pat);
1503 return;
1504 }
1505 else
1506 delete_insns_since (last);
1507 #endif
1508
1509 for (i = 0; i < nregs; i++)
1510 {
1511 rtx tem = operand_subword (x, i, 1, BLKmode);
1512
1513 if (tem == 0)
1514 abort ();
1515
1516 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1517 }
1518 }
1519
1520 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1521
1522 void
1523 use_regs (regno, nregs)
1524 int regno;
1525 int nregs;
1526 {
1527 int i;
1528
1529 for (i = 0; i < nregs; i++)
1530 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1531 }
1532
1533 /* Mark the instructions since PREV as a libcall block.
1534 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1535
1536 static void
1537 group_insns (prev)
1538 rtx prev;
1539 {
1540 rtx insn_first;
1541 rtx insn_last;
1542
1543 /* Find the instructions to mark */
1544 if (prev)
1545 insn_first = NEXT_INSN (prev);
1546 else
1547 insn_first = get_insns ();
1548
1549 insn_last = get_last_insn ();
1550
1551 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1552 REG_NOTES (insn_last));
1553
1554 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1555 REG_NOTES (insn_first));
1556 }
1557 \f
1558 /* Write zeros through the storage of OBJECT.
1559 If OBJECT has BLKmode, SIZE is its length in bytes. */
1560
1561 void
1562 clear_storage (object, size)
1563 rtx object;
1564 int size;
1565 {
1566 if (GET_MODE (object) == BLKmode)
1567 {
1568 #ifdef TARGET_MEM_FUNCTIONS
1569 emit_library_call (memset_libfunc, 0,
1570 VOIDmode, 3,
1571 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1572 GEN_INT (size), Pmode);
1573 #else
1574 emit_library_call (bzero_libfunc, 0,
1575 VOIDmode, 2,
1576 XEXP (object, 0), Pmode,
1577 GEN_INT (size), Pmode);
1578 #endif
1579 }
1580 else
1581 emit_move_insn (object, const0_rtx);
1582 }
1583
1584 /* Generate code to copy Y into X.
1585 Both Y and X must have the same mode, except that
1586 Y can be a constant with VOIDmode.
1587 This mode cannot be BLKmode; use emit_block_move for that.
1588
1589 Return the last instruction emitted. */
1590
1591 rtx
1592 emit_move_insn (x, y)
1593 rtx x, y;
1594 {
1595 enum machine_mode mode = GET_MODE (x);
1596 enum machine_mode submode;
1597 enum mode_class class = GET_MODE_CLASS (mode);
1598 int i;
1599
1600 x = protect_from_queue (x, 1);
1601 y = protect_from_queue (y, 0);
1602
1603 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1604 abort ();
1605
1606 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1607 y = force_const_mem (mode, y);
1608
1609 /* If X or Y are memory references, verify that their addresses are valid
1610 for the machine. */
1611 if (GET_CODE (x) == MEM
1612 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1613 && ! push_operand (x, GET_MODE (x)))
1614 || (flag_force_addr
1615 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1616 x = change_address (x, VOIDmode, XEXP (x, 0));
1617
1618 if (GET_CODE (y) == MEM
1619 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1620 || (flag_force_addr
1621 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1622 y = change_address (y, VOIDmode, XEXP (y, 0));
1623
1624 if (mode == BLKmode)
1625 abort ();
1626
1627 return emit_move_insn_1 (x, y);
1628 }
1629
1630 /* Low level part of emit_move_insn.
1631 Called just like emit_move_insn, but assumes X and Y
1632 are basically valid. */
1633
1634 rtx
1635 emit_move_insn_1 (x, y)
1636 rtx x, y;
1637 {
1638 enum machine_mode mode = GET_MODE (x);
1639 enum machine_mode submode;
1640 enum mode_class class = GET_MODE_CLASS (mode);
1641 int i;
1642
1643 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1644 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1645 (class == MODE_COMPLEX_INT
1646 ? MODE_INT : MODE_FLOAT),
1647 0);
1648
1649 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1650 return
1651 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1652
1653 /* Expand complex moves by moving real part and imag part, if possible. */
1654 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1655 && submode != BLKmode
1656 && (mov_optab->handlers[(int) submode].insn_code
1657 != CODE_FOR_nothing))
1658 {
1659 /* Don't split destination if it is a stack push. */
1660 int stack = push_operand (x, GET_MODE (x));
1661 rtx prev = get_last_insn ();
1662
1663 /* Tell flow that the whole of the destination is being set. */
1664 if (GET_CODE (x) == REG)
1665 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1666
1667 /* If this is a stack, push the highpart first, so it
1668 will be in the argument order.
1669
1670 In that case, change_address is used only to convert
1671 the mode, not to change the address. */
1672 if (stack)
1673 {
1674 #ifdef STACK_GROWS_DOWNWARD
1675 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1676 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1677 gen_highpart (submode, y)));
1678 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1679 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1680 gen_lowpart (submode, y)));
1681 #else
1682 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1683 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1684 gen_lowpart (submode, y)));
1685 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1686 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1687 gen_highpart (submode, y)));
1688 #endif
1689 }
1690 else
1691 {
1692 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1693 (gen_highpart (submode, x), gen_highpart (submode, y)));
1694 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1695 (gen_lowpart (submode, x), gen_lowpart (submode, y)));
1696 }
1697
1698 group_insns (prev);
1699
1700 return get_last_insn ();
1701 }
1702
1703 /* This will handle any multi-word mode that lacks a move_insn pattern.
1704 However, you will get better code if you define such patterns,
1705 even if they must turn into multiple assembler instructions. */
1706 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1707 {
1708 rtx last_insn = 0;
1709 rtx prev_insn = get_last_insn ();
1710
1711 for (i = 0;
1712 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1713 i++)
1714 {
1715 rtx xpart = operand_subword (x, i, 1, mode);
1716 rtx ypart = operand_subword (y, i, 1, mode);
1717
1718 /* If we can't get a part of Y, put Y into memory if it is a
1719 constant. Otherwise, force it into a register. If we still
1720 can't get a part of Y, abort. */
1721 if (ypart == 0 && CONSTANT_P (y))
1722 {
1723 y = force_const_mem (mode, y);
1724 ypart = operand_subword (y, i, 1, mode);
1725 }
1726 else if (ypart == 0)
1727 ypart = operand_subword_force (y, i, mode);
1728
1729 if (xpart == 0 || ypart == 0)
1730 abort ();
1731
1732 last_insn = emit_move_insn (xpart, ypart);
1733 }
1734 /* Mark these insns as a libcall block. */
1735 group_insns (prev_insn);
1736
1737 return last_insn;
1738 }
1739 else
1740 abort ();
1741 }
1742 \f
1743 /* Pushing data onto the stack. */
1744
1745 /* Push a block of length SIZE (perhaps variable)
1746 and return an rtx to address the beginning of the block.
1747 Note that it is not possible for the value returned to be a QUEUED.
1748 The value may be virtual_outgoing_args_rtx.
1749
1750 EXTRA is the number of bytes of padding to push in addition to SIZE.
1751 BELOW nonzero means this padding comes at low addresses;
1752 otherwise, the padding comes at high addresses. */
1753
1754 rtx
1755 push_block (size, extra, below)
1756 rtx size;
1757 int extra, below;
1758 {
1759 register rtx temp;
1760 if (CONSTANT_P (size))
1761 anti_adjust_stack (plus_constant (size, extra));
1762 else if (GET_CODE (size) == REG && extra == 0)
1763 anti_adjust_stack (size);
1764 else
1765 {
1766 rtx temp = copy_to_mode_reg (Pmode, size);
1767 if (extra != 0)
1768 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1769 temp, 0, OPTAB_LIB_WIDEN);
1770 anti_adjust_stack (temp);
1771 }
1772
1773 #ifdef STACK_GROWS_DOWNWARD
1774 temp = virtual_outgoing_args_rtx;
1775 if (extra != 0 && below)
1776 temp = plus_constant (temp, extra);
1777 #else
1778 if (GET_CODE (size) == CONST_INT)
1779 temp = plus_constant (virtual_outgoing_args_rtx,
1780 - INTVAL (size) - (below ? 0 : extra));
1781 else if (extra != 0 && !below)
1782 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1783 negate_rtx (Pmode, plus_constant (size, extra)));
1784 else
1785 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1786 negate_rtx (Pmode, size));
1787 #endif
1788
1789 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1790 }
1791
1792 rtx
1793 gen_push_operand ()
1794 {
1795 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1796 }
1797
1798 /* Generate code to push X onto the stack, assuming it has mode MODE and
1799 type TYPE.
1800 MODE is redundant except when X is a CONST_INT (since they don't
1801 carry mode info).
1802 SIZE is an rtx for the size of data to be copied (in bytes),
1803 needed only if X is BLKmode.
1804
1805 ALIGN (in bytes) is maximum alignment we can assume.
1806
1807 If PARTIAL and REG are both nonzero, then copy that many of the first
1808 words of X into registers starting with REG, and push the rest of X.
1809 The amount of space pushed is decreased by PARTIAL words,
1810 rounded *down* to a multiple of PARM_BOUNDARY.
1811 REG must be a hard register in this case.
1812 If REG is zero but PARTIAL is not, take any all others actions for an
1813 argument partially in registers, but do not actually load any
1814 registers.
1815
1816 EXTRA is the amount in bytes of extra space to leave next to this arg.
1817 This is ignored if an argument block has already been allocated.
1818
1819 On a machine that lacks real push insns, ARGS_ADDR is the address of
1820 the bottom of the argument block for this call. We use indexing off there
1821 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1822 argument block has not been preallocated.
1823
1824 ARGS_SO_FAR is the size of args previously pushed for this call. */
1825
1826 void
1827 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1828 args_addr, args_so_far)
1829 register rtx x;
1830 enum machine_mode mode;
1831 tree type;
1832 rtx size;
1833 int align;
1834 int partial;
1835 rtx reg;
1836 int extra;
1837 rtx args_addr;
1838 rtx args_so_far;
1839 {
1840 rtx xinner;
1841 enum direction stack_direction
1842 #ifdef STACK_GROWS_DOWNWARD
1843 = downward;
1844 #else
1845 = upward;
1846 #endif
1847
1848 /* Decide where to pad the argument: `downward' for below,
1849 `upward' for above, or `none' for don't pad it.
1850 Default is below for small data on big-endian machines; else above. */
1851 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1852
1853 /* Invert direction if stack is post-update. */
1854 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1855 if (where_pad != none)
1856 where_pad = (where_pad == downward ? upward : downward);
1857
1858 xinner = x = protect_from_queue (x, 0);
1859
1860 if (mode == BLKmode)
1861 {
1862 /* Copy a block into the stack, entirely or partially. */
1863
1864 register rtx temp;
1865 int used = partial * UNITS_PER_WORD;
1866 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1867 int skip;
1868
1869 if (size == 0)
1870 abort ();
1871
1872 used -= offset;
1873
1874 /* USED is now the # of bytes we need not copy to the stack
1875 because registers will take care of them. */
1876
1877 if (partial != 0)
1878 xinner = change_address (xinner, BLKmode,
1879 plus_constant (XEXP (xinner, 0), used));
1880
1881 /* If the partial register-part of the arg counts in its stack size,
1882 skip the part of stack space corresponding to the registers.
1883 Otherwise, start copying to the beginning of the stack space,
1884 by setting SKIP to 0. */
1885 #ifndef REG_PARM_STACK_SPACE
1886 skip = 0;
1887 #else
1888 skip = used;
1889 #endif
1890
1891 #ifdef PUSH_ROUNDING
1892 /* Do it with several push insns if that doesn't take lots of insns
1893 and if there is no difficulty with push insns that skip bytes
1894 on the stack for alignment purposes. */
1895 if (args_addr == 0
1896 && GET_CODE (size) == CONST_INT
1897 && skip == 0
1898 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1899 < MOVE_RATIO)
1900 /* Here we avoid the case of a structure whose weak alignment
1901 forces many pushes of a small amount of data,
1902 and such small pushes do rounding that causes trouble. */
1903 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1904 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1905 || PUSH_ROUNDING (align) == align)
1906 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1907 {
1908 /* Push padding now if padding above and stack grows down,
1909 or if padding below and stack grows up.
1910 But if space already allocated, this has already been done. */
1911 if (extra && args_addr == 0
1912 && where_pad != none && where_pad != stack_direction)
1913 anti_adjust_stack (GEN_INT (extra));
1914
1915 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1916 INTVAL (size) - used, align);
1917 }
1918 else
1919 #endif /* PUSH_ROUNDING */
1920 {
1921 /* Otherwise make space on the stack and copy the data
1922 to the address of that space. */
1923
1924 /* Deduct words put into registers from the size we must copy. */
1925 if (partial != 0)
1926 {
1927 if (GET_CODE (size) == CONST_INT)
1928 size = GEN_INT (INTVAL (size) - used);
1929 else
1930 size = expand_binop (GET_MODE (size), sub_optab, size,
1931 GEN_INT (used), NULL_RTX, 0,
1932 OPTAB_LIB_WIDEN);
1933 }
1934
1935 /* Get the address of the stack space.
1936 In this case, we do not deal with EXTRA separately.
1937 A single stack adjust will do. */
1938 if (! args_addr)
1939 {
1940 temp = push_block (size, extra, where_pad == downward);
1941 extra = 0;
1942 }
1943 else if (GET_CODE (args_so_far) == CONST_INT)
1944 temp = memory_address (BLKmode,
1945 plus_constant (args_addr,
1946 skip + INTVAL (args_so_far)));
1947 else
1948 temp = memory_address (BLKmode,
1949 plus_constant (gen_rtx (PLUS, Pmode,
1950 args_addr, args_so_far),
1951 skip));
1952
1953 /* TEMP is the address of the block. Copy the data there. */
1954 if (GET_CODE (size) == CONST_INT
1955 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1956 < MOVE_RATIO))
1957 {
1958 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1959 INTVAL (size), align);
1960 goto ret;
1961 }
1962 /* Try the most limited insn first, because there's no point
1963 including more than one in the machine description unless
1964 the more limited one has some advantage. */
1965 #ifdef HAVE_movstrqi
1966 if (HAVE_movstrqi
1967 && GET_CODE (size) == CONST_INT
1968 && ((unsigned) INTVAL (size)
1969 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1970 {
1971 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1972 xinner, size, GEN_INT (align));
1973 if (pat != 0)
1974 {
1975 emit_insn (pat);
1976 goto ret;
1977 }
1978 }
1979 #endif
1980 #ifdef HAVE_movstrhi
1981 if (HAVE_movstrhi
1982 && GET_CODE (size) == CONST_INT
1983 && ((unsigned) INTVAL (size)
1984 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1985 {
1986 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1987 xinner, size, GEN_INT (align));
1988 if (pat != 0)
1989 {
1990 emit_insn (pat);
1991 goto ret;
1992 }
1993 }
1994 #endif
1995 #ifdef HAVE_movstrsi
1996 if (HAVE_movstrsi)
1997 {
1998 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1999 xinner, size, GEN_INT (align));
2000 if (pat != 0)
2001 {
2002 emit_insn (pat);
2003 goto ret;
2004 }
2005 }
2006 #endif
2007 #ifdef HAVE_movstrdi
2008 if (HAVE_movstrdi)
2009 {
2010 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2011 xinner, size, GEN_INT (align));
2012 if (pat != 0)
2013 {
2014 emit_insn (pat);
2015 goto ret;
2016 }
2017 }
2018 #endif
2019
2020 #ifndef ACCUMULATE_OUTGOING_ARGS
2021 /* If the source is referenced relative to the stack pointer,
2022 copy it to another register to stabilize it. We do not need
2023 to do this if we know that we won't be changing sp. */
2024
2025 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2026 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2027 temp = copy_to_reg (temp);
2028 #endif
2029
2030 /* Make inhibit_defer_pop nonzero around the library call
2031 to force it to pop the bcopy-arguments right away. */
2032 NO_DEFER_POP;
2033 #ifdef TARGET_MEM_FUNCTIONS
2034 emit_library_call (memcpy_libfunc, 0,
2035 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2036 convert_to_mode (TYPE_MODE (sizetype),
2037 size, TREE_UNSIGNED (sizetype)),
2038 TYPE_MODE (sizetype));
2039 #else
2040 emit_library_call (bcopy_libfunc, 0,
2041 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2042 convert_to_mode (TYPE_MODE (sizetype),
2043 size, TREE_UNSIGNED (sizetype)),
2044 TYPE_MODE (sizetype));
2045 #endif
2046 OK_DEFER_POP;
2047 }
2048 }
2049 else if (partial > 0)
2050 {
2051 /* Scalar partly in registers. */
2052
2053 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2054 int i;
2055 int not_stack;
2056 /* # words of start of argument
2057 that we must make space for but need not store. */
2058 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2059 int args_offset = INTVAL (args_so_far);
2060 int skip;
2061
2062 /* Push padding now if padding above and stack grows down,
2063 or if padding below and stack grows up.
2064 But if space already allocated, this has already been done. */
2065 if (extra && args_addr == 0
2066 && where_pad != none && where_pad != stack_direction)
2067 anti_adjust_stack (GEN_INT (extra));
2068
2069 /* If we make space by pushing it, we might as well push
2070 the real data. Otherwise, we can leave OFFSET nonzero
2071 and leave the space uninitialized. */
2072 if (args_addr == 0)
2073 offset = 0;
2074
2075 /* Now NOT_STACK gets the number of words that we don't need to
2076 allocate on the stack. */
2077 not_stack = partial - offset;
2078
2079 /* If the partial register-part of the arg counts in its stack size,
2080 skip the part of stack space corresponding to the registers.
2081 Otherwise, start copying to the beginning of the stack space,
2082 by setting SKIP to 0. */
2083 #ifndef REG_PARM_STACK_SPACE
2084 skip = 0;
2085 #else
2086 skip = not_stack;
2087 #endif
2088
2089 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2090 x = validize_mem (force_const_mem (mode, x));
2091
2092 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2093 SUBREGs of such registers are not allowed. */
2094 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2095 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2096 x = copy_to_reg (x);
2097
2098 /* Loop over all the words allocated on the stack for this arg. */
2099 /* We can do it by words, because any scalar bigger than a word
2100 has a size a multiple of a word. */
2101 #ifndef PUSH_ARGS_REVERSED
2102 for (i = not_stack; i < size; i++)
2103 #else
2104 for (i = size - 1; i >= not_stack; i--)
2105 #endif
2106 if (i >= not_stack + offset)
2107 emit_push_insn (operand_subword_force (x, i, mode),
2108 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2109 0, args_addr,
2110 GEN_INT (args_offset + ((i - not_stack + skip)
2111 * UNITS_PER_WORD)));
2112 }
2113 else
2114 {
2115 rtx addr;
2116
2117 /* Push padding now if padding above and stack grows down,
2118 or if padding below and stack grows up.
2119 But if space already allocated, this has already been done. */
2120 if (extra && args_addr == 0
2121 && where_pad != none && where_pad != stack_direction)
2122 anti_adjust_stack (GEN_INT (extra));
2123
2124 #ifdef PUSH_ROUNDING
2125 if (args_addr == 0)
2126 addr = gen_push_operand ();
2127 else
2128 #endif
2129 if (GET_CODE (args_so_far) == CONST_INT)
2130 addr
2131 = memory_address (mode,
2132 plus_constant (args_addr, INTVAL (args_so_far)));
2133 else
2134 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2135 args_so_far));
2136
2137 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2138 }
2139
2140 ret:
2141 /* If part should go in registers, copy that part
2142 into the appropriate registers. Do this now, at the end,
2143 since mem-to-mem copies above may do function calls. */
2144 if (partial > 0 && reg != 0)
2145 move_block_to_reg (REGNO (reg), x, partial, mode);
2146
2147 if (extra && args_addr == 0 && where_pad == stack_direction)
2148 anti_adjust_stack (GEN_INT (extra));
2149 }
2150 \f
2151 /* Expand an assignment that stores the value of FROM into TO.
2152 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2153 (This may contain a QUEUED rtx.)
2154 Otherwise, the returned value is not meaningful.
2155
2156 SUGGEST_REG is no longer actually used.
2157 It used to mean, copy the value through a register
2158 and return that register, if that is possible.
2159 But now we do this if WANT_VALUE.
2160
2161 If the value stored is a constant, we return the constant. */
2162
2163 rtx
2164 expand_assignment (to, from, want_value, suggest_reg)
2165 tree to, from;
2166 int want_value;
2167 int suggest_reg;
2168 {
2169 register rtx to_rtx = 0;
2170 rtx result;
2171
2172 /* Don't crash if the lhs of the assignment was erroneous. */
2173
2174 if (TREE_CODE (to) == ERROR_MARK)
2175 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2176
2177 /* Assignment of a structure component needs special treatment
2178 if the structure component's rtx is not simply a MEM.
2179 Assignment of an array element at a constant index
2180 has the same problem. */
2181
2182 if (TREE_CODE (to) == COMPONENT_REF
2183 || TREE_CODE (to) == BIT_FIELD_REF
2184 || (TREE_CODE (to) == ARRAY_REF
2185 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2186 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2187 {
2188 enum machine_mode mode1;
2189 int bitsize;
2190 int bitpos;
2191 tree offset;
2192 int unsignedp;
2193 int volatilep = 0;
2194 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2195 &mode1, &unsignedp, &volatilep);
2196
2197 /* If we are going to use store_bit_field and extract_bit_field,
2198 make sure to_rtx will be safe for multiple use. */
2199
2200 if (mode1 == VOIDmode && want_value)
2201 tem = stabilize_reference (tem);
2202
2203 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2204 if (offset != 0)
2205 {
2206 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2207
2208 if (GET_CODE (to_rtx) != MEM)
2209 abort ();
2210 to_rtx = change_address (to_rtx, VOIDmode,
2211 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2212 force_reg (Pmode, offset_rtx)));
2213 }
2214 if (volatilep)
2215 {
2216 if (GET_CODE (to_rtx) == MEM)
2217 MEM_VOLATILE_P (to_rtx) = 1;
2218 #if 0 /* This was turned off because, when a field is volatile
2219 in an object which is not volatile, the object may be in a register,
2220 and then we would abort over here. */
2221 else
2222 abort ();
2223 #endif
2224 }
2225
2226 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2227 (want_value
2228 /* Spurious cast makes HPUX compiler happy. */
2229 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2230 : VOIDmode),
2231 unsignedp,
2232 /* Required alignment of containing datum. */
2233 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2234 int_size_in_bytes (TREE_TYPE (tem)));
2235 preserve_temp_slots (result);
2236 free_temp_slots ();
2237
2238 /* If we aren't returning a result, just pass on what expand_expr
2239 returned; it was probably const0_rtx. Otherwise, convert RESULT
2240 to the proper mode. */
2241 return (want_value ? convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
2242 TREE_UNSIGNED (TREE_TYPE (to)))
2243 : result);
2244 }
2245
2246 /* If the rhs is a function call and its value is not an aggregate,
2247 call the function before we start to compute the lhs.
2248 This is needed for correct code for cases such as
2249 val = setjmp (buf) on machines where reference to val
2250 requires loading up part of an address in a separate insn. */
2251 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from))
2252 {
2253 rtx value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2254 if (to_rtx == 0)
2255 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2256 emit_move_insn (to_rtx, value);
2257 preserve_temp_slots (to_rtx);
2258 free_temp_slots ();
2259 return to_rtx;
2260 }
2261
2262 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2263 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2264
2265 if (to_rtx == 0)
2266 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2267
2268 /* Don't move directly into a return register. */
2269 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2270 {
2271 rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2272 emit_move_insn (to_rtx, temp);
2273 preserve_temp_slots (to_rtx);
2274 free_temp_slots ();
2275 return to_rtx;
2276 }
2277
2278 /* In case we are returning the contents of an object which overlaps
2279 the place the value is being stored, use a safe function when copying
2280 a value through a pointer into a structure value return block. */
2281 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2282 && current_function_returns_struct
2283 && !current_function_returns_pcc_struct)
2284 {
2285 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2286 rtx size = expr_size (from);
2287
2288 #ifdef TARGET_MEM_FUNCTIONS
2289 emit_library_call (memcpy_libfunc, 0,
2290 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2291 XEXP (from_rtx, 0), Pmode,
2292 convert_to_mode (TYPE_MODE (sizetype),
2293 size, TREE_UNSIGNED (sizetype)),
2294 TYPE_MODE (sizetype));
2295 #else
2296 emit_library_call (bcopy_libfunc, 0,
2297 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2298 XEXP (to_rtx, 0), Pmode,
2299 convert_to_mode (TYPE_MODE (sizetype),
2300 size, TREE_UNSIGNED (sizetype)),
2301 TYPE_MODE (sizetype));
2302 #endif
2303
2304 preserve_temp_slots (to_rtx);
2305 free_temp_slots ();
2306 return to_rtx;
2307 }
2308
2309 /* Compute FROM and store the value in the rtx we got. */
2310
2311 result = store_expr (from, to_rtx, want_value);
2312 preserve_temp_slots (result);
2313 free_temp_slots ();
2314 return result;
2315 }
2316
2317 /* Generate code for computing expression EXP,
2318 and storing the value into TARGET.
2319 Returns TARGET or an equivalent value.
2320 TARGET may contain a QUEUED rtx.
2321
2322 If SUGGEST_REG is nonzero, copy the value through a register
2323 and return that register, if that is possible.
2324
2325 If the value stored is a constant, we return the constant. */
2326
2327 rtx
2328 store_expr (exp, target, suggest_reg)
2329 register tree exp;
2330 register rtx target;
2331 int suggest_reg;
2332 {
2333 register rtx temp;
2334 int dont_return_target = 0;
2335
2336 if (TREE_CODE (exp) == COMPOUND_EXPR)
2337 {
2338 /* Perform first part of compound expression, then assign from second
2339 part. */
2340 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2341 emit_queue ();
2342 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2343 }
2344 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2345 {
2346 /* For conditional expression, get safe form of the target. Then
2347 test the condition, doing the appropriate assignment on either
2348 side. This avoids the creation of unnecessary temporaries.
2349 For non-BLKmode, it is more efficient not to do this. */
2350
2351 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2352
2353 emit_queue ();
2354 target = protect_from_queue (target, 1);
2355
2356 NO_DEFER_POP;
2357 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2358 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2359 emit_queue ();
2360 emit_jump_insn (gen_jump (lab2));
2361 emit_barrier ();
2362 emit_label (lab1);
2363 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2364 emit_queue ();
2365 emit_label (lab2);
2366 OK_DEFER_POP;
2367 return target;
2368 }
2369 else if (suggest_reg && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2370 && GET_MODE (target) != BLKmode)
2371 /* If target is in memory and caller wants value in a register instead,
2372 arrange that. Pass TARGET as target for expand_expr so that,
2373 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2374 We know expand_expr will not use the target in that case.
2375 Don't do this if TARGET is volatile because we are supposed
2376 to write it and then read it. */
2377 {
2378 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2379 GET_MODE (target), 0);
2380 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2381 temp = copy_to_reg (temp);
2382 dont_return_target = 1;
2383 }
2384 else if (queued_subexp_p (target))
2385 /* If target contains a postincrement, it is not safe
2386 to use as the returned value. It would access the wrong
2387 place by the time the queued increment gets output.
2388 So copy the value through a temporary and use that temp
2389 as the result. */
2390 {
2391 /* ??? There may be a bug here in the case of a target
2392 that is volatile, but I' too sleepy today to write anything
2393 to handle it. */
2394 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2395 {
2396 /* Expand EXP into a new pseudo. */
2397 temp = gen_reg_rtx (GET_MODE (target));
2398 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2399 }
2400 else
2401 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2402 dont_return_target = 1;
2403 }
2404 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2405 /* If this is an scalar in a register that is stored in a wider mode
2406 than the declared mode, compute the result into its declared mode
2407 and then convert to the wider mode. Our value is the computed
2408 expression. */
2409 {
2410 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2411 convert_move (SUBREG_REG (target), temp,
2412 SUBREG_PROMOTED_UNSIGNED_P (target));
2413 return temp;
2414 }
2415 else
2416 {
2417 temp = expand_expr (exp, target, GET_MODE (target), 0);
2418 /* DO return TARGET if it's a specified hardware register.
2419 expand_return relies on this.
2420 DO return TARGET if it's a volatile mem ref; ANSI requires this. */
2421 if (!(target && GET_CODE (target) == REG
2422 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2423 && CONSTANT_P (temp)
2424 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2425 dont_return_target = 1;
2426 }
2427
2428 /* If value was not generated in the target, store it there.
2429 Convert the value to TARGET's type first if nec. */
2430
2431 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2432 {
2433 target = protect_from_queue (target, 1);
2434 if (GET_MODE (temp) != GET_MODE (target)
2435 && GET_MODE (temp) != VOIDmode)
2436 {
2437 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2438 if (dont_return_target)
2439 {
2440 /* In this case, we will return TEMP,
2441 so make sure it has the proper mode.
2442 But don't forget to store the value into TARGET. */
2443 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2444 emit_move_insn (target, temp);
2445 }
2446 else
2447 convert_move (target, temp, unsignedp);
2448 }
2449
2450 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2451 {
2452 /* Handle copying a string constant into an array.
2453 The string constant may be shorter than the array.
2454 So copy just the string's actual length, and clear the rest. */
2455 rtx size;
2456
2457 /* Get the size of the data type of the string,
2458 which is actually the size of the target. */
2459 size = expr_size (exp);
2460 if (GET_CODE (size) == CONST_INT
2461 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2462 emit_block_move (target, temp, size,
2463 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2464 else
2465 {
2466 /* Compute the size of the data to copy from the string. */
2467 tree copy_size
2468 = size_binop (MIN_EXPR,
2469 size_binop (CEIL_DIV_EXPR,
2470 TYPE_SIZE (TREE_TYPE (exp)),
2471 size_int (BITS_PER_UNIT)),
2472 convert (sizetype,
2473 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2474 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2475 VOIDmode, 0);
2476 rtx label = 0;
2477
2478 /* Copy that much. */
2479 emit_block_move (target, temp, copy_size_rtx,
2480 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2481
2482 /* Figure out how much is left in TARGET
2483 that we have to clear. */
2484 if (GET_CODE (copy_size_rtx) == CONST_INT)
2485 {
2486 temp = plus_constant (XEXP (target, 0),
2487 TREE_STRING_LENGTH (exp));
2488 size = plus_constant (size,
2489 - TREE_STRING_LENGTH (exp));
2490 }
2491 else
2492 {
2493 enum machine_mode size_mode = Pmode;
2494
2495 temp = force_reg (Pmode, XEXP (target, 0));
2496 temp = expand_binop (size_mode, add_optab, temp,
2497 copy_size_rtx, NULL_RTX, 0,
2498 OPTAB_LIB_WIDEN);
2499
2500 size = expand_binop (size_mode, sub_optab, size,
2501 copy_size_rtx, NULL_RTX, 0,
2502 OPTAB_LIB_WIDEN);
2503
2504 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2505 GET_MODE (size), 0, 0);
2506 label = gen_label_rtx ();
2507 emit_jump_insn (gen_blt (label));
2508 }
2509
2510 if (size != const0_rtx)
2511 {
2512 #ifdef TARGET_MEM_FUNCTIONS
2513 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2514 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2515 #else
2516 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2517 temp, Pmode, size, Pmode);
2518 #endif
2519 }
2520 if (label)
2521 emit_label (label);
2522 }
2523 }
2524 else if (GET_MODE (temp) == BLKmode)
2525 emit_block_move (target, temp, expr_size (exp),
2526 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2527 else
2528 emit_move_insn (target, temp);
2529 }
2530 if (dont_return_target)
2531 return temp;
2532 return target;
2533 }
2534 \f
2535 /* Store the value of constructor EXP into the rtx TARGET.
2536 TARGET is either a REG or a MEM. */
2537
2538 static void
2539 store_constructor (exp, target)
2540 tree exp;
2541 rtx target;
2542 {
2543 tree type = TREE_TYPE (exp);
2544
2545 /* We know our target cannot conflict, since safe_from_p has been called. */
2546 #if 0
2547 /* Don't try copying piece by piece into a hard register
2548 since that is vulnerable to being clobbered by EXP.
2549 Instead, construct in a pseudo register and then copy it all. */
2550 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2551 {
2552 rtx temp = gen_reg_rtx (GET_MODE (target));
2553 store_constructor (exp, temp);
2554 emit_move_insn (target, temp);
2555 return;
2556 }
2557 #endif
2558
2559 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2560 || TREE_CODE (type) == QUAL_UNION_TYPE)
2561 {
2562 register tree elt;
2563
2564 /* Inform later passes that the whole union value is dead. */
2565 if (TREE_CODE (type) == UNION_TYPE
2566 || TREE_CODE (type) == QUAL_UNION_TYPE)
2567 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2568
2569 /* If we are building a static constructor into a register,
2570 set the initial value as zero so we can fold the value into
2571 a constant. */
2572 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2573 emit_move_insn (target, const0_rtx);
2574
2575 /* If the constructor has fewer fields than the structure,
2576 clear the whole structure first. */
2577 else if (list_length (CONSTRUCTOR_ELTS (exp))
2578 != list_length (TYPE_FIELDS (type)))
2579 clear_storage (target, int_size_in_bytes (type));
2580 else
2581 /* Inform later passes that the old value is dead. */
2582 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2583
2584 /* Store each element of the constructor into
2585 the corresponding field of TARGET. */
2586
2587 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2588 {
2589 register tree field = TREE_PURPOSE (elt);
2590 register enum machine_mode mode;
2591 int bitsize;
2592 int bitpos;
2593 int unsignedp;
2594
2595 /* Just ignore missing fields.
2596 We cleared the whole structure, above,
2597 if any fields are missing. */
2598 if (field == 0)
2599 continue;
2600
2601 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2602 unsignedp = TREE_UNSIGNED (field);
2603 mode = DECL_MODE (field);
2604 if (DECL_BIT_FIELD (field))
2605 mode = VOIDmode;
2606
2607 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2608 /* ??? This case remains to be written. */
2609 abort ();
2610
2611 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2612
2613 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2614 /* The alignment of TARGET is
2615 at least what its type requires. */
2616 VOIDmode, 0,
2617 TYPE_ALIGN (type) / BITS_PER_UNIT,
2618 int_size_in_bytes (type));
2619 }
2620 }
2621 else if (TREE_CODE (type) == ARRAY_TYPE)
2622 {
2623 register tree elt;
2624 register int i;
2625 tree domain = TYPE_DOMAIN (type);
2626 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2627 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2628 tree elttype = TREE_TYPE (type);
2629
2630 /* If the constructor has fewer fields than the structure,
2631 clear the whole structure first. Similarly if this this is
2632 static constructor of a non-BLKmode object. */
2633
2634 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2635 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2636 clear_storage (target, int_size_in_bytes (type));
2637 else
2638 /* Inform later passes that the old value is dead. */
2639 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2640
2641 /* Store each element of the constructor into
2642 the corresponding element of TARGET, determined
2643 by counting the elements. */
2644 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2645 elt;
2646 elt = TREE_CHAIN (elt), i++)
2647 {
2648 register enum machine_mode mode;
2649 int bitsize;
2650 int bitpos;
2651 int unsignedp;
2652
2653 mode = TYPE_MODE (elttype);
2654 bitsize = GET_MODE_BITSIZE (mode);
2655 unsignedp = TREE_UNSIGNED (elttype);
2656
2657 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2658
2659 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2660 /* The alignment of TARGET is
2661 at least what its type requires. */
2662 VOIDmode, 0,
2663 TYPE_ALIGN (type) / BITS_PER_UNIT,
2664 int_size_in_bytes (type));
2665 }
2666 }
2667
2668 else
2669 abort ();
2670 }
2671
2672 /* Store the value of EXP (an expression tree)
2673 into a subfield of TARGET which has mode MODE and occupies
2674 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2675 If MODE is VOIDmode, it means that we are storing into a bit-field.
2676
2677 If VALUE_MODE is VOIDmode, return nothing in particular.
2678 UNSIGNEDP is not used in this case.
2679
2680 Otherwise, return an rtx for the value stored. This rtx
2681 has mode VALUE_MODE if that is convenient to do.
2682 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2683
2684 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2685 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2686
2687 static rtx
2688 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2689 unsignedp, align, total_size)
2690 rtx target;
2691 int bitsize, bitpos;
2692 enum machine_mode mode;
2693 tree exp;
2694 enum machine_mode value_mode;
2695 int unsignedp;
2696 int align;
2697 int total_size;
2698 {
2699 HOST_WIDE_INT width_mask = 0;
2700
2701 if (bitsize < HOST_BITS_PER_WIDE_INT)
2702 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2703
2704 /* If we are storing into an unaligned field of an aligned union that is
2705 in a register, we may have the mode of TARGET being an integer mode but
2706 MODE == BLKmode. In that case, get an aligned object whose size and
2707 alignment are the same as TARGET and store TARGET into it (we can avoid
2708 the store if the field being stored is the entire width of TARGET). Then
2709 call ourselves recursively to store the field into a BLKmode version of
2710 that object. Finally, load from the object into TARGET. This is not
2711 very efficient in general, but should only be slightly more expensive
2712 than the otherwise-required unaligned accesses. Perhaps this can be
2713 cleaned up later. */
2714
2715 if (mode == BLKmode
2716 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2717 {
2718 rtx object = assign_stack_temp (GET_MODE (target),
2719 GET_MODE_SIZE (GET_MODE (target)), 0);
2720 rtx blk_object = copy_rtx (object);
2721
2722 PUT_MODE (blk_object, BLKmode);
2723
2724 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2725 emit_move_insn (object, target);
2726
2727 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2728 align, total_size);
2729
2730 emit_move_insn (target, object);
2731
2732 return target;
2733 }
2734
2735 /* If the structure is in a register or if the component
2736 is a bit field, we cannot use addressing to access it.
2737 Use bit-field techniques or SUBREG to store in it. */
2738
2739 if (mode == VOIDmode
2740 || (mode != BLKmode && ! direct_store[(int) mode])
2741 || GET_CODE (target) == REG
2742 || GET_CODE (target) == SUBREG
2743 /* If the field isn't aligned enough to store as an ordinary memref,
2744 store it as a bit field. */
2745 || (STRICT_ALIGNMENT
2746 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
2747 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
2748 {
2749 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2750 /* Store the value in the bitfield. */
2751 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2752 if (value_mode != VOIDmode)
2753 {
2754 /* The caller wants an rtx for the value. */
2755 /* If possible, avoid refetching from the bitfield itself. */
2756 if (width_mask != 0
2757 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2758 {
2759 tree count;
2760 enum machine_mode tmode;
2761
2762 if (unsignedp)
2763 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2764 tmode = GET_MODE (temp);
2765 if (tmode == VOIDmode)
2766 tmode = value_mode;
2767 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2768 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2769 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2770 }
2771 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2772 NULL_RTX, value_mode, 0, align,
2773 total_size);
2774 }
2775 return const0_rtx;
2776 }
2777 else
2778 {
2779 rtx addr = XEXP (target, 0);
2780 rtx to_rtx;
2781
2782 /* If a value is wanted, it must be the lhs;
2783 so make the address stable for multiple use. */
2784
2785 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2786 && ! CONSTANT_ADDRESS_P (addr)
2787 /* A frame-pointer reference is already stable. */
2788 && ! (GET_CODE (addr) == PLUS
2789 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2790 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2791 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2792 addr = copy_to_reg (addr);
2793
2794 /* Now build a reference to just the desired component. */
2795
2796 to_rtx = change_address (target, mode,
2797 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2798 MEM_IN_STRUCT_P (to_rtx) = 1;
2799
2800 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2801 }
2802 }
2803 \f
2804 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2805 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2806 ARRAY_REFs and find the ultimate containing object, which we return.
2807
2808 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2809 bit position, and *PUNSIGNEDP to the signedness of the field.
2810 If the position of the field is variable, we store a tree
2811 giving the variable offset (in units) in *POFFSET.
2812 This offset is in addition to the bit position.
2813 If the position is not variable, we store 0 in *POFFSET.
2814
2815 If any of the extraction expressions is volatile,
2816 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2817
2818 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2819 is a mode that can be used to access the field. In that case, *PBITSIZE
2820 is redundant.
2821
2822 If the field describes a variable-sized object, *PMODE is set to
2823 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2824 this case, but the address of the object can be found. */
2825
2826 tree
2827 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
2828 punsignedp, pvolatilep)
2829 tree exp;
2830 int *pbitsize;
2831 int *pbitpos;
2832 tree *poffset;
2833 enum machine_mode *pmode;
2834 int *punsignedp;
2835 int *pvolatilep;
2836 {
2837 tree size_tree = 0;
2838 enum machine_mode mode = VOIDmode;
2839 tree offset = integer_zero_node;
2840
2841 if (TREE_CODE (exp) == COMPONENT_REF)
2842 {
2843 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2844 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2845 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2846 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2847 }
2848 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2849 {
2850 size_tree = TREE_OPERAND (exp, 1);
2851 *punsignedp = TREE_UNSIGNED (exp);
2852 }
2853 else
2854 {
2855 mode = TYPE_MODE (TREE_TYPE (exp));
2856 *pbitsize = GET_MODE_BITSIZE (mode);
2857 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2858 }
2859
2860 if (size_tree)
2861 {
2862 if (TREE_CODE (size_tree) != INTEGER_CST)
2863 mode = BLKmode, *pbitsize = -1;
2864 else
2865 *pbitsize = TREE_INT_CST_LOW (size_tree);
2866 }
2867
2868 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2869 and find the ultimate containing object. */
2870
2871 *pbitpos = 0;
2872
2873 while (1)
2874 {
2875 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2876 {
2877 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2878 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2879 : TREE_OPERAND (exp, 2));
2880
2881 /* If this field hasn't been filled in yet, don't go
2882 past it. This should only happen when folding expressions
2883 made during type construction. */
2884 if (pos == 0)
2885 break;
2886
2887 if (TREE_CODE (pos) == PLUS_EXPR)
2888 {
2889 tree constant, var;
2890 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2891 {
2892 constant = TREE_OPERAND (pos, 0);
2893 var = TREE_OPERAND (pos, 1);
2894 }
2895 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2896 {
2897 constant = TREE_OPERAND (pos, 1);
2898 var = TREE_OPERAND (pos, 0);
2899 }
2900 else
2901 abort ();
2902
2903 *pbitpos += TREE_INT_CST_LOW (constant);
2904 offset = size_binop (PLUS_EXPR, offset,
2905 size_binop (FLOOR_DIV_EXPR, var,
2906 size_int (BITS_PER_UNIT)));
2907 }
2908 else if (TREE_CODE (pos) == INTEGER_CST)
2909 *pbitpos += TREE_INT_CST_LOW (pos);
2910 else
2911 {
2912 /* Assume here that the offset is a multiple of a unit.
2913 If not, there should be an explicitly added constant. */
2914 offset = size_binop (PLUS_EXPR, offset,
2915 size_binop (FLOOR_DIV_EXPR, pos,
2916 size_int (BITS_PER_UNIT)));
2917 }
2918 }
2919
2920 else if (TREE_CODE (exp) == ARRAY_REF)
2921 {
2922 /* This code is based on the code in case ARRAY_REF in expand_expr
2923 below. We assume here that the size of an array element is
2924 always an integral multiple of BITS_PER_UNIT. */
2925
2926 tree index = TREE_OPERAND (exp, 1);
2927 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
2928 tree low_bound
2929 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
2930 tree index_type = TREE_TYPE (index);
2931
2932 if (! integer_zerop (low_bound))
2933 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
2934
2935 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
2936 {
2937 index = convert (type_for_size (POINTER_SIZE, 0), index);
2938 index_type = TREE_TYPE (index);
2939 }
2940
2941 index = fold (build (MULT_EXPR, index_type, index,
2942 TYPE_SIZE (TREE_TYPE (exp))));
2943
2944 if (TREE_CODE (index) == INTEGER_CST
2945 && TREE_INT_CST_HIGH (index) == 0)
2946 *pbitpos += TREE_INT_CST_LOW (index);
2947 else
2948 offset = size_binop (PLUS_EXPR, offset,
2949 size_binop (FLOOR_DIV_EXPR, index,
2950 size_int (BITS_PER_UNIT)));
2951 }
2952 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2953 && ! ((TREE_CODE (exp) == NOP_EXPR
2954 || TREE_CODE (exp) == CONVERT_EXPR)
2955 && (TYPE_MODE (TREE_TYPE (exp))
2956 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2957 break;
2958
2959 /* If any reference in the chain is volatile, the effect is volatile. */
2960 if (TREE_THIS_VOLATILE (exp))
2961 *pvolatilep = 1;
2962 exp = TREE_OPERAND (exp, 0);
2963 }
2964
2965 /* If this was a bit-field, see if there is a mode that allows direct
2966 access in case EXP is in memory. */
2967 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
2968 {
2969 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2970 if (mode == BLKmode)
2971 mode = VOIDmode;
2972 }
2973
2974 if (integer_zerop (offset))
2975 offset = 0;
2976
2977 *pmode = mode;
2978 *poffset = offset;
2979 #if 0
2980 /* We aren't finished fixing the callers to really handle nonzero offset. */
2981 if (offset != 0)
2982 abort ();
2983 #endif
2984
2985 return exp;
2986 }
2987 \f
2988 /* Given an rtx VALUE that may contain additions and multiplications,
2989 return an equivalent value that just refers to a register or memory.
2990 This is done by generating instructions to perform the arithmetic
2991 and returning a pseudo-register containing the value.
2992
2993 The returned value may be a REG, SUBREG, MEM or constant. */
2994
2995 rtx
2996 force_operand (value, target)
2997 rtx value, target;
2998 {
2999 register optab binoptab = 0;
3000 /* Use a temporary to force order of execution of calls to
3001 `force_operand'. */
3002 rtx tmp;
3003 register rtx op2;
3004 /* Use subtarget as the target for operand 0 of a binary operation. */
3005 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3006
3007 if (GET_CODE (value) == PLUS)
3008 binoptab = add_optab;
3009 else if (GET_CODE (value) == MINUS)
3010 binoptab = sub_optab;
3011 else if (GET_CODE (value) == MULT)
3012 {
3013 op2 = XEXP (value, 1);
3014 if (!CONSTANT_P (op2)
3015 && !(GET_CODE (op2) == REG && op2 != subtarget))
3016 subtarget = 0;
3017 tmp = force_operand (XEXP (value, 0), subtarget);
3018 return expand_mult (GET_MODE (value), tmp,
3019 force_operand (op2, NULL_RTX),
3020 target, 0);
3021 }
3022
3023 if (binoptab)
3024 {
3025 op2 = XEXP (value, 1);
3026 if (!CONSTANT_P (op2)
3027 && !(GET_CODE (op2) == REG && op2 != subtarget))
3028 subtarget = 0;
3029 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3030 {
3031 binoptab = add_optab;
3032 op2 = negate_rtx (GET_MODE (value), op2);
3033 }
3034
3035 /* Check for an addition with OP2 a constant integer and our first
3036 operand a PLUS of a virtual register and something else. In that
3037 case, we want to emit the sum of the virtual register and the
3038 constant first and then add the other value. This allows virtual
3039 register instantiation to simply modify the constant rather than
3040 creating another one around this addition. */
3041 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3042 && GET_CODE (XEXP (value, 0)) == PLUS
3043 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3044 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3045 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3046 {
3047 rtx temp = expand_binop (GET_MODE (value), binoptab,
3048 XEXP (XEXP (value, 0), 0), op2,
3049 subtarget, 0, OPTAB_LIB_WIDEN);
3050 return expand_binop (GET_MODE (value), binoptab, temp,
3051 force_operand (XEXP (XEXP (value, 0), 1), 0),
3052 target, 0, OPTAB_LIB_WIDEN);
3053 }
3054
3055 tmp = force_operand (XEXP (value, 0), subtarget);
3056 return expand_binop (GET_MODE (value), binoptab, tmp,
3057 force_operand (op2, NULL_RTX),
3058 target, 0, OPTAB_LIB_WIDEN);
3059 /* We give UNSIGNEDP = 0 to expand_binop
3060 because the only operations we are expanding here are signed ones. */
3061 }
3062 return value;
3063 }
3064 \f
3065 /* Subroutine of expand_expr:
3066 save the non-copied parts (LIST) of an expr (LHS), and return a list
3067 which can restore these values to their previous values,
3068 should something modify their storage. */
3069
3070 static tree
3071 save_noncopied_parts (lhs, list)
3072 tree lhs;
3073 tree list;
3074 {
3075 tree tail;
3076 tree parts = 0;
3077
3078 for (tail = list; tail; tail = TREE_CHAIN (tail))
3079 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3080 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3081 else
3082 {
3083 tree part = TREE_VALUE (tail);
3084 tree part_type = TREE_TYPE (part);
3085 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3086 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3087 int_size_in_bytes (part_type), 0);
3088 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3089 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3090 parts = tree_cons (to_be_saved,
3091 build (RTL_EXPR, part_type, NULL_TREE,
3092 (tree) target),
3093 parts);
3094 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3095 }
3096 return parts;
3097 }
3098
3099 /* Subroutine of expand_expr:
3100 record the non-copied parts (LIST) of an expr (LHS), and return a list
3101 which specifies the initial values of these parts. */
3102
3103 static tree
3104 init_noncopied_parts (lhs, list)
3105 tree lhs;
3106 tree list;
3107 {
3108 tree tail;
3109 tree parts = 0;
3110
3111 for (tail = list; tail; tail = TREE_CHAIN (tail))
3112 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3113 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3114 else
3115 {
3116 tree part = TREE_VALUE (tail);
3117 tree part_type = TREE_TYPE (part);
3118 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3119 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3120 }
3121 return parts;
3122 }
3123
3124 /* Subroutine of expand_expr: return nonzero iff there is no way that
3125 EXP can reference X, which is being modified. */
3126
3127 static int
3128 safe_from_p (x, exp)
3129 rtx x;
3130 tree exp;
3131 {
3132 rtx exp_rtl = 0;
3133 int i, nops;
3134
3135 if (x == 0)
3136 return 1;
3137
3138 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3139 find the underlying pseudo. */
3140 if (GET_CODE (x) == SUBREG)
3141 {
3142 x = SUBREG_REG (x);
3143 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3144 return 0;
3145 }
3146
3147 /* If X is a location in the outgoing argument area, it is always safe. */
3148 if (GET_CODE (x) == MEM
3149 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3150 || (GET_CODE (XEXP (x, 0)) == PLUS
3151 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3152 return 1;
3153
3154 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3155 {
3156 case 'd':
3157 exp_rtl = DECL_RTL (exp);
3158 break;
3159
3160 case 'c':
3161 return 1;
3162
3163 case 'x':
3164 if (TREE_CODE (exp) == TREE_LIST)
3165 return ((TREE_VALUE (exp) == 0
3166 || safe_from_p (x, TREE_VALUE (exp)))
3167 && (TREE_CHAIN (exp) == 0
3168 || safe_from_p (x, TREE_CHAIN (exp))));
3169 else
3170 return 0;
3171
3172 case '1':
3173 return safe_from_p (x, TREE_OPERAND (exp, 0));
3174
3175 case '2':
3176 case '<':
3177 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3178 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3179
3180 case 'e':
3181 case 'r':
3182 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3183 the expression. If it is set, we conflict iff we are that rtx or
3184 both are in memory. Otherwise, we check all operands of the
3185 expression recursively. */
3186
3187 switch (TREE_CODE (exp))
3188 {
3189 case ADDR_EXPR:
3190 return (staticp (TREE_OPERAND (exp, 0))
3191 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3192
3193 case INDIRECT_REF:
3194 if (GET_CODE (x) == MEM)
3195 return 0;
3196 break;
3197
3198 case CALL_EXPR:
3199 exp_rtl = CALL_EXPR_RTL (exp);
3200 if (exp_rtl == 0)
3201 {
3202 /* Assume that the call will clobber all hard registers and
3203 all of memory. */
3204 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3205 || GET_CODE (x) == MEM)
3206 return 0;
3207 }
3208
3209 break;
3210
3211 case RTL_EXPR:
3212 exp_rtl = RTL_EXPR_RTL (exp);
3213 if (exp_rtl == 0)
3214 /* We don't know what this can modify. */
3215 return 0;
3216
3217 break;
3218
3219 case WITH_CLEANUP_EXPR:
3220 exp_rtl = RTL_EXPR_RTL (exp);
3221 break;
3222
3223 case SAVE_EXPR:
3224 exp_rtl = SAVE_EXPR_RTL (exp);
3225 break;
3226
3227 case BIND_EXPR:
3228 /* The only operand we look at is operand 1. The rest aren't
3229 part of the expression. */
3230 return safe_from_p (x, TREE_OPERAND (exp, 1));
3231
3232 case METHOD_CALL_EXPR:
3233 /* This takes a rtx argument, but shouldn't appear here. */
3234 abort ();
3235 }
3236
3237 /* If we have an rtx, we do not need to scan our operands. */
3238 if (exp_rtl)
3239 break;
3240
3241 nops = tree_code_length[(int) TREE_CODE (exp)];
3242 for (i = 0; i < nops; i++)
3243 if (TREE_OPERAND (exp, i) != 0
3244 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3245 return 0;
3246 }
3247
3248 /* If we have an rtl, find any enclosed object. Then see if we conflict
3249 with it. */
3250 if (exp_rtl)
3251 {
3252 if (GET_CODE (exp_rtl) == SUBREG)
3253 {
3254 exp_rtl = SUBREG_REG (exp_rtl);
3255 if (GET_CODE (exp_rtl) == REG
3256 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3257 return 0;
3258 }
3259
3260 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3261 are memory and EXP is not readonly. */
3262 return ! (rtx_equal_p (x, exp_rtl)
3263 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3264 && ! TREE_READONLY (exp)));
3265 }
3266
3267 /* If we reach here, it is safe. */
3268 return 1;
3269 }
3270
3271 /* Subroutine of expand_expr: return nonzero iff EXP is an
3272 expression whose type is statically determinable. */
3273
3274 static int
3275 fixed_type_p (exp)
3276 tree exp;
3277 {
3278 if (TREE_CODE (exp) == PARM_DECL
3279 || TREE_CODE (exp) == VAR_DECL
3280 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3281 || TREE_CODE (exp) == COMPONENT_REF
3282 || TREE_CODE (exp) == ARRAY_REF)
3283 return 1;
3284 return 0;
3285 }
3286 \f
3287 /* expand_expr: generate code for computing expression EXP.
3288 An rtx for the computed value is returned. The value is never null.
3289 In the case of a void EXP, const0_rtx is returned.
3290
3291 The value may be stored in TARGET if TARGET is nonzero.
3292 TARGET is just a suggestion; callers must assume that
3293 the rtx returned may not be the same as TARGET.
3294
3295 If TARGET is CONST0_RTX, it means that the value will be ignored.
3296
3297 If TMODE is not VOIDmode, it suggests generating the
3298 result in mode TMODE. But this is done only when convenient.
3299 Otherwise, TMODE is ignored and the value generated in its natural mode.
3300 TMODE is just a suggestion; callers must assume that
3301 the rtx returned may not have mode TMODE.
3302
3303 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3304 with a constant address even if that address is not normally legitimate.
3305 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3306
3307 If MODIFIER is EXPAND_SUM then when EXP is an addition
3308 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3309 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3310 products as above, or REG or MEM, or constant.
3311 Ordinarily in such cases we would output mul or add instructions
3312 and then return a pseudo reg containing the sum.
3313
3314 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3315 it also marks a label as absolutely required (it can't be dead).
3316 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3317 This is used for outputting expressions used in initializers. */
3318
3319 rtx
3320 expand_expr (exp, target, tmode, modifier)
3321 register tree exp;
3322 rtx target;
3323 enum machine_mode tmode;
3324 enum expand_modifier modifier;
3325 {
3326 register rtx op0, op1, temp;
3327 tree type = TREE_TYPE (exp);
3328 int unsignedp = TREE_UNSIGNED (type);
3329 register enum machine_mode mode = TYPE_MODE (type);
3330 register enum tree_code code = TREE_CODE (exp);
3331 optab this_optab;
3332 /* Use subtarget as the target for operand 0 of a binary operation. */
3333 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3334 rtx original_target = target;
3335 int ignore = (target == const0_rtx
3336 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3337 || code == CONVERT_EXPR || code == REFERENCE_EXPR)
3338 && TREE_CODE (type) == VOID_TYPE));
3339 tree context;
3340
3341 /* Don't use hard regs as subtargets, because the combiner
3342 can only handle pseudo regs. */
3343 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3344 subtarget = 0;
3345 /* Avoid subtargets inside loops,
3346 since they hide some invariant expressions. */
3347 if (preserve_subexpressions_p ())
3348 subtarget = 0;
3349
3350 /* If we are going to ignore this result, we need only do something
3351 if there is a side-effect somewhere in the expression. If there
3352 is, short-circuit the most common cases here. */
3353
3354 if (ignore)
3355 {
3356 if (! TREE_SIDE_EFFECTS (exp))
3357 return const0_rtx;
3358
3359 /* Ensure we reference a volatile object even if value is ignored. */
3360 if (TREE_THIS_VOLATILE (exp)
3361 && TREE_CODE (exp) != FUNCTION_DECL
3362 && mode != VOIDmode && mode != BLKmode)
3363 {
3364 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3365 if (GET_CODE (temp) == MEM)
3366 temp = copy_to_reg (temp);
3367 return const0_rtx;
3368 }
3369
3370 if (TREE_CODE_CLASS (code) == '1')
3371 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3372 VOIDmode, modifier);
3373 else if (TREE_CODE_CLASS (code) == '2'
3374 || TREE_CODE_CLASS (code) == '<')
3375 {
3376 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3377 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3378 return const0_rtx;
3379 }
3380 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3381 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3382 /* If the second operand has no side effects, just evaluate
3383 the first. */
3384 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3385 VOIDmode, modifier);
3386
3387 target = 0, original_target = 0;
3388 }
3389
3390 /* If will do cse, generate all results into pseudo registers
3391 since 1) that allows cse to find more things
3392 and 2) otherwise cse could produce an insn the machine
3393 cannot support. */
3394
3395 if (! cse_not_expected && mode != BLKmode && target
3396 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3397 target = subtarget;
3398
3399 switch (code)
3400 {
3401 case LABEL_DECL:
3402 {
3403 tree function = decl_function_context (exp);
3404 /* Handle using a label in a containing function. */
3405 if (function != current_function_decl && function != 0)
3406 {
3407 struct function *p = find_function_data (function);
3408 /* Allocate in the memory associated with the function
3409 that the label is in. */
3410 push_obstacks (p->function_obstack,
3411 p->function_maybepermanent_obstack);
3412
3413 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3414 label_rtx (exp), p->forced_labels);
3415 pop_obstacks ();
3416 }
3417 else if (modifier == EXPAND_INITIALIZER)
3418 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3419 label_rtx (exp), forced_labels);
3420 temp = gen_rtx (MEM, FUNCTION_MODE,
3421 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3422 if (function != current_function_decl && function != 0)
3423 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3424 return temp;
3425 }
3426
3427 case PARM_DECL:
3428 if (DECL_RTL (exp) == 0)
3429 {
3430 error_with_decl (exp, "prior parameter's size depends on `%s'");
3431 return CONST0_RTX (mode);
3432 }
3433
3434 case FUNCTION_DECL:
3435 case VAR_DECL:
3436 case RESULT_DECL:
3437 if (DECL_RTL (exp) == 0)
3438 abort ();
3439 /* Ensure variable marked as used even if it doesn't go through
3440 a parser. If it hasn't be used yet, write out an external
3441 definition. */
3442 if (! TREE_USED (exp))
3443 {
3444 assemble_external (exp);
3445 TREE_USED (exp) = 1;
3446 }
3447
3448 /* Handle variables inherited from containing functions. */
3449 context = decl_function_context (exp);
3450
3451 /* We treat inline_function_decl as an alias for the current function
3452 because that is the inline function whose vars, types, etc.
3453 are being merged into the current function.
3454 See expand_inline_function. */
3455 if (context != 0 && context != current_function_decl
3456 && context != inline_function_decl
3457 /* If var is static, we don't need a static chain to access it. */
3458 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3459 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3460 {
3461 rtx addr;
3462
3463 /* Mark as non-local and addressable. */
3464 DECL_NONLOCAL (exp) = 1;
3465 mark_addressable (exp);
3466 if (GET_CODE (DECL_RTL (exp)) != MEM)
3467 abort ();
3468 addr = XEXP (DECL_RTL (exp), 0);
3469 if (GET_CODE (addr) == MEM)
3470 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3471 else
3472 addr = fix_lexical_addr (addr, exp);
3473 return change_address (DECL_RTL (exp), mode, addr);
3474 }
3475
3476 /* This is the case of an array whose size is to be determined
3477 from its initializer, while the initializer is still being parsed.
3478 See expand_decl. */
3479 if (GET_CODE (DECL_RTL (exp)) == MEM
3480 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3481 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3482 XEXP (DECL_RTL (exp), 0));
3483 if (GET_CODE (DECL_RTL (exp)) == MEM
3484 && modifier != EXPAND_CONST_ADDRESS
3485 && modifier != EXPAND_SUM
3486 && modifier != EXPAND_INITIALIZER)
3487 {
3488 /* DECL_RTL probably contains a constant address.
3489 On RISC machines where a constant address isn't valid,
3490 make some insns to get that address into a register. */
3491 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3492 || (flag_force_addr
3493 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3494 return change_address (DECL_RTL (exp), VOIDmode,
3495 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3496 }
3497
3498 /* If the mode of DECL_RTL does not match that of the decl, it
3499 must be a promoted value. We return a SUBREG of the wanted mode,
3500 but mark it so that we know that it was already extended. */
3501
3502 if (GET_CODE (DECL_RTL (exp)) == REG
3503 && GET_MODE (DECL_RTL (exp)) != mode)
3504 {
3505 enum machine_mode decl_mode = DECL_MODE (exp);
3506
3507 /* Get the signedness used for this variable. Ensure we get the
3508 same mode we got when the variable was declared. */
3509
3510 PROMOTE_MODE (decl_mode, unsignedp, type);
3511
3512 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3513 abort ();
3514
3515 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3516 SUBREG_PROMOTED_VAR_P (temp) = 1;
3517 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3518 return temp;
3519 }
3520
3521 return DECL_RTL (exp);
3522
3523 case INTEGER_CST:
3524 return immed_double_const (TREE_INT_CST_LOW (exp),
3525 TREE_INT_CST_HIGH (exp),
3526 mode);
3527
3528 case CONST_DECL:
3529 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3530
3531 case REAL_CST:
3532 /* If optimized, generate immediate CONST_DOUBLE
3533 which will be turned into memory by reload if necessary.
3534
3535 We used to force a register so that loop.c could see it. But
3536 this does not allow gen_* patterns to perform optimizations with
3537 the constants. It also produces two insns in cases like "x = 1.0;".
3538 On most machines, floating-point constants are not permitted in
3539 many insns, so we'd end up copying it to a register in any case.
3540
3541 Now, we do the copying in expand_binop, if appropriate. */
3542 return immed_real_const (exp);
3543
3544 case COMPLEX_CST:
3545 case STRING_CST:
3546 if (! TREE_CST_RTL (exp))
3547 output_constant_def (exp);
3548
3549 /* TREE_CST_RTL probably contains a constant address.
3550 On RISC machines where a constant address isn't valid,
3551 make some insns to get that address into a register. */
3552 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3553 && modifier != EXPAND_CONST_ADDRESS
3554 && modifier != EXPAND_INITIALIZER
3555 && modifier != EXPAND_SUM
3556 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3557 return change_address (TREE_CST_RTL (exp), VOIDmode,
3558 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3559 return TREE_CST_RTL (exp);
3560
3561 case SAVE_EXPR:
3562 context = decl_function_context (exp);
3563 /* We treat inline_function_decl as an alias for the current function
3564 because that is the inline function whose vars, types, etc.
3565 are being merged into the current function.
3566 See expand_inline_function. */
3567 if (context == current_function_decl || context == inline_function_decl)
3568 context = 0;
3569
3570 /* If this is non-local, handle it. */
3571 if (context)
3572 {
3573 temp = SAVE_EXPR_RTL (exp);
3574 if (temp && GET_CODE (temp) == REG)
3575 {
3576 put_var_into_stack (exp);
3577 temp = SAVE_EXPR_RTL (exp);
3578 }
3579 if (temp == 0 || GET_CODE (temp) != MEM)
3580 abort ();
3581 return change_address (temp, mode,
3582 fix_lexical_addr (XEXP (temp, 0), exp));
3583 }
3584 if (SAVE_EXPR_RTL (exp) == 0)
3585 {
3586 if (mode == BLKmode)
3587 temp
3588 = assign_stack_temp (mode,
3589 int_size_in_bytes (TREE_TYPE (exp)), 0);
3590 else
3591 {
3592 enum machine_mode var_mode = mode;
3593
3594 if (TREE_CODE (type) == INTEGER_TYPE
3595 || TREE_CODE (type) == ENUMERAL_TYPE
3596 || TREE_CODE (type) == BOOLEAN_TYPE
3597 || TREE_CODE (type) == CHAR_TYPE
3598 || TREE_CODE (type) == REAL_TYPE
3599 || TREE_CODE (type) == POINTER_TYPE
3600 || TREE_CODE (type) == OFFSET_TYPE)
3601 {
3602 PROMOTE_MODE (var_mode, unsignedp, type);
3603 }
3604
3605 temp = gen_reg_rtx (var_mode);
3606 }
3607
3608 SAVE_EXPR_RTL (exp) = temp;
3609 if (!optimize && GET_CODE (temp) == REG)
3610 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3611 save_expr_regs);
3612
3613 /* If the mode of TEMP does not match that of the expression, it
3614 must be a promoted value. We pass store_expr a SUBREG of the
3615 wanted mode but mark it so that we know that it was already
3616 extended. Note that `unsignedp' was modified above in
3617 this case. */
3618
3619 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3620 {
3621 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3622 SUBREG_PROMOTED_VAR_P (temp) = 1;
3623 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3624 }
3625
3626 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3627 }
3628
3629 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3630 must be a promoted value. We return a SUBREG of the wanted mode,
3631 but mark it so that we know that it was already extended. Note
3632 that `unsignedp' was modified above in this case. */
3633
3634 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3635 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3636 {
3637 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3638 SUBREG_PROMOTED_VAR_P (temp) = 1;
3639 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3640 return temp;
3641 }
3642
3643 return SAVE_EXPR_RTL (exp);
3644
3645 case EXIT_EXPR:
3646 expand_exit_loop_if_false (NULL_PTR,
3647 invert_truthvalue (TREE_OPERAND (exp, 0)));
3648 return const0_rtx;
3649
3650 case LOOP_EXPR:
3651 expand_start_loop (1);
3652 expand_expr_stmt (TREE_OPERAND (exp, 0));
3653 expand_end_loop ();
3654
3655 return const0_rtx;
3656
3657 case BIND_EXPR:
3658 {
3659 tree vars = TREE_OPERAND (exp, 0);
3660 int vars_need_expansion = 0;
3661
3662 /* Need to open a binding contour here because
3663 if there are any cleanups they most be contained here. */
3664 expand_start_bindings (0);
3665
3666 /* Mark the corresponding BLOCK for output in its proper place. */
3667 if (TREE_OPERAND (exp, 2) != 0
3668 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3669 insert_block (TREE_OPERAND (exp, 2));
3670
3671 /* If VARS have not yet been expanded, expand them now. */
3672 while (vars)
3673 {
3674 if (DECL_RTL (vars) == 0)
3675 {
3676 vars_need_expansion = 1;
3677 expand_decl (vars);
3678 }
3679 expand_decl_init (vars);
3680 vars = TREE_CHAIN (vars);
3681 }
3682
3683 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3684
3685 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3686
3687 return temp;
3688 }
3689
3690 case RTL_EXPR:
3691 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3692 abort ();
3693 emit_insns (RTL_EXPR_SEQUENCE (exp));
3694 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3695 return RTL_EXPR_RTL (exp);
3696
3697 case CONSTRUCTOR:
3698 /* If we don't need the result, just ensure we evaluate any
3699 subexpressions. */
3700 if (ignore)
3701 {
3702 tree elt;
3703 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3704 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3705 return const0_rtx;
3706 }
3707 /* All elts simple constants => refer to a constant in memory. But
3708 if this is a non-BLKmode mode, let it store a field at a time
3709 since that should make a CONST_INT or CONST_DOUBLE when we
3710 fold. If we are making an initializer and all operands are
3711 constant, put it in memory as well. */
3712 else if ((TREE_STATIC (exp)
3713 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3714 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
3715 {
3716 rtx constructor = output_constant_def (exp);
3717 if (modifier != EXPAND_CONST_ADDRESS
3718 && modifier != EXPAND_INITIALIZER
3719 && modifier != EXPAND_SUM
3720 && !memory_address_p (GET_MODE (constructor),
3721 XEXP (constructor, 0)))
3722 constructor = change_address (constructor, VOIDmode,
3723 XEXP (constructor, 0));
3724 return constructor;
3725 }
3726
3727 else
3728 {
3729 if (target == 0 || ! safe_from_p (target, exp))
3730 {
3731 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3732 target = gen_reg_rtx (mode);
3733 else
3734 {
3735 enum tree_code c = TREE_CODE (type);
3736 target
3737 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3738 if (c == RECORD_TYPE || c == UNION_TYPE
3739 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3740 MEM_IN_STRUCT_P (target) = 1;
3741 }
3742 }
3743 store_constructor (exp, target);
3744 return target;
3745 }
3746
3747 case INDIRECT_REF:
3748 {
3749 tree exp1 = TREE_OPERAND (exp, 0);
3750 tree exp2;
3751
3752 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3753 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3754 This code has the same general effect as simply doing
3755 expand_expr on the save expr, except that the expression PTR
3756 is computed for use as a memory address. This means different
3757 code, suitable for indexing, may be generated. */
3758 if (TREE_CODE (exp1) == SAVE_EXPR
3759 && SAVE_EXPR_RTL (exp1) == 0
3760 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3761 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3762 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3763 {
3764 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3765 VOIDmode, EXPAND_SUM);
3766 op0 = memory_address (mode, temp);
3767 op0 = copy_all_regs (op0);
3768 SAVE_EXPR_RTL (exp1) = op0;
3769 }
3770 else
3771 {
3772 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3773 op0 = memory_address (mode, op0);
3774 }
3775
3776 temp = gen_rtx (MEM, mode, op0);
3777 /* If address was computed by addition,
3778 mark this as an element of an aggregate. */
3779 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3780 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3781 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3782 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3783 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3784 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3785 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
3786 || (TREE_CODE (exp1) == ADDR_EXPR
3787 && (exp2 = TREE_OPERAND (exp1, 0))
3788 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3789 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3790 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3791 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
3792 MEM_IN_STRUCT_P (temp) = 1;
3793 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3794 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3795 a location is accessed through a pointer to const does not mean
3796 that the value there can never change. */
3797 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3798 #endif
3799 return temp;
3800 }
3801
3802 case ARRAY_REF:
3803 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3804 abort ();
3805
3806 {
3807 tree array = TREE_OPERAND (exp, 0);
3808 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3809 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3810 tree index = TREE_OPERAND (exp, 1);
3811 tree index_type = TREE_TYPE (index);
3812 int i;
3813
3814 /* Optimize the special-case of a zero lower bound. */
3815 if (! integer_zerop (low_bound))
3816 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3817
3818 if (TREE_CODE (index) != INTEGER_CST
3819 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3820 {
3821 /* Nonconstant array index or nonconstant element size.
3822 Generate the tree for *(&array+index) and expand that,
3823 except do it in a language-independent way
3824 and don't complain about non-lvalue arrays.
3825 `mark_addressable' should already have been called
3826 for any array for which this case will be reached. */
3827
3828 /* Don't forget the const or volatile flag from the array
3829 element. */
3830 tree variant_type = build_type_variant (type,
3831 TREE_READONLY (exp),
3832 TREE_THIS_VOLATILE (exp));
3833 tree array_adr = build1 (ADDR_EXPR,
3834 build_pointer_type (variant_type), array);
3835 tree elt;
3836
3837 /* Convert the integer argument to a type the same size as a
3838 pointer so the multiply won't overflow spuriously. */
3839 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3840 index = convert (type_for_size (POINTER_SIZE, 0), index);
3841
3842 /* Don't think the address has side effects
3843 just because the array does.
3844 (In some cases the address might have side effects,
3845 and we fail to record that fact here. However, it should not
3846 matter, since expand_expr should not care.) */
3847 TREE_SIDE_EFFECTS (array_adr) = 0;
3848
3849 elt = build1 (INDIRECT_REF, type,
3850 fold (build (PLUS_EXPR,
3851 TYPE_POINTER_TO (variant_type),
3852 array_adr,
3853 fold (build (MULT_EXPR,
3854 TYPE_POINTER_TO (variant_type),
3855 index,
3856 size_in_bytes (type))))));
3857
3858 /* Volatility, etc., of new expression is same as old
3859 expression. */
3860 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3861 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3862 TREE_READONLY (elt) = TREE_READONLY (exp);
3863
3864 return expand_expr (elt, target, tmode, modifier);
3865 }
3866
3867 /* Fold an expression like: "foo"[2].
3868 This is not done in fold so it won't happen inside &. */
3869
3870 if (TREE_CODE (array) == STRING_CST
3871 && TREE_CODE (index) == INTEGER_CST
3872 && !TREE_INT_CST_HIGH (index)
3873 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
3874 {
3875 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
3876 {
3877 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3878 TREE_TYPE (exp) = integer_type_node;
3879 return expand_expr (exp, target, tmode, modifier);
3880 }
3881 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
3882 {
3883 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
3884 TREE_TYPE (exp) = integer_type_node;
3885 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3886 exp),
3887 target, tmode, modifier);
3888 }
3889 }
3890
3891 /* If this is a constant index into a constant array,
3892 just get the value from the array. Handle both the cases when
3893 we have an explicit constructor and when our operand is a variable
3894 that was declared const. */
3895
3896 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3897 {
3898 if (TREE_CODE (index) == INTEGER_CST
3899 && TREE_INT_CST_HIGH (index) == 0)
3900 {
3901 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3902
3903 i = TREE_INT_CST_LOW (index);
3904 while (elem && i--)
3905 elem = TREE_CHAIN (elem);
3906 if (elem)
3907 return expand_expr (fold (TREE_VALUE (elem)), target,
3908 tmode, modifier);
3909 }
3910 }
3911
3912 else if (optimize >= 1
3913 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3914 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3915 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3916 {
3917 if (TREE_CODE (index) == INTEGER_CST
3918 && TREE_INT_CST_HIGH (index) == 0)
3919 {
3920 tree init = DECL_INITIAL (array);
3921
3922 i = TREE_INT_CST_LOW (index);
3923 if (TREE_CODE (init) == CONSTRUCTOR)
3924 {
3925 tree elem = CONSTRUCTOR_ELTS (init);
3926
3927 while (elem && i--)
3928 elem = TREE_CHAIN (elem);
3929 if (elem)
3930 return expand_expr (fold (TREE_VALUE (elem)), target,
3931 tmode, modifier);
3932 }
3933 else if (TREE_CODE (init) == STRING_CST
3934 && i < TREE_STRING_LENGTH (init))
3935 {
3936 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3937 return convert_to_mode (mode, temp, 0);
3938 }
3939 }
3940 }
3941 }
3942
3943 /* Treat array-ref with constant index as a component-ref. */
3944
3945 case COMPONENT_REF:
3946 case BIT_FIELD_REF:
3947 /* If the operand is a CONSTRUCTOR, we can just extract the
3948 appropriate field if it is present. */
3949 if (code != ARRAY_REF
3950 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3951 {
3952 tree elt;
3953
3954 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3955 elt = TREE_CHAIN (elt))
3956 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3957 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3958 }
3959
3960 {
3961 enum machine_mode mode1;
3962 int bitsize;
3963 int bitpos;
3964 tree offset;
3965 int volatilep = 0;
3966 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3967 &mode1, &unsignedp, &volatilep);
3968
3969 /* If we got back the original object, something is wrong. Perhaps
3970 we are evaluating an expression too early. In any event, don't
3971 infinitely recurse. */
3972 if (tem == exp)
3973 abort ();
3974
3975 /* In some cases, we will be offsetting OP0's address by a constant.
3976 So get it as a sum, if possible. If we will be using it
3977 directly in an insn, we validate it. */
3978 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3979
3980 /* If this is a constant, put it into a register if it is a
3981 legitimate constant and memory if it isn't. */
3982 if (CONSTANT_P (op0))
3983 {
3984 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3985 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3986 op0 = force_reg (mode, op0);
3987 else
3988 op0 = validize_mem (force_const_mem (mode, op0));
3989 }
3990
3991 if (offset != 0)
3992 {
3993 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3994
3995 if (GET_CODE (op0) != MEM)
3996 abort ();
3997 op0 = change_address (op0, VOIDmode,
3998 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3999 force_reg (Pmode, offset_rtx)));
4000 }
4001
4002 /* Don't forget about volatility even if this is a bitfield. */
4003 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4004 {
4005 op0 = copy_rtx (op0);
4006 MEM_VOLATILE_P (op0) = 1;
4007 }
4008
4009 /* In cases where an aligned union has an unaligned object
4010 as a field, we might be extracting a BLKmode value from
4011 an integer-mode (e.g., SImode) object. Handle this case
4012 by doing the extract into an object as wide as the field
4013 (which we know to be the width of a basic mode), then
4014 storing into memory, and changing the mode to BLKmode. */
4015 if (mode1 == VOIDmode
4016 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4017 && modifier != EXPAND_CONST_ADDRESS
4018 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4019 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4020 /* If the field isn't aligned enough to fetch as a memref,
4021 fetch it as a bit field. */
4022 || (STRICT_ALIGNMENT
4023 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4024 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4025 {
4026 enum machine_mode ext_mode = mode;
4027
4028 if (ext_mode == BLKmode)
4029 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4030
4031 if (ext_mode == BLKmode)
4032 abort ();
4033
4034 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4035 unsignedp, target, ext_mode, ext_mode,
4036 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
4037 int_size_in_bytes (TREE_TYPE (tem)));
4038 if (mode == BLKmode)
4039 {
4040 rtx new = assign_stack_temp (ext_mode,
4041 bitsize / BITS_PER_UNIT, 0);
4042
4043 emit_move_insn (new, op0);
4044 op0 = copy_rtx (new);
4045 PUT_MODE (op0, BLKmode);
4046 MEM_IN_STRUCT_P (op0) = 1;
4047 }
4048
4049 return op0;
4050 }
4051
4052 /* Get a reference to just this component. */
4053 if (modifier == EXPAND_CONST_ADDRESS
4054 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4055 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4056 (bitpos / BITS_PER_UNIT)));
4057 else
4058 op0 = change_address (op0, mode1,
4059 plus_constant (XEXP (op0, 0),
4060 (bitpos / BITS_PER_UNIT)));
4061 MEM_IN_STRUCT_P (op0) = 1;
4062 MEM_VOLATILE_P (op0) |= volatilep;
4063 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4064 return op0;
4065 if (target == 0)
4066 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4067 convert_move (target, op0, unsignedp);
4068 return target;
4069 }
4070
4071 case OFFSET_REF:
4072 {
4073 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4074 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4075 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4076 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4077 MEM_IN_STRUCT_P (temp) = 1;
4078 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4079 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4080 a location is accessed through a pointer to const does not mean
4081 that the value there can never change. */
4082 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4083 #endif
4084 return temp;
4085 }
4086
4087 /* Intended for a reference to a buffer of a file-object in Pascal.
4088 But it's not certain that a special tree code will really be
4089 necessary for these. INDIRECT_REF might work for them. */
4090 case BUFFER_REF:
4091 abort ();
4092
4093 /* IN_EXPR: Inlined pascal set IN expression.
4094
4095 Algorithm:
4096 rlo = set_low - (set_low%bits_per_word);
4097 the_word = set [ (index - rlo)/bits_per_word ];
4098 bit_index = index % bits_per_word;
4099 bitmask = 1 << bit_index;
4100 return !!(the_word & bitmask); */
4101 case IN_EXPR:
4102 preexpand_calls (exp);
4103 {
4104 tree set = TREE_OPERAND (exp, 0);
4105 tree index = TREE_OPERAND (exp, 1);
4106 tree set_type = TREE_TYPE (set);
4107
4108 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4109 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4110
4111 rtx index_val;
4112 rtx lo_r;
4113 rtx hi_r;
4114 rtx rlow;
4115 rtx diff, quo, rem, addr, bit, result;
4116 rtx setval, setaddr;
4117 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4118
4119 if (target == 0)
4120 target = gen_reg_rtx (mode);
4121
4122 /* If domain is empty, answer is no. */
4123 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4124 return const0_rtx;
4125
4126 index_val = expand_expr (index, 0, VOIDmode, 0);
4127 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4128 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4129 setval = expand_expr (set, 0, VOIDmode, 0);
4130 setaddr = XEXP (setval, 0);
4131
4132 /* Compare index against bounds, if they are constant. */
4133 if (GET_CODE (index_val) == CONST_INT
4134 && GET_CODE (lo_r) == CONST_INT
4135 && INTVAL (index_val) < INTVAL (lo_r))
4136 return const0_rtx;
4137
4138 if (GET_CODE (index_val) == CONST_INT
4139 && GET_CODE (hi_r) == CONST_INT
4140 && INTVAL (hi_r) < INTVAL (index_val))
4141 return const0_rtx;
4142
4143 /* If we get here, we have to generate the code for both cases
4144 (in range and out of range). */
4145
4146 op0 = gen_label_rtx ();
4147 op1 = gen_label_rtx ();
4148
4149 if (! (GET_CODE (index_val) == CONST_INT
4150 && GET_CODE (lo_r) == CONST_INT))
4151 {
4152 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4153 GET_MODE (index_val), 0, 0);
4154 emit_jump_insn (gen_blt (op1));
4155 }
4156
4157 if (! (GET_CODE (index_val) == CONST_INT
4158 && GET_CODE (hi_r) == CONST_INT))
4159 {
4160 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4161 GET_MODE (index_val), 0, 0);
4162 emit_jump_insn (gen_bgt (op1));
4163 }
4164
4165 /* Calculate the element number of bit zero in the first word
4166 of the set. */
4167 if (GET_CODE (lo_r) == CONST_INT)
4168 rlow = GEN_INT (INTVAL (lo_r)
4169 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4170 else
4171 rlow = expand_binop (index_mode, and_optab, lo_r,
4172 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4173 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4174
4175 diff = expand_binop (index_mode, sub_optab,
4176 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4177
4178 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4179 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4180 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4181 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4182 addr = memory_address (byte_mode,
4183 expand_binop (index_mode, add_optab,
4184 diff, setaddr, NULL_RTX, 0,
4185 OPTAB_LIB_WIDEN));
4186 /* Extract the bit we want to examine */
4187 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4188 gen_rtx (MEM, byte_mode, addr),
4189 make_tree (TREE_TYPE (index), rem),
4190 NULL_RTX, 1);
4191 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4192 GET_MODE (target) == byte_mode ? target : 0,
4193 1, OPTAB_LIB_WIDEN);
4194
4195 if (result != target)
4196 convert_move (target, result, 1);
4197
4198 /* Output the code to handle the out-of-range case. */
4199 emit_jump (op0);
4200 emit_label (op1);
4201 emit_move_insn (target, const0_rtx);
4202 emit_label (op0);
4203 return target;
4204 }
4205
4206 case WITH_CLEANUP_EXPR:
4207 if (RTL_EXPR_RTL (exp) == 0)
4208 {
4209 RTL_EXPR_RTL (exp)
4210 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4211 cleanups_this_call
4212 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4213 /* That's it for this cleanup. */
4214 TREE_OPERAND (exp, 2) = 0;
4215 }
4216 return RTL_EXPR_RTL (exp);
4217
4218 case CALL_EXPR:
4219 /* Check for a built-in function. */
4220 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4221 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4222 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4223 return expand_builtin (exp, target, subtarget, tmode, ignore);
4224 /* If this call was expanded already by preexpand_calls,
4225 just return the result we got. */
4226 if (CALL_EXPR_RTL (exp) != 0)
4227 return CALL_EXPR_RTL (exp);
4228 return expand_call (exp, target, ignore);
4229
4230 case NON_LVALUE_EXPR:
4231 case NOP_EXPR:
4232 case CONVERT_EXPR:
4233 case REFERENCE_EXPR:
4234 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4235 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4236 if (TREE_CODE (type) == UNION_TYPE)
4237 {
4238 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4239 if (target == 0)
4240 {
4241 if (mode == BLKmode)
4242 {
4243 if (TYPE_SIZE (type) == 0
4244 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4245 abort ();
4246 target = assign_stack_temp (BLKmode,
4247 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4248 + BITS_PER_UNIT - 1)
4249 / BITS_PER_UNIT, 0);
4250 }
4251 else
4252 target = gen_reg_rtx (mode);
4253 }
4254 if (GET_CODE (target) == MEM)
4255 /* Store data into beginning of memory target. */
4256 store_expr (TREE_OPERAND (exp, 0),
4257 change_address (target, TYPE_MODE (valtype), 0), 0);
4258
4259 else if (GET_CODE (target) == REG)
4260 /* Store this field into a union of the proper type. */
4261 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4262 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4263 VOIDmode, 0, 1,
4264 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4265 else
4266 abort ();
4267
4268 /* Return the entire union. */
4269 return target;
4270 }
4271 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4272 if (GET_MODE (op0) == mode)
4273 return op0;
4274 /* If arg is a constant integer being extended from a narrower mode,
4275 we must really truncate to get the extended bits right. Otherwise
4276 (unsigned long) (unsigned char) ("\377"[0])
4277 would come out as ffffffff. */
4278 if (GET_MODE (op0) == VOIDmode
4279 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4280 < GET_MODE_BITSIZE (mode)))
4281 {
4282 /* MODE must be narrower than HOST_BITS_PER_INT. */
4283 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4284
4285 if (width < HOST_BITS_PER_WIDE_INT)
4286 {
4287 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4288 : CONST_DOUBLE_LOW (op0));
4289 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4290 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4291 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4292 else
4293 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4294
4295 op0 = GEN_INT (val);
4296 }
4297 else
4298 {
4299 op0 = (simplify_unary_operation
4300 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4301 ? ZERO_EXTEND : SIGN_EXTEND),
4302 mode, op0,
4303 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4304 if (op0 == 0)
4305 abort ();
4306 }
4307 }
4308 if (GET_MODE (op0) == VOIDmode)
4309 return op0;
4310 if (modifier == EXPAND_INITIALIZER)
4311 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4312 if (flag_force_mem && GET_CODE (op0) == MEM)
4313 op0 = copy_to_reg (op0);
4314
4315 if (target == 0)
4316 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4317 else
4318 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4319 return target;
4320
4321 case PLUS_EXPR:
4322 /* We come here from MINUS_EXPR when the second operand is a constant. */
4323 plus_expr:
4324 this_optab = add_optab;
4325
4326 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4327 something else, make sure we add the register to the constant and
4328 then to the other thing. This case can occur during strength
4329 reduction and doing it this way will produce better code if the
4330 frame pointer or argument pointer is eliminated.
4331
4332 fold-const.c will ensure that the constant is always in the inner
4333 PLUS_EXPR, so the only case we need to do anything about is if
4334 sp, ap, or fp is our second argument, in which case we must swap
4335 the innermost first argument and our second argument. */
4336
4337 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4338 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4339 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4340 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4341 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4342 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4343 {
4344 tree t = TREE_OPERAND (exp, 1);
4345
4346 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4347 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4348 }
4349
4350 /* If the result is to be Pmode and we are adding an integer to
4351 something, we might be forming a constant. So try to use
4352 plus_constant. If it produces a sum and we can't accept it,
4353 use force_operand. This allows P = &ARR[const] to generate
4354 efficient code on machines where a SYMBOL_REF is not a valid
4355 address.
4356
4357 If this is an EXPAND_SUM call, always return the sum. */
4358 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4359 || mode == Pmode)
4360 {
4361 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4362 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4363 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4364 {
4365 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4366 EXPAND_SUM);
4367 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4368 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4369 op1 = force_operand (op1, target);
4370 return op1;
4371 }
4372
4373 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4374 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4375 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4376 {
4377 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4378 EXPAND_SUM);
4379 if (! CONSTANT_P (op0))
4380 {
4381 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4382 VOIDmode, modifier);
4383 goto both_summands;
4384 }
4385 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4386 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4387 op0 = force_operand (op0, target);
4388 return op0;
4389 }
4390 }
4391
4392 /* No sense saving up arithmetic to be done
4393 if it's all in the wrong mode to form part of an address.
4394 And force_operand won't know whether to sign-extend or
4395 zero-extend. */
4396 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4397 || mode != Pmode)
4398 goto binop;
4399
4400 preexpand_calls (exp);
4401 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4402 subtarget = 0;
4403
4404 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4405 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4406
4407 both_summands:
4408 /* Make sure any term that's a sum with a constant comes last. */
4409 if (GET_CODE (op0) == PLUS
4410 && CONSTANT_P (XEXP (op0, 1)))
4411 {
4412 temp = op0;
4413 op0 = op1;
4414 op1 = temp;
4415 }
4416 /* If adding to a sum including a constant,
4417 associate it to put the constant outside. */
4418 if (GET_CODE (op1) == PLUS
4419 && CONSTANT_P (XEXP (op1, 1)))
4420 {
4421 rtx constant_term = const0_rtx;
4422
4423 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4424 if (temp != 0)
4425 op0 = temp;
4426 /* Ensure that MULT comes first if there is one. */
4427 else if (GET_CODE (op0) == MULT)
4428 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4429 else
4430 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4431
4432 /* Let's also eliminate constants from op0 if possible. */
4433 op0 = eliminate_constant_term (op0, &constant_term);
4434
4435 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4436 their sum should be a constant. Form it into OP1, since the
4437 result we want will then be OP0 + OP1. */
4438
4439 temp = simplify_binary_operation (PLUS, mode, constant_term,
4440 XEXP (op1, 1));
4441 if (temp != 0)
4442 op1 = temp;
4443 else
4444 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4445 }
4446
4447 /* Put a constant term last and put a multiplication first. */
4448 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4449 temp = op1, op1 = op0, op0 = temp;
4450
4451 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4452 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4453
4454 case MINUS_EXPR:
4455 /* Handle difference of two symbolic constants,
4456 for the sake of an initializer. */
4457 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4458 && really_constant_p (TREE_OPERAND (exp, 0))
4459 && really_constant_p (TREE_OPERAND (exp, 1)))
4460 {
4461 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4462 VOIDmode, modifier);
4463 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4464 VOIDmode, modifier);
4465 return gen_rtx (MINUS, mode, op0, op1);
4466 }
4467 /* Convert A - const to A + (-const). */
4468 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4469 {
4470 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4471 fold (build1 (NEGATE_EXPR, type,
4472 TREE_OPERAND (exp, 1))));
4473 goto plus_expr;
4474 }
4475 this_optab = sub_optab;
4476 goto binop;
4477
4478 case MULT_EXPR:
4479 preexpand_calls (exp);
4480 /* If first operand is constant, swap them.
4481 Thus the following special case checks need only
4482 check the second operand. */
4483 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4484 {
4485 register tree t1 = TREE_OPERAND (exp, 0);
4486 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4487 TREE_OPERAND (exp, 1) = t1;
4488 }
4489
4490 /* Attempt to return something suitable for generating an
4491 indexed address, for machines that support that. */
4492
4493 if (modifier == EXPAND_SUM && mode == Pmode
4494 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4495 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4496 {
4497 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4498
4499 /* Apply distributive law if OP0 is x+c. */
4500 if (GET_CODE (op0) == PLUS
4501 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4502 return gen_rtx (PLUS, mode,
4503 gen_rtx (MULT, mode, XEXP (op0, 0),
4504 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4505 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4506 * INTVAL (XEXP (op0, 1))));
4507
4508 if (GET_CODE (op0) != REG)
4509 op0 = force_operand (op0, NULL_RTX);
4510 if (GET_CODE (op0) != REG)
4511 op0 = copy_to_mode_reg (mode, op0);
4512
4513 return gen_rtx (MULT, mode, op0,
4514 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4515 }
4516
4517 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4518 subtarget = 0;
4519
4520 /* Check for multiplying things that have been extended
4521 from a narrower type. If this machine supports multiplying
4522 in that narrower type with a result in the desired type,
4523 do it that way, and avoid the explicit type-conversion. */
4524 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4525 && TREE_CODE (type) == INTEGER_TYPE
4526 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4527 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4528 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4529 && int_fits_type_p (TREE_OPERAND (exp, 1),
4530 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4531 /* Don't use a widening multiply if a shift will do. */
4532 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4533 > HOST_BITS_PER_WIDE_INT)
4534 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4535 ||
4536 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4537 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4538 ==
4539 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4540 /* If both operands are extended, they must either both
4541 be zero-extended or both be sign-extended. */
4542 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4543 ==
4544 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4545 {
4546 enum machine_mode innermode
4547 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4548 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4549 ? umul_widen_optab : smul_widen_optab);
4550 if (mode == GET_MODE_WIDER_MODE (innermode)
4551 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4552 {
4553 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4554 NULL_RTX, VOIDmode, 0);
4555 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4556 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4557 VOIDmode, 0);
4558 else
4559 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4560 NULL_RTX, VOIDmode, 0);
4561 goto binop2;
4562 }
4563 }
4564 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4565 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4566 return expand_mult (mode, op0, op1, target, unsignedp);
4567
4568 case TRUNC_DIV_EXPR:
4569 case FLOOR_DIV_EXPR:
4570 case CEIL_DIV_EXPR:
4571 case ROUND_DIV_EXPR:
4572 case EXACT_DIV_EXPR:
4573 preexpand_calls (exp);
4574 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4575 subtarget = 0;
4576 /* Possible optimization: compute the dividend with EXPAND_SUM
4577 then if the divisor is constant can optimize the case
4578 where some terms of the dividend have coeffs divisible by it. */
4579 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4580 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4581 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4582
4583 case RDIV_EXPR:
4584 this_optab = flodiv_optab;
4585 goto binop;
4586
4587 case TRUNC_MOD_EXPR:
4588 case FLOOR_MOD_EXPR:
4589 case CEIL_MOD_EXPR:
4590 case ROUND_MOD_EXPR:
4591 preexpand_calls (exp);
4592 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4593 subtarget = 0;
4594 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4595 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4596 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4597
4598 case FIX_ROUND_EXPR:
4599 case FIX_FLOOR_EXPR:
4600 case FIX_CEIL_EXPR:
4601 abort (); /* Not used for C. */
4602
4603 case FIX_TRUNC_EXPR:
4604 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4605 if (target == 0)
4606 target = gen_reg_rtx (mode);
4607 expand_fix (target, op0, unsignedp);
4608 return target;
4609
4610 case FLOAT_EXPR:
4611 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4612 if (target == 0)
4613 target = gen_reg_rtx (mode);
4614 /* expand_float can't figure out what to do if FROM has VOIDmode.
4615 So give it the correct mode. With -O, cse will optimize this. */
4616 if (GET_MODE (op0) == VOIDmode)
4617 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4618 op0);
4619 expand_float (target, op0,
4620 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4621 return target;
4622
4623 case NEGATE_EXPR:
4624 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4625 temp = expand_unop (mode, neg_optab, op0, target, 0);
4626 if (temp == 0)
4627 abort ();
4628 return temp;
4629
4630 case ABS_EXPR:
4631 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4632
4633 /* Handle complex values specially. */
4634 {
4635 enum machine_mode opmode
4636 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4637
4638 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4639 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4640 return expand_complex_abs (opmode, op0, target, unsignedp);
4641 }
4642
4643 /* Unsigned abs is simply the operand. Testing here means we don't
4644 risk generating incorrect code below. */
4645 if (TREE_UNSIGNED (type))
4646 return op0;
4647
4648 /* First try to do it with a special abs instruction. */
4649 temp = expand_unop (mode, abs_optab, op0, target, 0);
4650 if (temp != 0)
4651 return temp;
4652
4653 /* If this machine has expensive jumps, we can do integer absolute
4654 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4655 where W is the width of MODE. */
4656
4657 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4658 {
4659 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4660 size_int (GET_MODE_BITSIZE (mode) - 1),
4661 NULL_RTX, 0);
4662
4663 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4664 OPTAB_LIB_WIDEN);
4665 if (temp != 0)
4666 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4667 OPTAB_LIB_WIDEN);
4668
4669 if (temp != 0)
4670 return temp;
4671 }
4672
4673 /* If that does not win, use conditional jump and negate. */
4674 target = original_target;
4675 temp = gen_label_rtx ();
4676 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4677 || (GET_CODE (target) == REG
4678 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4679 target = gen_reg_rtx (mode);
4680 emit_move_insn (target, op0);
4681 emit_cmp_insn (target,
4682 expand_expr (convert (type, integer_zero_node),
4683 NULL_RTX, VOIDmode, 0),
4684 GE, NULL_RTX, mode, 0, 0);
4685 NO_DEFER_POP;
4686 emit_jump_insn (gen_bge (temp));
4687 op0 = expand_unop (mode, neg_optab, target, target, 0);
4688 if (op0 != target)
4689 emit_move_insn (target, op0);
4690 emit_label (temp);
4691 OK_DEFER_POP;
4692 return target;
4693
4694 case MAX_EXPR:
4695 case MIN_EXPR:
4696 target = original_target;
4697 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4698 || (GET_CODE (target) == REG
4699 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4700 target = gen_reg_rtx (mode);
4701 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4702 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4703
4704 /* First try to do it with a special MIN or MAX instruction.
4705 If that does not win, use a conditional jump to select the proper
4706 value. */
4707 this_optab = (TREE_UNSIGNED (type)
4708 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4709 : (code == MIN_EXPR ? smin_optab : smax_optab));
4710
4711 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4712 OPTAB_WIDEN);
4713 if (temp != 0)
4714 return temp;
4715
4716 if (target != op0)
4717 emit_move_insn (target, op0);
4718 op0 = gen_label_rtx ();
4719 /* If this mode is an integer too wide to compare properly,
4720 compare word by word. Rely on cse to optimize constant cases. */
4721 if (GET_MODE_CLASS (mode) == MODE_INT
4722 && !can_compare_p (mode))
4723 {
4724 if (code == MAX_EXPR)
4725 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
4726 else
4727 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
4728 emit_move_insn (target, op1);
4729 }
4730 else
4731 {
4732 if (code == MAX_EXPR)
4733 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4734 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4735 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4736 else
4737 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4738 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4739 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4740 if (temp == const0_rtx)
4741 emit_move_insn (target, op1);
4742 else if (temp != const_true_rtx)
4743 {
4744 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4745 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4746 else
4747 abort ();
4748 emit_move_insn (target, op1);
4749 }
4750 }
4751 emit_label (op0);
4752 return target;
4753
4754 /* ??? Can optimize when the operand of this is a bitwise operation,
4755 by using a different bitwise operation. */
4756 case BIT_NOT_EXPR:
4757 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4758 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4759 if (temp == 0)
4760 abort ();
4761 return temp;
4762
4763 case FFS_EXPR:
4764 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4765 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4766 if (temp == 0)
4767 abort ();
4768 return temp;
4769
4770 /* ??? Can optimize bitwise operations with one arg constant.
4771 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4772 and (a bitwise1 b) bitwise2 b (etc)
4773 but that is probably not worth while. */
4774
4775 /* BIT_AND_EXPR is for bitwise anding.
4776 TRUTH_AND_EXPR is for anding two boolean values
4777 when we want in all cases to compute both of them.
4778 In general it is fastest to do TRUTH_AND_EXPR by
4779 computing both operands as actual zero-or-1 values
4780 and then bitwise anding. In cases where there cannot
4781 be any side effects, better code would be made by
4782 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4783 but the question is how to recognize those cases. */
4784
4785 case TRUTH_AND_EXPR:
4786 case BIT_AND_EXPR:
4787 this_optab = and_optab;
4788 goto binop;
4789
4790 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4791 case TRUTH_OR_EXPR:
4792 case BIT_IOR_EXPR:
4793 this_optab = ior_optab;
4794 goto binop;
4795
4796 case TRUTH_XOR_EXPR:
4797 case BIT_XOR_EXPR:
4798 this_optab = xor_optab;
4799 goto binop;
4800
4801 case LSHIFT_EXPR:
4802 case RSHIFT_EXPR:
4803 case LROTATE_EXPR:
4804 case RROTATE_EXPR:
4805 preexpand_calls (exp);
4806 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4807 subtarget = 0;
4808 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4809 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4810 unsignedp);
4811
4812 /* Could determine the answer when only additive constants differ.
4813 Also, the addition of one can be handled by changing the condition. */
4814 case LT_EXPR:
4815 case LE_EXPR:
4816 case GT_EXPR:
4817 case GE_EXPR:
4818 case EQ_EXPR:
4819 case NE_EXPR:
4820 preexpand_calls (exp);
4821 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4822 if (temp != 0)
4823 return temp;
4824 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4825 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4826 && original_target
4827 && GET_CODE (original_target) == REG
4828 && (GET_MODE (original_target)
4829 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4830 {
4831 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4832 if (temp != original_target)
4833 temp = copy_to_reg (temp);
4834 op1 = gen_label_rtx ();
4835 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4836 GET_MODE (temp), unsignedp, 0);
4837 emit_jump_insn (gen_beq (op1));
4838 emit_move_insn (temp, const1_rtx);
4839 emit_label (op1);
4840 return temp;
4841 }
4842 /* If no set-flag instruction, must generate a conditional
4843 store into a temporary variable. Drop through
4844 and handle this like && and ||. */
4845
4846 case TRUTH_ANDIF_EXPR:
4847 case TRUTH_ORIF_EXPR:
4848 if (! ignore
4849 && (target == 0 || ! safe_from_p (target, exp)
4850 /* Make sure we don't have a hard reg (such as function's return
4851 value) live across basic blocks, if not optimizing. */
4852 || (!optimize && GET_CODE (target) == REG
4853 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
4854 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4855
4856 if (target)
4857 emit_clr_insn (target);
4858
4859 op1 = gen_label_rtx ();
4860 jumpifnot (exp, op1);
4861
4862 if (target)
4863 emit_0_to_1_insn (target);
4864
4865 emit_label (op1);
4866 return ignore ? const0_rtx : target;
4867
4868 case TRUTH_NOT_EXPR:
4869 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4870 /* The parser is careful to generate TRUTH_NOT_EXPR
4871 only with operands that are always zero or one. */
4872 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4873 target, 1, OPTAB_LIB_WIDEN);
4874 if (temp == 0)
4875 abort ();
4876 return temp;
4877
4878 case COMPOUND_EXPR:
4879 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4880 emit_queue ();
4881 return expand_expr (TREE_OPERAND (exp, 1),
4882 (ignore ? const0_rtx : target),
4883 VOIDmode, 0);
4884
4885 case COND_EXPR:
4886 {
4887 /* Note that COND_EXPRs whose type is a structure or union
4888 are required to be constructed to contain assignments of
4889 a temporary variable, so that we can evaluate them here
4890 for side effect only. If type is void, we must do likewise. */
4891
4892 /* If an arm of the branch requires a cleanup,
4893 only that cleanup is performed. */
4894
4895 tree singleton = 0;
4896 tree binary_op = 0, unary_op = 0;
4897 tree old_cleanups = cleanups_this_call;
4898 cleanups_this_call = 0;
4899
4900 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4901 convert it to our mode, if necessary. */
4902 if (integer_onep (TREE_OPERAND (exp, 1))
4903 && integer_zerop (TREE_OPERAND (exp, 2))
4904 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4905 {
4906 if (ignore)
4907 {
4908 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4909 modifier);
4910 return const0_rtx;
4911 }
4912
4913 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4914 if (GET_MODE (op0) == mode)
4915 return op0;
4916 if (target == 0)
4917 target = gen_reg_rtx (mode);
4918 convert_move (target, op0, unsignedp);
4919 return target;
4920 }
4921
4922 /* If we are not to produce a result, we have no target. Otherwise,
4923 if a target was specified use it; it will not be used as an
4924 intermediate target unless it is safe. If no target, use a
4925 temporary. */
4926
4927 if (ignore)
4928 temp = 0;
4929 else if (original_target
4930 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4931 temp = original_target;
4932 else if (mode == BLKmode)
4933 {
4934 if (TYPE_SIZE (type) == 0
4935 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4936 abort ();
4937 temp = assign_stack_temp (BLKmode,
4938 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4939 + BITS_PER_UNIT - 1)
4940 / BITS_PER_UNIT, 0);
4941 }
4942 else
4943 temp = gen_reg_rtx (mode);
4944
4945 /* Check for X ? A + B : A. If we have this, we can copy
4946 A to the output and conditionally add B. Similarly for unary
4947 operations. Don't do this if X has side-effects because
4948 those side effects might affect A or B and the "?" operation is
4949 a sequence point in ANSI. (We test for side effects later.) */
4950
4951 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4952 && operand_equal_p (TREE_OPERAND (exp, 2),
4953 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4954 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4955 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4956 && operand_equal_p (TREE_OPERAND (exp, 1),
4957 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4958 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4959 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4960 && operand_equal_p (TREE_OPERAND (exp, 2),
4961 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4962 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4963 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4964 && operand_equal_p (TREE_OPERAND (exp, 1),
4965 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4966 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4967
4968 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4969 operation, do this as A + (X != 0). Similarly for other simple
4970 binary operators. */
4971 if (temp && singleton && binary_op
4972 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4973 && (TREE_CODE (binary_op) == PLUS_EXPR
4974 || TREE_CODE (binary_op) == MINUS_EXPR
4975 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4976 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4977 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4978 && integer_onep (TREE_OPERAND (binary_op, 1))
4979 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4980 {
4981 rtx result;
4982 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4983 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4984 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4985 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4986 : and_optab);
4987
4988 /* If we had X ? A : A + 1, do this as A + (X == 0).
4989
4990 We have to invert the truth value here and then put it
4991 back later if do_store_flag fails. We cannot simply copy
4992 TREE_OPERAND (exp, 0) to another variable and modify that
4993 because invert_truthvalue can modify the tree pointed to
4994 by its argument. */
4995 if (singleton == TREE_OPERAND (exp, 1))
4996 TREE_OPERAND (exp, 0)
4997 = invert_truthvalue (TREE_OPERAND (exp, 0));
4998
4999 result = do_store_flag (TREE_OPERAND (exp, 0),
5000 (safe_from_p (temp, singleton)
5001 ? temp : NULL_RTX),
5002 mode, BRANCH_COST <= 1);
5003
5004 if (result)
5005 {
5006 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5007 return expand_binop (mode, boptab, op1, result, temp,
5008 unsignedp, OPTAB_LIB_WIDEN);
5009 }
5010 else if (singleton == TREE_OPERAND (exp, 1))
5011 TREE_OPERAND (exp, 0)
5012 = invert_truthvalue (TREE_OPERAND (exp, 0));
5013 }
5014
5015 NO_DEFER_POP;
5016 op0 = gen_label_rtx ();
5017
5018 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5019 {
5020 if (temp != 0)
5021 {
5022 /* If the target conflicts with the other operand of the
5023 binary op, we can't use it. Also, we can't use the target
5024 if it is a hard register, because evaluating the condition
5025 might clobber it. */
5026 if ((binary_op
5027 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5028 || (GET_CODE (temp) == REG
5029 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5030 temp = gen_reg_rtx (mode);
5031 store_expr (singleton, temp, 0);
5032 }
5033 else
5034 expand_expr (singleton,
5035 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5036 if (cleanups_this_call)
5037 {
5038 sorry ("aggregate value in COND_EXPR");
5039 cleanups_this_call = 0;
5040 }
5041 if (singleton == TREE_OPERAND (exp, 1))
5042 jumpif (TREE_OPERAND (exp, 0), op0);
5043 else
5044 jumpifnot (TREE_OPERAND (exp, 0), op0);
5045
5046 if (binary_op && temp == 0)
5047 /* Just touch the other operand. */
5048 expand_expr (TREE_OPERAND (binary_op, 1),
5049 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5050 else if (binary_op)
5051 store_expr (build (TREE_CODE (binary_op), type,
5052 make_tree (type, temp),
5053 TREE_OPERAND (binary_op, 1)),
5054 temp, 0);
5055 else
5056 store_expr (build1 (TREE_CODE (unary_op), type,
5057 make_tree (type, temp)),
5058 temp, 0);
5059 op1 = op0;
5060 }
5061 #if 0
5062 /* This is now done in jump.c and is better done there because it
5063 produces shorter register lifetimes. */
5064
5065 /* Check for both possibilities either constants or variables
5066 in registers (but not the same as the target!). If so, can
5067 save branches by assigning one, branching, and assigning the
5068 other. */
5069 else if (temp && GET_MODE (temp) != BLKmode
5070 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5071 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5072 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5073 && DECL_RTL (TREE_OPERAND (exp, 1))
5074 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5075 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5076 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5077 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5078 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5079 && DECL_RTL (TREE_OPERAND (exp, 2))
5080 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5081 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5082 {
5083 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5084 temp = gen_reg_rtx (mode);
5085 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5086 jumpifnot (TREE_OPERAND (exp, 0), op0);
5087 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5088 op1 = op0;
5089 }
5090 #endif
5091 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5092 comparison operator. If we have one of these cases, set the
5093 output to A, branch on A (cse will merge these two references),
5094 then set the output to FOO. */
5095 else if (temp
5096 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5097 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5098 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5099 TREE_OPERAND (exp, 1), 0)
5100 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5101 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5102 {
5103 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5104 temp = gen_reg_rtx (mode);
5105 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5106 jumpif (TREE_OPERAND (exp, 0), op0);
5107 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5108 op1 = op0;
5109 }
5110 else if (temp
5111 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5112 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5113 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5114 TREE_OPERAND (exp, 2), 0)
5115 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5116 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5117 {
5118 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5119 temp = gen_reg_rtx (mode);
5120 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5121 jumpifnot (TREE_OPERAND (exp, 0), op0);
5122 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5123 op1 = op0;
5124 }
5125 else
5126 {
5127 op1 = gen_label_rtx ();
5128 jumpifnot (TREE_OPERAND (exp, 0), op0);
5129 if (temp != 0)
5130 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5131 else
5132 expand_expr (TREE_OPERAND (exp, 1),
5133 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5134 if (cleanups_this_call)
5135 {
5136 sorry ("aggregate value in COND_EXPR");
5137 cleanups_this_call = 0;
5138 }
5139
5140 emit_queue ();
5141 emit_jump_insn (gen_jump (op1));
5142 emit_barrier ();
5143 emit_label (op0);
5144 if (temp != 0)
5145 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5146 else
5147 expand_expr (TREE_OPERAND (exp, 2),
5148 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5149 }
5150
5151 if (cleanups_this_call)
5152 {
5153 sorry ("aggregate value in COND_EXPR");
5154 cleanups_this_call = 0;
5155 }
5156
5157 emit_queue ();
5158 emit_label (op1);
5159 OK_DEFER_POP;
5160 cleanups_this_call = old_cleanups;
5161 return temp;
5162 }
5163
5164 case TARGET_EXPR:
5165 {
5166 /* Something needs to be initialized, but we didn't know
5167 where that thing was when building the tree. For example,
5168 it could be the return value of a function, or a parameter
5169 to a function which lays down in the stack, or a temporary
5170 variable which must be passed by reference.
5171
5172 We guarantee that the expression will either be constructed
5173 or copied into our original target. */
5174
5175 tree slot = TREE_OPERAND (exp, 0);
5176 tree exp1;
5177
5178 if (TREE_CODE (slot) != VAR_DECL)
5179 abort ();
5180
5181 if (target == 0)
5182 {
5183 if (DECL_RTL (slot) != 0)
5184 {
5185 target = DECL_RTL (slot);
5186 /* If we have already expanded the slot, so don't do
5187 it again. (mrs) */
5188 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5189 return target;
5190 }
5191 else
5192 {
5193 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5194 /* All temp slots at this level must not conflict. */
5195 preserve_temp_slots (target);
5196 DECL_RTL (slot) = target;
5197 }
5198
5199 #if 0
5200 /* I bet this needs to be done, and I bet that it needs to
5201 be above, inside the else clause. The reason is
5202 simple, how else is it going to get cleaned up? (mrs)
5203
5204 The reason is probably did not work before, and was
5205 commented out is because this was re-expanding already
5206 expanded target_exprs (target == 0 and DECL_RTL (slot)
5207 != 0) also cleaning them up many times as well. :-( */
5208
5209 /* Since SLOT is not known to the called function
5210 to belong to its stack frame, we must build an explicit
5211 cleanup. This case occurs when we must build up a reference
5212 to pass the reference as an argument. In this case,
5213 it is very likely that such a reference need not be
5214 built here. */
5215
5216 if (TREE_OPERAND (exp, 2) == 0)
5217 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5218 if (TREE_OPERAND (exp, 2))
5219 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5220 cleanups_this_call);
5221 #endif
5222 }
5223 else
5224 {
5225 /* This case does occur, when expanding a parameter which
5226 needs to be constructed on the stack. The target
5227 is the actual stack address that we want to initialize.
5228 The function we call will perform the cleanup in this case. */
5229
5230 /* If we have already assigned it space, use that space,
5231 not target that we were passed in, as our target
5232 parameter is only a hint. */
5233 if (DECL_RTL (slot) != 0)
5234 {
5235 target = DECL_RTL (slot);
5236 /* If we have already expanded the slot, so don't do
5237 it again. (mrs) */
5238 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5239 return target;
5240 }
5241
5242 DECL_RTL (slot) = target;
5243 }
5244
5245 exp1 = TREE_OPERAND (exp, 1);
5246 /* Mark it as expanded. */
5247 TREE_OPERAND (exp, 1) = NULL_TREE;
5248
5249 return expand_expr (exp1, target, tmode, modifier);
5250 }
5251
5252 case INIT_EXPR:
5253 {
5254 tree lhs = TREE_OPERAND (exp, 0);
5255 tree rhs = TREE_OPERAND (exp, 1);
5256 tree noncopied_parts = 0;
5257 tree lhs_type = TREE_TYPE (lhs);
5258
5259 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5260 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5261 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5262 TYPE_NONCOPIED_PARTS (lhs_type));
5263 while (noncopied_parts != 0)
5264 {
5265 expand_assignment (TREE_VALUE (noncopied_parts),
5266 TREE_PURPOSE (noncopied_parts), 0, 0);
5267 noncopied_parts = TREE_CHAIN (noncopied_parts);
5268 }
5269 return temp;
5270 }
5271
5272 case MODIFY_EXPR:
5273 {
5274 /* If lhs is complex, expand calls in rhs before computing it.
5275 That's so we don't compute a pointer and save it over a call.
5276 If lhs is simple, compute it first so we can give it as a
5277 target if the rhs is just a call. This avoids an extra temp and copy
5278 and that prevents a partial-subsumption which makes bad code.
5279 Actually we could treat component_ref's of vars like vars. */
5280
5281 tree lhs = TREE_OPERAND (exp, 0);
5282 tree rhs = TREE_OPERAND (exp, 1);
5283 tree noncopied_parts = 0;
5284 tree lhs_type = TREE_TYPE (lhs);
5285
5286 temp = 0;
5287
5288 if (TREE_CODE (lhs) != VAR_DECL
5289 && TREE_CODE (lhs) != RESULT_DECL
5290 && TREE_CODE (lhs) != PARM_DECL)
5291 preexpand_calls (exp);
5292
5293 /* Check for |= or &= of a bitfield of size one into another bitfield
5294 of size 1. In this case, (unless we need the result of the
5295 assignment) we can do this more efficiently with a
5296 test followed by an assignment, if necessary.
5297
5298 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5299 things change so we do, this code should be enhanced to
5300 support it. */
5301 if (ignore
5302 && TREE_CODE (lhs) == COMPONENT_REF
5303 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5304 || TREE_CODE (rhs) == BIT_AND_EXPR)
5305 && TREE_OPERAND (rhs, 0) == lhs
5306 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5307 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5308 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5309 {
5310 rtx label = gen_label_rtx ();
5311
5312 do_jump (TREE_OPERAND (rhs, 1),
5313 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5314 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5315 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5316 (TREE_CODE (rhs) == BIT_IOR_EXPR
5317 ? integer_one_node
5318 : integer_zero_node)),
5319 0, 0);
5320 do_pending_stack_adjust ();
5321 emit_label (label);
5322 return const0_rtx;
5323 }
5324
5325 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5326 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5327 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5328 TYPE_NONCOPIED_PARTS (lhs_type));
5329
5330 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5331 while (noncopied_parts != 0)
5332 {
5333 expand_assignment (TREE_PURPOSE (noncopied_parts),
5334 TREE_VALUE (noncopied_parts), 0, 0);
5335 noncopied_parts = TREE_CHAIN (noncopied_parts);
5336 }
5337 return temp;
5338 }
5339
5340 case PREINCREMENT_EXPR:
5341 case PREDECREMENT_EXPR:
5342 return expand_increment (exp, 0);
5343
5344 case POSTINCREMENT_EXPR:
5345 case POSTDECREMENT_EXPR:
5346 /* Faster to treat as pre-increment if result is not used. */
5347 return expand_increment (exp, ! ignore);
5348
5349 case ADDR_EXPR:
5350 /* Are we taking the address of a nested function? */
5351 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5352 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5353 {
5354 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5355 op0 = force_operand (op0, target);
5356 }
5357 else
5358 {
5359 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5360 (modifier == EXPAND_INITIALIZER
5361 ? modifier : EXPAND_CONST_ADDRESS));
5362
5363 /* We would like the object in memory. If it is a constant,
5364 we can have it be statically allocated into memory. For
5365 a non-constant (REG or SUBREG), we need to allocate some
5366 memory and store the value into it. */
5367
5368 if (CONSTANT_P (op0))
5369 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5370 op0);
5371
5372 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
5373 {
5374 /* If this object is in a register, it must be not
5375 be BLKmode. */
5376 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5377 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5378 rtx memloc
5379 = assign_stack_temp (inner_mode,
5380 int_size_in_bytes (inner_type), 1);
5381
5382 emit_move_insn (memloc, op0);
5383 op0 = memloc;
5384 }
5385
5386 if (GET_CODE (op0) != MEM)
5387 abort ();
5388
5389 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5390 return XEXP (op0, 0);
5391 op0 = force_operand (XEXP (op0, 0), target);
5392 }
5393 if (flag_force_addr && GET_CODE (op0) != REG)
5394 return force_reg (Pmode, op0);
5395 return op0;
5396
5397 case ENTRY_VALUE_EXPR:
5398 abort ();
5399
5400 /* COMPLEX type for Extended Pascal & Fortran */
5401 case COMPLEX_EXPR:
5402 {
5403 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5404
5405 rtx prev;
5406
5407 /* Get the rtx code of the operands. */
5408 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5409 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5410
5411 if (! target)
5412 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5413
5414 prev = get_last_insn ();
5415
5416 /* Tell flow that the whole of the destination is being set. */
5417 if (GET_CODE (target) == REG)
5418 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5419
5420 /* Move the real (op0) and imaginary (op1) parts to their location. */
5421 emit_move_insn (gen_realpart (mode, target), op0);
5422 emit_move_insn (gen_imagpart (mode, target), op1);
5423
5424 /* Complex construction should appear as a single unit. */
5425 group_insns (prev);
5426
5427 return target;
5428 }
5429
5430 case REALPART_EXPR:
5431 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5432 return gen_realpart (mode, op0);
5433
5434 case IMAGPART_EXPR:
5435 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5436 return gen_imagpart (mode, op0);
5437
5438 case CONJ_EXPR:
5439 {
5440 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5441 rtx imag_t;
5442 rtx prev;
5443
5444 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5445
5446 if (! target)
5447 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5448
5449 prev = get_last_insn ();
5450
5451 /* Tell flow that the whole of the destination is being set. */
5452 if (GET_CODE (target) == REG)
5453 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5454
5455 /* Store the realpart and the negated imagpart to target. */
5456 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5457
5458 imag_t = gen_imagpart (mode, target);
5459 temp = expand_unop (mode, neg_optab,
5460 gen_imagpart (mode, op0), imag_t, 0);
5461 if (temp != imag_t)
5462 emit_move_insn (imag_t, temp);
5463
5464 /* Conjugate should appear as a single unit */
5465 group_insns (prev);
5466
5467 return target;
5468 }
5469
5470 case ERROR_MARK:
5471 op0 = CONST0_RTX (tmode);
5472 if (op0 != 0)
5473 return op0;
5474 return const0_rtx;
5475
5476 default:
5477 return (*lang_expand_expr) (exp, target, tmode, modifier);
5478 }
5479
5480 /* Here to do an ordinary binary operator, generating an instruction
5481 from the optab already placed in `this_optab'. */
5482 binop:
5483 preexpand_calls (exp);
5484 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5485 subtarget = 0;
5486 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5487 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5488 binop2:
5489 temp = expand_binop (mode, this_optab, op0, op1, target,
5490 unsignedp, OPTAB_LIB_WIDEN);
5491 if (temp == 0)
5492 abort ();
5493 return temp;
5494 }
5495 \f
5496 /* Return the alignment in bits of EXP, a pointer valued expression.
5497 But don't return more than MAX_ALIGN no matter what.
5498 The alignment returned is, by default, the alignment of the thing that
5499 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5500
5501 Otherwise, look at the expression to see if we can do better, i.e., if the
5502 expression is actually pointing at an object whose alignment is tighter. */
5503
5504 static int
5505 get_pointer_alignment (exp, max_align)
5506 tree exp;
5507 unsigned max_align;
5508 {
5509 unsigned align, inner;
5510
5511 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5512 return 0;
5513
5514 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5515 align = MIN (align, max_align);
5516
5517 while (1)
5518 {
5519 switch (TREE_CODE (exp))
5520 {
5521 case NOP_EXPR:
5522 case CONVERT_EXPR:
5523 case NON_LVALUE_EXPR:
5524 exp = TREE_OPERAND (exp, 0);
5525 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5526 return align;
5527 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5528 inner = MIN (inner, max_align);
5529 align = MAX (align, inner);
5530 break;
5531
5532 case PLUS_EXPR:
5533 /* If sum of pointer + int, restrict our maximum alignment to that
5534 imposed by the integer. If not, we can't do any better than
5535 ALIGN. */
5536 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5537 return align;
5538
5539 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5540 & (max_align - 1))
5541 != 0)
5542 max_align >>= 1;
5543
5544 exp = TREE_OPERAND (exp, 0);
5545 break;
5546
5547 case ADDR_EXPR:
5548 /* See what we are pointing at and look at its alignment. */
5549 exp = TREE_OPERAND (exp, 0);
5550 if (TREE_CODE (exp) == FUNCTION_DECL)
5551 align = MAX (align, FUNCTION_BOUNDARY);
5552 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5553 align = MAX (align, DECL_ALIGN (exp));
5554 #ifdef CONSTANT_ALIGNMENT
5555 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5556 align = CONSTANT_ALIGNMENT (exp, align);
5557 #endif
5558 return MIN (align, max_align);
5559
5560 default:
5561 return align;
5562 }
5563 }
5564 }
5565 \f
5566 /* Return the tree node and offset if a given argument corresponds to
5567 a string constant. */
5568
5569 static tree
5570 string_constant (arg, ptr_offset)
5571 tree arg;
5572 tree *ptr_offset;
5573 {
5574 STRIP_NOPS (arg);
5575
5576 if (TREE_CODE (arg) == ADDR_EXPR
5577 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5578 {
5579 *ptr_offset = integer_zero_node;
5580 return TREE_OPERAND (arg, 0);
5581 }
5582 else if (TREE_CODE (arg) == PLUS_EXPR)
5583 {
5584 tree arg0 = TREE_OPERAND (arg, 0);
5585 tree arg1 = TREE_OPERAND (arg, 1);
5586
5587 STRIP_NOPS (arg0);
5588 STRIP_NOPS (arg1);
5589
5590 if (TREE_CODE (arg0) == ADDR_EXPR
5591 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5592 {
5593 *ptr_offset = arg1;
5594 return TREE_OPERAND (arg0, 0);
5595 }
5596 else if (TREE_CODE (arg1) == ADDR_EXPR
5597 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5598 {
5599 *ptr_offset = arg0;
5600 return TREE_OPERAND (arg1, 0);
5601 }
5602 }
5603
5604 return 0;
5605 }
5606
5607 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5608 way, because it could contain a zero byte in the middle.
5609 TREE_STRING_LENGTH is the size of the character array, not the string.
5610
5611 Unfortunately, string_constant can't access the values of const char
5612 arrays with initializers, so neither can we do so here. */
5613
5614 static tree
5615 c_strlen (src)
5616 tree src;
5617 {
5618 tree offset_node;
5619 int offset, max;
5620 char *ptr;
5621
5622 src = string_constant (src, &offset_node);
5623 if (src == 0)
5624 return 0;
5625 max = TREE_STRING_LENGTH (src);
5626 ptr = TREE_STRING_POINTER (src);
5627 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5628 {
5629 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5630 compute the offset to the following null if we don't know where to
5631 start searching for it. */
5632 int i;
5633 for (i = 0; i < max; i++)
5634 if (ptr[i] == 0)
5635 return 0;
5636 /* We don't know the starting offset, but we do know that the string
5637 has no internal zero bytes. We can assume that the offset falls
5638 within the bounds of the string; otherwise, the programmer deserves
5639 what he gets. Subtract the offset from the length of the string,
5640 and return that. */
5641 /* This would perhaps not be valid if we were dealing with named
5642 arrays in addition to literal string constants. */
5643 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5644 }
5645
5646 /* We have a known offset into the string. Start searching there for
5647 a null character. */
5648 if (offset_node == 0)
5649 offset = 0;
5650 else
5651 {
5652 /* Did we get a long long offset? If so, punt. */
5653 if (TREE_INT_CST_HIGH (offset_node) != 0)
5654 return 0;
5655 offset = TREE_INT_CST_LOW (offset_node);
5656 }
5657 /* If the offset is known to be out of bounds, warn, and call strlen at
5658 runtime. */
5659 if (offset < 0 || offset > max)
5660 {
5661 warning ("offset outside bounds of constant string");
5662 return 0;
5663 }
5664 /* Use strlen to search for the first zero byte. Since any strings
5665 constructed with build_string will have nulls appended, we win even
5666 if we get handed something like (char[4])"abcd".
5667
5668 Since OFFSET is our starting index into the string, no further
5669 calculation is needed. */
5670 return size_int (strlen (ptr + offset));
5671 }
5672 \f
5673 /* Expand an expression EXP that calls a built-in function,
5674 with result going to TARGET if that's convenient
5675 (and in mode MODE if that's convenient).
5676 SUBTARGET may be used as the target for computing one of EXP's operands.
5677 IGNORE is nonzero if the value is to be ignored. */
5678
5679 static rtx
5680 expand_builtin (exp, target, subtarget, mode, ignore)
5681 tree exp;
5682 rtx target;
5683 rtx subtarget;
5684 enum machine_mode mode;
5685 int ignore;
5686 {
5687 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5688 tree arglist = TREE_OPERAND (exp, 1);
5689 rtx op0;
5690 rtx lab1, insns;
5691 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5692 optab builtin_optab;
5693
5694 switch (DECL_FUNCTION_CODE (fndecl))
5695 {
5696 case BUILT_IN_ABS:
5697 case BUILT_IN_LABS:
5698 case BUILT_IN_FABS:
5699 /* build_function_call changes these into ABS_EXPR. */
5700 abort ();
5701
5702 case BUILT_IN_SIN:
5703 case BUILT_IN_COS:
5704 case BUILT_IN_FSQRT:
5705 /* If not optimizing, call the library function. */
5706 if (! optimize)
5707 break;
5708
5709 if (arglist == 0
5710 /* Arg could be wrong type if user redeclared this fcn wrong. */
5711 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5712 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5713
5714 /* Stabilize and compute the argument. */
5715 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5716 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5717 {
5718 exp = copy_node (exp);
5719 arglist = copy_node (arglist);
5720 TREE_OPERAND (exp, 1) = arglist;
5721 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5722 }
5723 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5724
5725 /* Make a suitable register to place result in. */
5726 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5727
5728 emit_queue ();
5729 start_sequence ();
5730
5731 switch (DECL_FUNCTION_CODE (fndecl))
5732 {
5733 case BUILT_IN_SIN:
5734 builtin_optab = sin_optab; break;
5735 case BUILT_IN_COS:
5736 builtin_optab = cos_optab; break;
5737 case BUILT_IN_FSQRT:
5738 builtin_optab = sqrt_optab; break;
5739 default:
5740 abort ();
5741 }
5742
5743 /* Compute into TARGET.
5744 Set TARGET to wherever the result comes back. */
5745 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5746 builtin_optab, op0, target, 0);
5747
5748 /* If we were unable to expand via the builtin, stop the
5749 sequence (without outputting the insns) and break, causing
5750 a call the the library function. */
5751 if (target == 0)
5752 {
5753 end_sequence ();
5754 break;
5755 }
5756
5757 /* Check the results by default. But if flag_fast_math is turned on,
5758 then assume sqrt will always be called with valid arguments. */
5759
5760 if (! flag_fast_math)
5761 {
5762 /* Don't define the builtin FP instructions
5763 if your machine is not IEEE. */
5764 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5765 abort ();
5766
5767 lab1 = gen_label_rtx ();
5768
5769 /* Test the result; if it is NaN, set errno=EDOM because
5770 the argument was not in the domain. */
5771 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5772 emit_jump_insn (gen_beq (lab1));
5773
5774 #if TARGET_EDOM
5775 {
5776 #ifdef GEN_ERRNO_RTX
5777 rtx errno_rtx = GEN_ERRNO_RTX;
5778 #else
5779 rtx errno_rtx
5780 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5781 #endif
5782
5783 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5784 }
5785 #else
5786 /* We can't set errno=EDOM directly; let the library call do it.
5787 Pop the arguments right away in case the call gets deleted. */
5788 NO_DEFER_POP;
5789 expand_call (exp, target, 0);
5790 OK_DEFER_POP;
5791 #endif
5792
5793 emit_label (lab1);
5794 }
5795
5796 /* Output the entire sequence. */
5797 insns = get_insns ();
5798 end_sequence ();
5799 emit_insns (insns);
5800
5801 return target;
5802
5803 /* __builtin_apply_args returns block of memory allocated on
5804 the stack into which is stored the arg pointer, structure
5805 value address, static chain, and all the registers that might
5806 possibly be used in performing a function call. The code is
5807 moved to the start of the function so the incoming values are
5808 saved. */
5809 case BUILT_IN_APPLY_ARGS:
5810 /* Don't do __builtin_apply_args more than once in a function.
5811 Save the result of the first call and reuse it. */
5812 if (apply_args_value != 0)
5813 return apply_args_value;
5814 {
5815 /* When this function is called, it means that registers must be
5816 saved on entry to this function. So we migrate the
5817 call to the first insn of this function. */
5818 rtx temp;
5819 rtx seq;
5820
5821 start_sequence ();
5822 temp = expand_builtin_apply_args ();
5823 seq = get_insns ();
5824 end_sequence ();
5825
5826 apply_args_value = temp;
5827
5828 /* Put the sequence after the NOTE that starts the function.
5829 If this is inside a SEQUENCE, make the outer-level insn
5830 chain current, so the code is placed at the start of the
5831 function. */
5832 push_topmost_sequence ();
5833 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5834 pop_topmost_sequence ();
5835 return temp;
5836 }
5837
5838 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5839 FUNCTION with a copy of the parameters described by
5840 ARGUMENTS, and ARGSIZE. It returns a block of memory
5841 allocated on the stack into which is stored all the registers
5842 that might possibly be used for returning the result of a
5843 function. ARGUMENTS is the value returned by
5844 __builtin_apply_args. ARGSIZE is the number of bytes of
5845 arguments that must be copied. ??? How should this value be
5846 computed? We'll also need a safe worst case value for varargs
5847 functions. */
5848 case BUILT_IN_APPLY:
5849 if (arglist == 0
5850 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5851 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5852 || TREE_CHAIN (arglist) == 0
5853 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5854 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5855 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5856 return const0_rtx;
5857 else
5858 {
5859 int i;
5860 tree t;
5861 rtx ops[3];
5862
5863 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5864 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5865
5866 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5867 }
5868
5869 /* __builtin_return (RESULT) causes the function to return the
5870 value described by RESULT. RESULT is address of the block of
5871 memory returned by __builtin_apply. */
5872 case BUILT_IN_RETURN:
5873 if (arglist
5874 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5875 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5876 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5877 NULL_RTX, VOIDmode, 0));
5878 return const0_rtx;
5879
5880 case BUILT_IN_SAVEREGS:
5881 /* Don't do __builtin_saveregs more than once in a function.
5882 Save the result of the first call and reuse it. */
5883 if (saveregs_value != 0)
5884 return saveregs_value;
5885 {
5886 /* When this function is called, it means that registers must be
5887 saved on entry to this function. So we migrate the
5888 call to the first insn of this function. */
5889 rtx temp;
5890 rtx seq;
5891 rtx valreg, saved_valreg;
5892
5893 /* Now really call the function. `expand_call' does not call
5894 expand_builtin, so there is no danger of infinite recursion here. */
5895 start_sequence ();
5896
5897 #ifdef EXPAND_BUILTIN_SAVEREGS
5898 /* Do whatever the machine needs done in this case. */
5899 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5900 #else
5901 /* The register where the function returns its value
5902 is likely to have something else in it, such as an argument.
5903 So preserve that register around the call. */
5904 if (value_mode != VOIDmode)
5905 {
5906 valreg = hard_libcall_value (value_mode);
5907 saved_valreg = gen_reg_rtx (value_mode);
5908 emit_move_insn (saved_valreg, valreg);
5909 }
5910
5911 /* Generate the call, putting the value in a pseudo. */
5912 temp = expand_call (exp, target, ignore);
5913
5914 if (value_mode != VOIDmode)
5915 emit_move_insn (valreg, saved_valreg);
5916 #endif
5917
5918 seq = get_insns ();
5919 end_sequence ();
5920
5921 saveregs_value = temp;
5922
5923 /* Put the sequence after the NOTE that starts the function.
5924 If this is inside a SEQUENCE, make the outer-level insn
5925 chain current, so the code is placed at the start of the
5926 function. */
5927 push_topmost_sequence ();
5928 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5929 pop_topmost_sequence ();
5930 return temp;
5931 }
5932
5933 /* __builtin_args_info (N) returns word N of the arg space info
5934 for the current function. The number and meanings of words
5935 is controlled by the definition of CUMULATIVE_ARGS. */
5936 case BUILT_IN_ARGS_INFO:
5937 {
5938 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5939 int i;
5940 int *word_ptr = (int *) &current_function_args_info;
5941 tree type, elts, result;
5942
5943 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5944 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5945 __FILE__, __LINE__);
5946
5947 if (arglist != 0)
5948 {
5949 tree arg = TREE_VALUE (arglist);
5950 if (TREE_CODE (arg) != INTEGER_CST)
5951 error ("argument of `__builtin_args_info' must be constant");
5952 else
5953 {
5954 int wordnum = TREE_INT_CST_LOW (arg);
5955
5956 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5957 error ("argument of `__builtin_args_info' out of range");
5958 else
5959 return GEN_INT (word_ptr[wordnum]);
5960 }
5961 }
5962 else
5963 error ("missing argument in `__builtin_args_info'");
5964
5965 return const0_rtx;
5966
5967 #if 0
5968 for (i = 0; i < nwords; i++)
5969 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5970
5971 type = build_array_type (integer_type_node,
5972 build_index_type (build_int_2 (nwords, 0)));
5973 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5974 TREE_CONSTANT (result) = 1;
5975 TREE_STATIC (result) = 1;
5976 result = build (INDIRECT_REF, build_pointer_type (type), result);
5977 TREE_CONSTANT (result) = 1;
5978 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5979 #endif
5980 }
5981
5982 /* Return the address of the first anonymous stack arg. */
5983 case BUILT_IN_NEXT_ARG:
5984 {
5985 tree fntype = TREE_TYPE (current_function_decl);
5986 if (!(TYPE_ARG_TYPES (fntype) != 0
5987 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5988 != void_type_node)))
5989 {
5990 error ("`va_start' used in function with fixed args");
5991 return const0_rtx;
5992 }
5993 }
5994
5995 return expand_binop (Pmode, add_optab,
5996 current_function_internal_arg_pointer,
5997 current_function_arg_offset_rtx,
5998 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5999
6000 case BUILT_IN_CLASSIFY_TYPE:
6001 if (arglist != 0)
6002 {
6003 tree type = TREE_TYPE (TREE_VALUE (arglist));
6004 enum tree_code code = TREE_CODE (type);
6005 if (code == VOID_TYPE)
6006 return GEN_INT (void_type_class);
6007 if (code == INTEGER_TYPE)
6008 return GEN_INT (integer_type_class);
6009 if (code == CHAR_TYPE)
6010 return GEN_INT (char_type_class);
6011 if (code == ENUMERAL_TYPE)
6012 return GEN_INT (enumeral_type_class);
6013 if (code == BOOLEAN_TYPE)
6014 return GEN_INT (boolean_type_class);
6015 if (code == POINTER_TYPE)
6016 return GEN_INT (pointer_type_class);
6017 if (code == REFERENCE_TYPE)
6018 return GEN_INT (reference_type_class);
6019 if (code == OFFSET_TYPE)
6020 return GEN_INT (offset_type_class);
6021 if (code == REAL_TYPE)
6022 return GEN_INT (real_type_class);
6023 if (code == COMPLEX_TYPE)
6024 return GEN_INT (complex_type_class);
6025 if (code == FUNCTION_TYPE)
6026 return GEN_INT (function_type_class);
6027 if (code == METHOD_TYPE)
6028 return GEN_INT (method_type_class);
6029 if (code == RECORD_TYPE)
6030 return GEN_INT (record_type_class);
6031 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
6032 return GEN_INT (union_type_class);
6033 if (code == ARRAY_TYPE)
6034 return GEN_INT (array_type_class);
6035 if (code == STRING_TYPE)
6036 return GEN_INT (string_type_class);
6037 if (code == SET_TYPE)
6038 return GEN_INT (set_type_class);
6039 if (code == FILE_TYPE)
6040 return GEN_INT (file_type_class);
6041 if (code == LANG_TYPE)
6042 return GEN_INT (lang_type_class);
6043 }
6044 return GEN_INT (no_type_class);
6045
6046 case BUILT_IN_CONSTANT_P:
6047 if (arglist == 0)
6048 return const0_rtx;
6049 else
6050 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6051 ? const1_rtx : const0_rtx);
6052
6053 case BUILT_IN_FRAME_ADDRESS:
6054 /* The argument must be a nonnegative integer constant.
6055 It counts the number of frames to scan up the stack.
6056 The value is the address of that frame. */
6057 case BUILT_IN_RETURN_ADDRESS:
6058 /* The argument must be a nonnegative integer constant.
6059 It counts the number of frames to scan up the stack.
6060 The value is the return address saved in that frame. */
6061 if (arglist == 0)
6062 /* Warning about missing arg was already issued. */
6063 return const0_rtx;
6064 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6065 {
6066 error ("invalid arg to `__builtin_return_address'");
6067 return const0_rtx;
6068 }
6069 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6070 {
6071 error ("invalid arg to `__builtin_return_address'");
6072 return const0_rtx;
6073 }
6074 else
6075 {
6076 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6077 rtx tem = frame_pointer_rtx;
6078 int i;
6079
6080 /* Some machines need special handling before we can access arbitrary
6081 frames. For example, on the sparc, we must first flush all
6082 register windows to the stack. */
6083 #ifdef SETUP_FRAME_ADDRESSES
6084 SETUP_FRAME_ADDRESSES ();
6085 #endif
6086
6087 /* On the sparc, the return address is not in the frame, it is
6088 in a register. There is no way to access it off of the current
6089 frame pointer, but it can be accessed off the previous frame
6090 pointer by reading the value from the register window save
6091 area. */
6092 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6093 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6094 count--;
6095 #endif
6096
6097 /* Scan back COUNT frames to the specified frame. */
6098 for (i = 0; i < count; i++)
6099 {
6100 /* Assume the dynamic chain pointer is in the word that
6101 the frame address points to, unless otherwise specified. */
6102 #ifdef DYNAMIC_CHAIN_ADDRESS
6103 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6104 #endif
6105 tem = memory_address (Pmode, tem);
6106 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6107 }
6108
6109 /* For __builtin_frame_address, return what we've got. */
6110 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6111 return tem;
6112
6113 /* For __builtin_return_address,
6114 Get the return address from that frame. */
6115 #ifdef RETURN_ADDR_RTX
6116 return RETURN_ADDR_RTX (count, tem);
6117 #else
6118 tem = memory_address (Pmode,
6119 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6120 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6121 #endif
6122 }
6123
6124 case BUILT_IN_ALLOCA:
6125 if (arglist == 0
6126 /* Arg could be non-integer if user redeclared this fcn wrong. */
6127 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6128 return const0_rtx;
6129 current_function_calls_alloca = 1;
6130 /* Compute the argument. */
6131 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6132
6133 /* Allocate the desired space. */
6134 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6135
6136 /* Record the new stack level for nonlocal gotos. */
6137 if (nonlocal_goto_handler_slot != 0)
6138 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6139 return target;
6140
6141 case BUILT_IN_FFS:
6142 /* If not optimizing, call the library function. */
6143 if (!optimize)
6144 break;
6145
6146 if (arglist == 0
6147 /* Arg could be non-integer if user redeclared this fcn wrong. */
6148 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6149 return const0_rtx;
6150
6151 /* Compute the argument. */
6152 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6153 /* Compute ffs, into TARGET if possible.
6154 Set TARGET to wherever the result comes back. */
6155 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6156 ffs_optab, op0, target, 1);
6157 if (target == 0)
6158 abort ();
6159 return target;
6160
6161 case BUILT_IN_STRLEN:
6162 /* If not optimizing, call the library function. */
6163 if (!optimize)
6164 break;
6165
6166 if (arglist == 0
6167 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6168 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6169 return const0_rtx;
6170 else
6171 {
6172 tree src = TREE_VALUE (arglist);
6173 tree len = c_strlen (src);
6174
6175 int align
6176 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6177
6178 rtx result, src_rtx, char_rtx;
6179 enum machine_mode insn_mode = value_mode, char_mode;
6180 enum insn_code icode;
6181
6182 /* If the length is known, just return it. */
6183 if (len != 0)
6184 return expand_expr (len, target, mode, 0);
6185
6186 /* If SRC is not a pointer type, don't do this operation inline. */
6187 if (align == 0)
6188 break;
6189
6190 /* Call a function if we can't compute strlen in the right mode. */
6191
6192 while (insn_mode != VOIDmode)
6193 {
6194 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6195 if (icode != CODE_FOR_nothing)
6196 break;
6197
6198 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6199 }
6200 if (insn_mode == VOIDmode)
6201 break;
6202
6203 /* Make a place to write the result of the instruction. */
6204 result = target;
6205 if (! (result != 0
6206 && GET_CODE (result) == REG
6207 && GET_MODE (result) == insn_mode
6208 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6209 result = gen_reg_rtx (insn_mode);
6210
6211 /* Make sure the operands are acceptable to the predicates. */
6212
6213 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6214 result = gen_reg_rtx (insn_mode);
6215
6216 src_rtx = memory_address (BLKmode,
6217 expand_expr (src, NULL_RTX, Pmode,
6218 EXPAND_NORMAL));
6219 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6220 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6221
6222 char_rtx = const0_rtx;
6223 char_mode = insn_operand_mode[(int)icode][2];
6224 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6225 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6226
6227 emit_insn (GEN_FCN (icode) (result,
6228 gen_rtx (MEM, BLKmode, src_rtx),
6229 char_rtx, GEN_INT (align)));
6230
6231 /* Return the value in the proper mode for this function. */
6232 if (GET_MODE (result) == value_mode)
6233 return result;
6234 else if (target != 0)
6235 {
6236 convert_move (target, result, 0);
6237 return target;
6238 }
6239 else
6240 return convert_to_mode (value_mode, result, 0);
6241 }
6242
6243 case BUILT_IN_STRCPY:
6244 /* If not optimizing, call the library function. */
6245 if (!optimize)
6246 break;
6247
6248 if (arglist == 0
6249 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6250 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6251 || TREE_CHAIN (arglist) == 0
6252 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6253 return const0_rtx;
6254 else
6255 {
6256 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6257
6258 if (len == 0)
6259 break;
6260
6261 len = size_binop (PLUS_EXPR, len, integer_one_node);
6262
6263 chainon (arglist, build_tree_list (NULL_TREE, len));
6264 }
6265
6266 /* Drops in. */
6267 case BUILT_IN_MEMCPY:
6268 /* If not optimizing, call the library function. */
6269 if (!optimize)
6270 break;
6271
6272 if (arglist == 0
6273 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6274 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6275 || TREE_CHAIN (arglist) == 0
6276 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6277 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6278 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6279 return const0_rtx;
6280 else
6281 {
6282 tree dest = TREE_VALUE (arglist);
6283 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6284 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6285
6286 int src_align
6287 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6288 int dest_align
6289 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6290 rtx dest_rtx, dest_mem, src_mem;
6291
6292 /* If either SRC or DEST is not a pointer type, don't do
6293 this operation in-line. */
6294 if (src_align == 0 || dest_align == 0)
6295 {
6296 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6297 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6298 break;
6299 }
6300
6301 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6302 dest_mem = gen_rtx (MEM, BLKmode,
6303 memory_address (BLKmode, dest_rtx));
6304 src_mem = gen_rtx (MEM, BLKmode,
6305 memory_address (BLKmode,
6306 expand_expr (src, NULL_RTX,
6307 Pmode,
6308 EXPAND_NORMAL)));
6309
6310 /* Copy word part most expediently. */
6311 emit_block_move (dest_mem, src_mem,
6312 expand_expr (len, NULL_RTX, VOIDmode, 0),
6313 MIN (src_align, dest_align));
6314 return dest_rtx;
6315 }
6316
6317 /* These comparison functions need an instruction that returns an actual
6318 index. An ordinary compare that just sets the condition codes
6319 is not enough. */
6320 #ifdef HAVE_cmpstrsi
6321 case BUILT_IN_STRCMP:
6322 /* If not optimizing, call the library function. */
6323 if (!optimize)
6324 break;
6325
6326 if (arglist == 0
6327 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6328 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6329 || TREE_CHAIN (arglist) == 0
6330 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6331 return const0_rtx;
6332 else if (!HAVE_cmpstrsi)
6333 break;
6334 {
6335 tree arg1 = TREE_VALUE (arglist);
6336 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6337 tree offset;
6338 tree len, len2;
6339
6340 len = c_strlen (arg1);
6341 if (len)
6342 len = size_binop (PLUS_EXPR, integer_one_node, len);
6343 len2 = c_strlen (arg2);
6344 if (len2)
6345 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6346
6347 /* If we don't have a constant length for the first, use the length
6348 of the second, if we know it. We don't require a constant for
6349 this case; some cost analysis could be done if both are available
6350 but neither is constant. For now, assume they're equally cheap.
6351
6352 If both strings have constant lengths, use the smaller. This
6353 could arise if optimization results in strcpy being called with
6354 two fixed strings, or if the code was machine-generated. We should
6355 add some code to the `memcmp' handler below to deal with such
6356 situations, someday. */
6357 if (!len || TREE_CODE (len) != INTEGER_CST)
6358 {
6359 if (len2)
6360 len = len2;
6361 else if (len == 0)
6362 break;
6363 }
6364 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6365 {
6366 if (tree_int_cst_lt (len2, len))
6367 len = len2;
6368 }
6369
6370 chainon (arglist, build_tree_list (NULL_TREE, len));
6371 }
6372
6373 /* Drops in. */
6374 case BUILT_IN_MEMCMP:
6375 /* If not optimizing, call the library function. */
6376 if (!optimize)
6377 break;
6378
6379 if (arglist == 0
6380 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6381 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6382 || TREE_CHAIN (arglist) == 0
6383 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6384 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6385 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6386 return const0_rtx;
6387 else if (!HAVE_cmpstrsi)
6388 break;
6389 {
6390 tree arg1 = TREE_VALUE (arglist);
6391 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6392 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6393 rtx result;
6394
6395 int arg1_align
6396 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6397 int arg2_align
6398 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6399 enum machine_mode insn_mode
6400 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6401
6402 /* If we don't have POINTER_TYPE, call the function. */
6403 if (arg1_align == 0 || arg2_align == 0)
6404 {
6405 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6406 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6407 break;
6408 }
6409
6410 /* Make a place to write the result of the instruction. */
6411 result = target;
6412 if (! (result != 0
6413 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6414 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6415 result = gen_reg_rtx (insn_mode);
6416
6417 emit_insn (gen_cmpstrsi (result,
6418 gen_rtx (MEM, BLKmode,
6419 expand_expr (arg1, NULL_RTX, Pmode,
6420 EXPAND_NORMAL)),
6421 gen_rtx (MEM, BLKmode,
6422 expand_expr (arg2, NULL_RTX, Pmode,
6423 EXPAND_NORMAL)),
6424 expand_expr (len, NULL_RTX, VOIDmode, 0),
6425 GEN_INT (MIN (arg1_align, arg2_align))));
6426
6427 /* Return the value in the proper mode for this function. */
6428 mode = TYPE_MODE (TREE_TYPE (exp));
6429 if (GET_MODE (result) == mode)
6430 return result;
6431 else if (target != 0)
6432 {
6433 convert_move (target, result, 0);
6434 return target;
6435 }
6436 else
6437 return convert_to_mode (mode, result, 0);
6438 }
6439 #else
6440 case BUILT_IN_STRCMP:
6441 case BUILT_IN_MEMCMP:
6442 break;
6443 #endif
6444
6445 default: /* just do library call, if unknown builtin */
6446 error ("built-in function `%s' not currently supported",
6447 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6448 }
6449
6450 /* The switch statement above can drop through to cause the function
6451 to be called normally. */
6452
6453 return expand_call (exp, target, ignore);
6454 }
6455 \f
6456 /* Built-in functions to perform an untyped call and return. */
6457
6458 /* For each register that may be used for calling a function, this
6459 gives a mode used to copy the register's value. VOIDmode indicates
6460 the register is not used for calling a function. If the machine
6461 has register windows, this gives only the outbound registers.
6462 INCOMING_REGNO gives the corresponding inbound register. */
6463 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6464
6465 /* For each register that may be used for returning values, this gives
6466 a mode used to copy the register's value. VOIDmode indicates the
6467 register is not used for returning values. If the machine has
6468 register windows, this gives only the outbound registers.
6469 INCOMING_REGNO gives the corresponding inbound register. */
6470 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6471
6472 /* Return the size required for the block returned by __builtin_apply_args,
6473 and initialize apply_args_mode. */
6474 static int
6475 apply_args_size ()
6476 {
6477 static int size = -1;
6478 int align, regno;
6479 enum machine_mode mode;
6480
6481 /* The values computed by this function never change. */
6482 if (size < 0)
6483 {
6484 /* The first value is the incoming arg-pointer. */
6485 size = GET_MODE_SIZE (Pmode);
6486
6487 /* The second value is the structure value address unless this is
6488 passed as an "invisible" first argument. */
6489 if (struct_value_rtx)
6490 size += GET_MODE_SIZE (Pmode);
6491
6492 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6493 if (FUNCTION_ARG_REGNO_P (regno))
6494 {
6495 /* Search for the proper mode for copying this register's
6496 value. I'm not sure this is right, but it works so far. */
6497 enum machine_mode best_mode = VOIDmode;
6498
6499 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6500 mode != VOIDmode;
6501 mode = GET_MODE_WIDER_MODE (mode))
6502 if (HARD_REGNO_MODE_OK (regno, mode)
6503 && HARD_REGNO_NREGS (regno, mode) == 1)
6504 best_mode = mode;
6505
6506 if (best_mode == VOIDmode)
6507 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6508 mode != VOIDmode;
6509 mode = GET_MODE_WIDER_MODE (mode))
6510 if (HARD_REGNO_MODE_OK (regno, mode)
6511 && (mov_optab->handlers[(int) mode].insn_code
6512 != CODE_FOR_nothing))
6513 best_mode = mode;
6514
6515 mode = best_mode;
6516 if (mode == VOIDmode)
6517 abort ();
6518
6519 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6520 if (size % align != 0)
6521 size = CEIL (size, align) * align;
6522 size += GET_MODE_SIZE (mode);
6523 apply_args_mode[regno] = mode;
6524 }
6525 else
6526 apply_args_mode[regno] = VOIDmode;
6527 }
6528 return size;
6529 }
6530
6531 /* Return the size required for the block returned by __builtin_apply,
6532 and initialize apply_result_mode. */
6533 static int
6534 apply_result_size ()
6535 {
6536 static int size = -1;
6537 int align, regno;
6538 enum machine_mode mode;
6539
6540 /* The values computed by this function never change. */
6541 if (size < 0)
6542 {
6543 size = 0;
6544
6545 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6546 if (FUNCTION_VALUE_REGNO_P (regno))
6547 {
6548 /* Search for the proper mode for copying this register's
6549 value. I'm not sure this is right, but it works so far. */
6550 enum machine_mode best_mode = VOIDmode;
6551
6552 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6553 mode != TImode;
6554 mode = GET_MODE_WIDER_MODE (mode))
6555 if (HARD_REGNO_MODE_OK (regno, mode))
6556 best_mode = mode;
6557
6558 if (best_mode == VOIDmode)
6559 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6560 mode != VOIDmode;
6561 mode = GET_MODE_WIDER_MODE (mode))
6562 if (HARD_REGNO_MODE_OK (regno, mode)
6563 && (mov_optab->handlers[(int) mode].insn_code
6564 != CODE_FOR_nothing))
6565 best_mode = mode;
6566
6567 mode = best_mode;
6568 if (mode == VOIDmode)
6569 abort ();
6570
6571 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6572 if (size % align != 0)
6573 size = CEIL (size, align) * align;
6574 size += GET_MODE_SIZE (mode);
6575 apply_result_mode[regno] = mode;
6576 }
6577 else
6578 apply_result_mode[regno] = VOIDmode;
6579
6580 /* Allow targets that use untyped_call and untyped_return to override
6581 the size so that machine-specific information can be stored here. */
6582 #ifdef APPLY_RESULT_SIZE
6583 size = APPLY_RESULT_SIZE;
6584 #endif
6585 }
6586 return size;
6587 }
6588
6589 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6590 /* Create a vector describing the result block RESULT. If SAVEP is true,
6591 the result block is used to save the values; otherwise it is used to
6592 restore the values. */
6593 static rtx
6594 result_vector (savep, result)
6595 int savep;
6596 rtx result;
6597 {
6598 int regno, size, align, nelts;
6599 enum machine_mode mode;
6600 rtx reg, mem;
6601 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6602
6603 size = nelts = 0;
6604 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6605 if ((mode = apply_result_mode[regno]) != VOIDmode)
6606 {
6607 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6608 if (size % align != 0)
6609 size = CEIL (size, align) * align;
6610 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6611 mem = change_address (result, mode,
6612 plus_constant (XEXP (result, 0), size));
6613 savevec[nelts++] = (savep
6614 ? gen_rtx (SET, VOIDmode, mem, reg)
6615 : gen_rtx (SET, VOIDmode, reg, mem));
6616 size += GET_MODE_SIZE (mode);
6617 }
6618 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
6619 }
6620 #endif /* HAVE_untyped_call or HAVE_untyped_return */
6621
6622
6623 /* Save the state required to perform an untyped call with the same
6624 arguments as were passed to the current function. */
6625 static rtx
6626 expand_builtin_apply_args ()
6627 {
6628 rtx registers;
6629 int size, align, regno;
6630 enum machine_mode mode;
6631
6632 /* Create a block where the arg-pointer, structure value address,
6633 and argument registers can be saved. */
6634 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6635
6636 /* Walk past the arg-pointer and structure value address. */
6637 size = GET_MODE_SIZE (Pmode);
6638 if (struct_value_rtx)
6639 size += GET_MODE_SIZE (Pmode);
6640
6641 /* Save each register used in calling a function to the block. */
6642 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6643 if ((mode = apply_args_mode[regno]) != VOIDmode)
6644 {
6645 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6646 if (size % align != 0)
6647 size = CEIL (size, align) * align;
6648 emit_move_insn (change_address (registers, mode,
6649 plus_constant (XEXP (registers, 0),
6650 size)),
6651 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6652 size += GET_MODE_SIZE (mode);
6653 }
6654
6655 /* Save the arg pointer to the block. */
6656 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6657 copy_to_reg (virtual_incoming_args_rtx));
6658 size = GET_MODE_SIZE (Pmode);
6659
6660 /* Save the structure value address unless this is passed as an
6661 "invisible" first argument. */
6662 if (struct_value_incoming_rtx)
6663 {
6664 emit_move_insn (change_address (registers, Pmode,
6665 plus_constant (XEXP (registers, 0),
6666 size)),
6667 copy_to_reg (struct_value_incoming_rtx));
6668 size += GET_MODE_SIZE (Pmode);
6669 }
6670
6671 /* Return the address of the block. */
6672 return copy_addr_to_reg (XEXP (registers, 0));
6673 }
6674
6675 /* Perform an untyped call and save the state required to perform an
6676 untyped return of whatever value was returned by the given function. */
6677 static rtx
6678 expand_builtin_apply (function, arguments, argsize)
6679 rtx function, arguments, argsize;
6680 {
6681 int size, align, regno;
6682 enum machine_mode mode;
6683 rtx incoming_args, result, reg, dest, call_insn;
6684 rtx old_stack_level = 0;
6685 rtx use_insns = 0;
6686
6687 /* Create a block where the return registers can be saved. */
6688 result = assign_stack_local (BLKmode, apply_result_size (), -1);
6689
6690 /* ??? The argsize value should be adjusted here. */
6691
6692 /* Fetch the arg pointer from the ARGUMENTS block. */
6693 incoming_args = gen_reg_rtx (Pmode);
6694 emit_move_insn (incoming_args,
6695 gen_rtx (MEM, Pmode, arguments));
6696 #ifndef STACK_GROWS_DOWNWARD
6697 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
6698 incoming_args, 0, OPTAB_LIB_WIDEN);
6699 #endif
6700
6701 /* Perform postincrements before actually calling the function. */
6702 emit_queue ();
6703
6704 /* Push a new argument block and copy the arguments. */
6705 do_pending_stack_adjust ();
6706 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
6707
6708 /* Push a block of memory onto the stack to store the memory arguments.
6709 Save the address in a register, and copy the memory arguments. ??? I
6710 haven't figured out how the calling convention macros effect this,
6711 but it's likely that the source and/or destination addresses in
6712 the block copy will need updating in machine specific ways. */
6713 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
6714 emit_block_move (gen_rtx (MEM, BLKmode, dest),
6715 gen_rtx (MEM, BLKmode, incoming_args),
6716 argsize,
6717 PARM_BOUNDARY / BITS_PER_UNIT);
6718
6719 /* Refer to the argument block. */
6720 apply_args_size ();
6721 arguments = gen_rtx (MEM, BLKmode, arguments);
6722
6723 /* Walk past the arg-pointer and structure value address. */
6724 size = GET_MODE_SIZE (Pmode);
6725 if (struct_value_rtx)
6726 size += GET_MODE_SIZE (Pmode);
6727
6728 /* Restore each of the registers previously saved. Make USE insns
6729 for each of these registers for use in making the call. */
6730 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6731 if ((mode = apply_args_mode[regno]) != VOIDmode)
6732 {
6733 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6734 if (size % align != 0)
6735 size = CEIL (size, align) * align;
6736 reg = gen_rtx (REG, mode, regno);
6737 emit_move_insn (reg,
6738 change_address (arguments, mode,
6739 plus_constant (XEXP (arguments, 0),
6740 size)));
6741
6742 push_to_sequence (use_insns);
6743 emit_insn (gen_rtx (USE, VOIDmode, reg));
6744 use_insns = get_insns ();
6745 end_sequence ();
6746 size += GET_MODE_SIZE (mode);
6747 }
6748
6749 /* Restore the structure value address unless this is passed as an
6750 "invisible" first argument. */
6751 size = GET_MODE_SIZE (Pmode);
6752 if (struct_value_rtx)
6753 {
6754 rtx value = gen_reg_rtx (Pmode);
6755 emit_move_insn (value,
6756 change_address (arguments, Pmode,
6757 plus_constant (XEXP (arguments, 0),
6758 size)));
6759 emit_move_insn (struct_value_rtx, value);
6760 if (GET_CODE (struct_value_rtx) == REG)
6761 {
6762 push_to_sequence (use_insns);
6763 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
6764 use_insns = get_insns ();
6765 end_sequence ();
6766 }
6767 size += GET_MODE_SIZE (Pmode);
6768 }
6769
6770 /* All arguments and registers used for the call are set up by now! */
6771 function = prepare_call_address (function, NULL_TREE, &use_insns);
6772
6773 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
6774 and we don't want to load it into a register as an optimization,
6775 because prepare_call_address already did it if it should be done. */
6776 if (GET_CODE (function) != SYMBOL_REF)
6777 function = memory_address (FUNCTION_MODE, function);
6778
6779 /* Generate the actual call instruction and save the return value. */
6780 #ifdef HAVE_untyped_call
6781 if (HAVE_untyped_call)
6782 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
6783 result, result_vector (1, result)));
6784 else
6785 #endif
6786 #ifdef HAVE_call_value
6787 if (HAVE_call_value)
6788 {
6789 rtx valreg = 0;
6790
6791 /* Locate the unique return register. It is not possible to
6792 express a call that sets more than one return register using
6793 call_value; use untyped_call for that. In fact, untyped_call
6794 only needs to save the return registers in the given block. */
6795 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6796 if ((mode = apply_result_mode[regno]) != VOIDmode)
6797 {
6798 if (valreg)
6799 abort (); /* HAVE_untyped_call required. */
6800 valreg = gen_rtx (REG, mode, regno);
6801 }
6802
6803 emit_call_insn (gen_call_value (valreg,
6804 gen_rtx (MEM, FUNCTION_MODE, function),
6805 const0_rtx, NULL_RTX, const0_rtx));
6806
6807 emit_move_insn (change_address (result, GET_MODE (valreg),
6808 XEXP (result, 0)),
6809 valreg);
6810 }
6811 else
6812 #endif
6813 abort ();
6814
6815 /* Find the CALL insn we just emitted and write the USE insns before it. */
6816 for (call_insn = get_last_insn ();
6817 call_insn && GET_CODE (call_insn) != CALL_INSN;
6818 call_insn = PREV_INSN (call_insn))
6819 ;
6820
6821 if (! call_insn)
6822 abort ();
6823
6824 /* Put the USE insns before the CALL. */
6825 emit_insns_before (use_insns, call_insn);
6826
6827 /* Restore the stack. */
6828 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
6829
6830 /* Return the address of the result block. */
6831 return copy_addr_to_reg (XEXP (result, 0));
6832 }
6833
6834 /* Perform an untyped return. */
6835 static void
6836 expand_builtin_return (result)
6837 rtx result;
6838 {
6839 int size, align, regno;
6840 enum machine_mode mode;
6841 rtx reg;
6842 rtx use_insns = 0;
6843
6844 apply_result_size ();
6845 result = gen_rtx (MEM, BLKmode, result);
6846
6847 #ifdef HAVE_untyped_return
6848 if (HAVE_untyped_return)
6849 {
6850 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
6851 emit_barrier ();
6852 return;
6853 }
6854 #endif
6855
6856 /* Restore the return value and note that each value is used. */
6857 size = 0;
6858 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6859 if ((mode = apply_result_mode[regno]) != VOIDmode)
6860 {
6861 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6862 if (size % align != 0)
6863 size = CEIL (size, align) * align;
6864 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
6865 emit_move_insn (reg,
6866 change_address (result, mode,
6867 plus_constant (XEXP (result, 0),
6868 size)));
6869
6870 push_to_sequence (use_insns);
6871 emit_insn (gen_rtx (USE, VOIDmode, reg));
6872 use_insns = get_insns ();
6873 end_sequence ();
6874 size += GET_MODE_SIZE (mode);
6875 }
6876
6877 /* Put the USE insns before the return. */
6878 emit_insns (use_insns);
6879
6880 /* Return whatever values was restored by jumping directly to the end
6881 of the function. */
6882 expand_null_return ();
6883 }
6884 \f
6885 /* Expand code for a post- or pre- increment or decrement
6886 and return the RTX for the result.
6887 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6888
6889 static rtx
6890 expand_increment (exp, post)
6891 register tree exp;
6892 int post;
6893 {
6894 register rtx op0, op1;
6895 register rtx temp, value;
6896 register tree incremented = TREE_OPERAND (exp, 0);
6897 optab this_optab = add_optab;
6898 int icode;
6899 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6900 int op0_is_copy = 0;
6901 int single_insn = 0;
6902
6903 /* Stabilize any component ref that might need to be
6904 evaluated more than once below. */
6905 if (!post
6906 || TREE_CODE (incremented) == BIT_FIELD_REF
6907 || (TREE_CODE (incremented) == COMPONENT_REF
6908 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6909 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6910 incremented = stabilize_reference (incremented);
6911
6912 /* Compute the operands as RTX.
6913 Note whether OP0 is the actual lvalue or a copy of it:
6914 I believe it is a copy iff it is a register or subreg
6915 and insns were generated in computing it. */
6916
6917 temp = get_last_insn ();
6918 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6919
6920 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6921 in place but intead must do sign- or zero-extension during assignment,
6922 so we copy it into a new register and let the code below use it as
6923 a copy.
6924
6925 Note that we can safely modify this SUBREG since it is know not to be
6926 shared (it was made by the expand_expr call above). */
6927
6928 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6929 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6930
6931 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6932 && temp != get_last_insn ());
6933 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6934
6935 /* Decide whether incrementing or decrementing. */
6936 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6937 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6938 this_optab = sub_optab;
6939
6940 /* Convert decrement by a constant into a negative increment. */
6941 if (this_optab == sub_optab
6942 && GET_CODE (op1) == CONST_INT)
6943 {
6944 op1 = GEN_INT (- INTVAL (op1));
6945 this_optab = add_optab;
6946 }
6947
6948 /* For a preincrement, see if we can do this with a single instruction. */
6949 if (!post)
6950 {
6951 icode = (int) this_optab->handlers[(int) mode].insn_code;
6952 if (icode != (int) CODE_FOR_nothing
6953 /* Make sure that OP0 is valid for operands 0 and 1
6954 of the insn we want to queue. */
6955 && (*insn_operand_predicate[icode][0]) (op0, mode)
6956 && (*insn_operand_predicate[icode][1]) (op0, mode)
6957 && (*insn_operand_predicate[icode][2]) (op1, mode))
6958 single_insn = 1;
6959 }
6960
6961 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6962 then we cannot just increment OP0. We must therefore contrive to
6963 increment the original value. Then, for postincrement, we can return
6964 OP0 since it is a copy of the old value. For preincrement, expand here
6965 unless we can do it with a single insn. */
6966 if (op0_is_copy || (!post && !single_insn))
6967 {
6968 /* This is the easiest way to increment the value wherever it is.
6969 Problems with multiple evaluation of INCREMENTED are prevented
6970 because either (1) it is a component_ref or preincrement,
6971 in which case it was stabilized above, or (2) it is an array_ref
6972 with constant index in an array in a register, which is
6973 safe to reevaluate. */
6974 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
6975 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6976 ? MINUS_EXPR : PLUS_EXPR),
6977 TREE_TYPE (exp),
6978 incremented,
6979 TREE_OPERAND (exp, 1));
6980 temp = expand_assignment (incremented, newexp, ! post, 0);
6981 return post ? op0 : temp;
6982 }
6983
6984 if (post)
6985 {
6986 /* We have a true reference to the value in OP0.
6987 If there is an insn to add or subtract in this mode, queue it. */
6988
6989 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6990 op0 = stabilize (op0);
6991 #endif
6992
6993 icode = (int) this_optab->handlers[(int) mode].insn_code;
6994 if (icode != (int) CODE_FOR_nothing
6995 /* Make sure that OP0 is valid for operands 0 and 1
6996 of the insn we want to queue. */
6997 && (*insn_operand_predicate[icode][0]) (op0, mode)
6998 && (*insn_operand_predicate[icode][1]) (op0, mode))
6999 {
7000 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
7001 op1 = force_reg (mode, op1);
7002
7003 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
7004 }
7005 }
7006
7007 /* Preincrement, or we can't increment with one simple insn. */
7008 if (post)
7009 /* Save a copy of the value before inc or dec, to return it later. */
7010 temp = value = copy_to_reg (op0);
7011 else
7012 /* Arrange to return the incremented value. */
7013 /* Copy the rtx because expand_binop will protect from the queue,
7014 and the results of that would be invalid for us to return
7015 if our caller does emit_queue before using our result. */
7016 temp = copy_rtx (value = op0);
7017
7018 /* Increment however we can. */
7019 op1 = expand_binop (mode, this_optab, value, op1, op0,
7020 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
7021 /* Make sure the value is stored into OP0. */
7022 if (op1 != op0)
7023 emit_move_insn (op0, op1);
7024
7025 return temp;
7026 }
7027 \f
7028 /* Expand all function calls contained within EXP, innermost ones first.
7029 But don't look within expressions that have sequence points.
7030 For each CALL_EXPR, record the rtx for its value
7031 in the CALL_EXPR_RTL field. */
7032
7033 static void
7034 preexpand_calls (exp)
7035 tree exp;
7036 {
7037 register int nops, i;
7038 int type = TREE_CODE_CLASS (TREE_CODE (exp));
7039
7040 if (! do_preexpand_calls)
7041 return;
7042
7043 /* Only expressions and references can contain calls. */
7044
7045 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
7046 return;
7047
7048 switch (TREE_CODE (exp))
7049 {
7050 case CALL_EXPR:
7051 /* Do nothing if already expanded. */
7052 if (CALL_EXPR_RTL (exp) != 0)
7053 return;
7054
7055 /* Do nothing to built-in functions. */
7056 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
7057 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
7058 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7059 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
7060 return;
7061
7062 case COMPOUND_EXPR:
7063 case COND_EXPR:
7064 case TRUTH_ANDIF_EXPR:
7065 case TRUTH_ORIF_EXPR:
7066 /* If we find one of these, then we can be sure
7067 the adjust will be done for it (since it makes jumps).
7068 Do it now, so that if this is inside an argument
7069 of a function, we don't get the stack adjustment
7070 after some other args have already been pushed. */
7071 do_pending_stack_adjust ();
7072 return;
7073
7074 case BLOCK:
7075 case RTL_EXPR:
7076 case WITH_CLEANUP_EXPR:
7077 return;
7078
7079 case SAVE_EXPR:
7080 if (SAVE_EXPR_RTL (exp) != 0)
7081 return;
7082 }
7083
7084 nops = tree_code_length[(int) TREE_CODE (exp)];
7085 for (i = 0; i < nops; i++)
7086 if (TREE_OPERAND (exp, i) != 0)
7087 {
7088 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
7089 if (type == 'e' || type == '<' || type == '1' || type == '2'
7090 || type == 'r')
7091 preexpand_calls (TREE_OPERAND (exp, i));
7092 }
7093 }
7094 \f
7095 /* At the start of a function, record that we have no previously-pushed
7096 arguments waiting to be popped. */
7097
7098 void
7099 init_pending_stack_adjust ()
7100 {
7101 pending_stack_adjust = 0;
7102 }
7103
7104 /* When exiting from function, if safe, clear out any pending stack adjust
7105 so the adjustment won't get done. */
7106
7107 void
7108 clear_pending_stack_adjust ()
7109 {
7110 #ifdef EXIT_IGNORE_STACK
7111 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
7112 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
7113 && ! flag_inline_functions)
7114 pending_stack_adjust = 0;
7115 #endif
7116 }
7117
7118 /* Pop any previously-pushed arguments that have not been popped yet. */
7119
7120 void
7121 do_pending_stack_adjust ()
7122 {
7123 if (inhibit_defer_pop == 0)
7124 {
7125 if (pending_stack_adjust != 0)
7126 adjust_stack (GEN_INT (pending_stack_adjust));
7127 pending_stack_adjust = 0;
7128 }
7129 }
7130
7131 /* Expand all cleanups up to OLD_CLEANUPS.
7132 Needed here, and also for language-dependent calls. */
7133
7134 void
7135 expand_cleanups_to (old_cleanups)
7136 tree old_cleanups;
7137 {
7138 while (cleanups_this_call != old_cleanups)
7139 {
7140 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
7141 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
7142 }
7143 }
7144 \f
7145 /* Expand conditional expressions. */
7146
7147 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
7148 LABEL is an rtx of code CODE_LABEL, in this function and all the
7149 functions here. */
7150
7151 void
7152 jumpifnot (exp, label)
7153 tree exp;
7154 rtx label;
7155 {
7156 do_jump (exp, label, NULL_RTX);
7157 }
7158
7159 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
7160
7161 void
7162 jumpif (exp, label)
7163 tree exp;
7164 rtx label;
7165 {
7166 do_jump (exp, NULL_RTX, label);
7167 }
7168
7169 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7170 the result is zero, or IF_TRUE_LABEL if the result is one.
7171 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7172 meaning fall through in that case.
7173
7174 do_jump always does any pending stack adjust except when it does not
7175 actually perform a jump. An example where there is no jump
7176 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7177
7178 This function is responsible for optimizing cases such as
7179 &&, || and comparison operators in EXP. */
7180
7181 void
7182 do_jump (exp, if_false_label, if_true_label)
7183 tree exp;
7184 rtx if_false_label, if_true_label;
7185 {
7186 register enum tree_code code = TREE_CODE (exp);
7187 /* Some cases need to create a label to jump to
7188 in order to properly fall through.
7189 These cases set DROP_THROUGH_LABEL nonzero. */
7190 rtx drop_through_label = 0;
7191 rtx temp;
7192 rtx comparison = 0;
7193 int i;
7194 tree type;
7195
7196 emit_queue ();
7197
7198 switch (code)
7199 {
7200 case ERROR_MARK:
7201 break;
7202
7203 case INTEGER_CST:
7204 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7205 if (temp)
7206 emit_jump (temp);
7207 break;
7208
7209 #if 0
7210 /* This is not true with #pragma weak */
7211 case ADDR_EXPR:
7212 /* The address of something can never be zero. */
7213 if (if_true_label)
7214 emit_jump (if_true_label);
7215 break;
7216 #endif
7217
7218 case NOP_EXPR:
7219 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7220 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7221 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7222 goto normal;
7223 case CONVERT_EXPR:
7224 /* If we are narrowing the operand, we have to do the compare in the
7225 narrower mode. */
7226 if ((TYPE_PRECISION (TREE_TYPE (exp))
7227 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7228 goto normal;
7229 case NON_LVALUE_EXPR:
7230 case REFERENCE_EXPR:
7231 case ABS_EXPR:
7232 case NEGATE_EXPR:
7233 case LROTATE_EXPR:
7234 case RROTATE_EXPR:
7235 /* These cannot change zero->non-zero or vice versa. */
7236 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7237 break;
7238
7239 #if 0
7240 /* This is never less insns than evaluating the PLUS_EXPR followed by
7241 a test and can be longer if the test is eliminated. */
7242 case PLUS_EXPR:
7243 /* Reduce to minus. */
7244 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7245 TREE_OPERAND (exp, 0),
7246 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7247 TREE_OPERAND (exp, 1))));
7248 /* Process as MINUS. */
7249 #endif
7250
7251 case MINUS_EXPR:
7252 /* Non-zero iff operands of minus differ. */
7253 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7254 TREE_OPERAND (exp, 0),
7255 TREE_OPERAND (exp, 1)),
7256 NE, NE);
7257 break;
7258
7259 case BIT_AND_EXPR:
7260 /* If we are AND'ing with a small constant, do this comparison in the
7261 smallest type that fits. If the machine doesn't have comparisons
7262 that small, it will be converted back to the wider comparison.
7263 This helps if we are testing the sign bit of a narrower object.
7264 combine can't do this for us because it can't know whether a
7265 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7266
7267 if (! SLOW_BYTE_ACCESS
7268 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7269 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7270 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7271 && (type = type_for_size (i + 1, 1)) != 0
7272 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7273 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7274 != CODE_FOR_nothing))
7275 {
7276 do_jump (convert (type, exp), if_false_label, if_true_label);
7277 break;
7278 }
7279 goto normal;
7280
7281 case TRUTH_NOT_EXPR:
7282 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7283 break;
7284
7285 case TRUTH_ANDIF_EXPR:
7286 if (if_false_label == 0)
7287 if_false_label = drop_through_label = gen_label_rtx ();
7288 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7289 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7290 break;
7291
7292 case TRUTH_ORIF_EXPR:
7293 if (if_true_label == 0)
7294 if_true_label = drop_through_label = gen_label_rtx ();
7295 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7296 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7297 break;
7298
7299 case COMPOUND_EXPR:
7300 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7301 free_temp_slots ();
7302 emit_queue ();
7303 do_pending_stack_adjust ();
7304 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7305 break;
7306
7307 case COMPONENT_REF:
7308 case BIT_FIELD_REF:
7309 case ARRAY_REF:
7310 {
7311 int bitsize, bitpos, unsignedp;
7312 enum machine_mode mode;
7313 tree type;
7314 tree offset;
7315 int volatilep = 0;
7316
7317 /* Get description of this reference. We don't actually care
7318 about the underlying object here. */
7319 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7320 &mode, &unsignedp, &volatilep);
7321
7322 type = type_for_size (bitsize, unsignedp);
7323 if (! SLOW_BYTE_ACCESS
7324 && type != 0 && bitsize >= 0
7325 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7326 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7327 != CODE_FOR_nothing))
7328 {
7329 do_jump (convert (type, exp), if_false_label, if_true_label);
7330 break;
7331 }
7332 goto normal;
7333 }
7334
7335 case COND_EXPR:
7336 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7337 if (integer_onep (TREE_OPERAND (exp, 1))
7338 && integer_zerop (TREE_OPERAND (exp, 2)))
7339 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7340
7341 else if (integer_zerop (TREE_OPERAND (exp, 1))
7342 && integer_onep (TREE_OPERAND (exp, 2)))
7343 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7344
7345 else
7346 {
7347 register rtx label1 = gen_label_rtx ();
7348 drop_through_label = gen_label_rtx ();
7349 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7350 /* Now the THEN-expression. */
7351 do_jump (TREE_OPERAND (exp, 1),
7352 if_false_label ? if_false_label : drop_through_label,
7353 if_true_label ? if_true_label : drop_through_label);
7354 /* In case the do_jump just above never jumps. */
7355 do_pending_stack_adjust ();
7356 emit_label (label1);
7357 /* Now the ELSE-expression. */
7358 do_jump (TREE_OPERAND (exp, 2),
7359 if_false_label ? if_false_label : drop_through_label,
7360 if_true_label ? if_true_label : drop_through_label);
7361 }
7362 break;
7363
7364 case EQ_EXPR:
7365 if (integer_zerop (TREE_OPERAND (exp, 1)))
7366 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7367 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7368 == MODE_INT)
7369 &&
7370 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7371 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7372 else
7373 comparison = compare (exp, EQ, EQ);
7374 break;
7375
7376 case NE_EXPR:
7377 if (integer_zerop (TREE_OPERAND (exp, 1)))
7378 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7379 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7380 == MODE_INT)
7381 &&
7382 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7383 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7384 else
7385 comparison = compare (exp, NE, NE);
7386 break;
7387
7388 case LT_EXPR:
7389 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7390 == MODE_INT)
7391 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7392 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7393 else
7394 comparison = compare (exp, LT, LTU);
7395 break;
7396
7397 case LE_EXPR:
7398 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7399 == MODE_INT)
7400 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7401 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7402 else
7403 comparison = compare (exp, LE, LEU);
7404 break;
7405
7406 case GT_EXPR:
7407 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7408 == MODE_INT)
7409 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7410 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7411 else
7412 comparison = compare (exp, GT, GTU);
7413 break;
7414
7415 case GE_EXPR:
7416 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7417 == MODE_INT)
7418 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7419 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7420 else
7421 comparison = compare (exp, GE, GEU);
7422 break;
7423
7424 default:
7425 normal:
7426 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7427 #if 0
7428 /* This is not needed any more and causes poor code since it causes
7429 comparisons and tests from non-SI objects to have different code
7430 sequences. */
7431 /* Copy to register to avoid generating bad insns by cse
7432 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7433 if (!cse_not_expected && GET_CODE (temp) == MEM)
7434 temp = copy_to_reg (temp);
7435 #endif
7436 do_pending_stack_adjust ();
7437 if (GET_CODE (temp) == CONST_INT)
7438 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7439 else if (GET_CODE (temp) == LABEL_REF)
7440 comparison = const_true_rtx;
7441 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7442 && !can_compare_p (GET_MODE (temp)))
7443 /* Note swapping the labels gives us not-equal. */
7444 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7445 else if (GET_MODE (temp) != VOIDmode)
7446 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7447 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7448 GET_MODE (temp), NULL_RTX, 0);
7449 else
7450 abort ();
7451 }
7452
7453 /* Do any postincrements in the expression that was tested. */
7454 emit_queue ();
7455
7456 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7457 straight into a conditional jump instruction as the jump condition.
7458 Otherwise, all the work has been done already. */
7459
7460 if (comparison == const_true_rtx)
7461 {
7462 if (if_true_label)
7463 emit_jump (if_true_label);
7464 }
7465 else if (comparison == const0_rtx)
7466 {
7467 if (if_false_label)
7468 emit_jump (if_false_label);
7469 }
7470 else if (comparison)
7471 do_jump_for_compare (comparison, if_false_label, if_true_label);
7472
7473 free_temp_slots ();
7474
7475 if (drop_through_label)
7476 {
7477 /* If do_jump produces code that might be jumped around,
7478 do any stack adjusts from that code, before the place
7479 where control merges in. */
7480 do_pending_stack_adjust ();
7481 emit_label (drop_through_label);
7482 }
7483 }
7484 \f
7485 /* Given a comparison expression EXP for values too wide to be compared
7486 with one insn, test the comparison and jump to the appropriate label.
7487 The code of EXP is ignored; we always test GT if SWAP is 0,
7488 and LT if SWAP is 1. */
7489
7490 static void
7491 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7492 tree exp;
7493 int swap;
7494 rtx if_false_label, if_true_label;
7495 {
7496 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7497 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7498 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7499 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7500 rtx drop_through_label = 0;
7501 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7502 int i;
7503
7504 if (! if_true_label || ! if_false_label)
7505 drop_through_label = gen_label_rtx ();
7506 if (! if_true_label)
7507 if_true_label = drop_through_label;
7508 if (! if_false_label)
7509 if_false_label = drop_through_label;
7510
7511 /* Compare a word at a time, high order first. */
7512 for (i = 0; i < nwords; i++)
7513 {
7514 rtx comp;
7515 rtx op0_word, op1_word;
7516
7517 if (WORDS_BIG_ENDIAN)
7518 {
7519 op0_word = operand_subword_force (op0, i, mode);
7520 op1_word = operand_subword_force (op1, i, mode);
7521 }
7522 else
7523 {
7524 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7525 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7526 }
7527
7528 /* All but high-order word must be compared as unsigned. */
7529 comp = compare_from_rtx (op0_word, op1_word,
7530 (unsignedp || i > 0) ? GTU : GT,
7531 unsignedp, word_mode, NULL_RTX, 0);
7532 if (comp == const_true_rtx)
7533 emit_jump (if_true_label);
7534 else if (comp != const0_rtx)
7535 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7536
7537 /* Consider lower words only if these are equal. */
7538 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7539 NULL_RTX, 0);
7540 if (comp == const_true_rtx)
7541 emit_jump (if_false_label);
7542 else if (comp != const0_rtx)
7543 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7544 }
7545
7546 if (if_false_label)
7547 emit_jump (if_false_label);
7548 if (drop_through_label)
7549 emit_label (drop_through_label);
7550 }
7551
7552 /* Compare OP0 with OP1, word at a time, in mode MODE.
7553 UNSIGNEDP says to do unsigned comparison.
7554 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
7555
7556 static void
7557 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
7558 enum machine_mode mode;
7559 int unsignedp;
7560 rtx op0, op1;
7561 rtx if_false_label, if_true_label;
7562 {
7563 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7564 rtx drop_through_label = 0;
7565 int i;
7566
7567 if (! if_true_label || ! if_false_label)
7568 drop_through_label = gen_label_rtx ();
7569 if (! if_true_label)
7570 if_true_label = drop_through_label;
7571 if (! if_false_label)
7572 if_false_label = drop_through_label;
7573
7574 /* Compare a word at a time, high order first. */
7575 for (i = 0; i < nwords; i++)
7576 {
7577 rtx comp;
7578 rtx op0_word, op1_word;
7579
7580 if (WORDS_BIG_ENDIAN)
7581 {
7582 op0_word = operand_subword_force (op0, i, mode);
7583 op1_word = operand_subword_force (op1, i, mode);
7584 }
7585 else
7586 {
7587 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7588 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7589 }
7590
7591 /* All but high-order word must be compared as unsigned. */
7592 comp = compare_from_rtx (op0_word, op1_word,
7593 (unsignedp || i > 0) ? GTU : GT,
7594 unsignedp, word_mode, NULL_RTX, 0);
7595 if (comp == const_true_rtx)
7596 emit_jump (if_true_label);
7597 else if (comp != const0_rtx)
7598 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7599
7600 /* Consider lower words only if these are equal. */
7601 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7602 NULL_RTX, 0);
7603 if (comp == const_true_rtx)
7604 emit_jump (if_false_label);
7605 else if (comp != const0_rtx)
7606 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7607 }
7608
7609 if (if_false_label)
7610 emit_jump (if_false_label);
7611 if (drop_through_label)
7612 emit_label (drop_through_label);
7613 }
7614
7615 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7616 with one insn, test the comparison and jump to the appropriate label. */
7617
7618 static void
7619 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7620 tree exp;
7621 rtx if_false_label, if_true_label;
7622 {
7623 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7624 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7625 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7626 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7627 int i;
7628 rtx drop_through_label = 0;
7629
7630 if (! if_false_label)
7631 drop_through_label = if_false_label = gen_label_rtx ();
7632
7633 for (i = 0; i < nwords; i++)
7634 {
7635 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7636 operand_subword_force (op1, i, mode),
7637 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7638 word_mode, NULL_RTX, 0);
7639 if (comp == const_true_rtx)
7640 emit_jump (if_false_label);
7641 else if (comp != const0_rtx)
7642 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7643 }
7644
7645 if (if_true_label)
7646 emit_jump (if_true_label);
7647 if (drop_through_label)
7648 emit_label (drop_through_label);
7649 }
7650 \f
7651 /* Jump according to whether OP0 is 0.
7652 We assume that OP0 has an integer mode that is too wide
7653 for the available compare insns. */
7654
7655 static void
7656 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7657 rtx op0;
7658 rtx if_false_label, if_true_label;
7659 {
7660 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7661 int i;
7662 rtx drop_through_label = 0;
7663
7664 if (! if_false_label)
7665 drop_through_label = if_false_label = gen_label_rtx ();
7666
7667 for (i = 0; i < nwords; i++)
7668 {
7669 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7670 GET_MODE (op0)),
7671 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7672 if (comp == const_true_rtx)
7673 emit_jump (if_false_label);
7674 else if (comp != const0_rtx)
7675 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7676 }
7677
7678 if (if_true_label)
7679 emit_jump (if_true_label);
7680 if (drop_through_label)
7681 emit_label (drop_through_label);
7682 }
7683
7684 /* Given a comparison expression in rtl form, output conditional branches to
7685 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7686
7687 static void
7688 do_jump_for_compare (comparison, if_false_label, if_true_label)
7689 rtx comparison, if_false_label, if_true_label;
7690 {
7691 if (if_true_label)
7692 {
7693 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7694 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7695 else
7696 abort ();
7697
7698 if (if_false_label)
7699 emit_jump (if_false_label);
7700 }
7701 else if (if_false_label)
7702 {
7703 rtx insn;
7704 rtx prev = PREV_INSN (get_last_insn ());
7705 rtx branch = 0;
7706
7707 /* Output the branch with the opposite condition. Then try to invert
7708 what is generated. If more than one insn is a branch, or if the
7709 branch is not the last insn written, abort. If we can't invert
7710 the branch, emit make a true label, redirect this jump to that,
7711 emit a jump to the false label and define the true label. */
7712
7713 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7714 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7715 else
7716 abort ();
7717
7718 /* Here we get the insn before what was just emitted.
7719 On some machines, emitting the branch can discard
7720 the previous compare insn and emit a replacement. */
7721 if (prev == 0)
7722 /* If there's only one preceding insn... */
7723 insn = get_insns ();
7724 else
7725 insn = NEXT_INSN (prev);
7726
7727 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7728 if (GET_CODE (insn) == JUMP_INSN)
7729 {
7730 if (branch)
7731 abort ();
7732 branch = insn;
7733 }
7734
7735 if (branch != get_last_insn ())
7736 abort ();
7737
7738 if (! invert_jump (branch, if_false_label))
7739 {
7740 if_true_label = gen_label_rtx ();
7741 redirect_jump (branch, if_true_label);
7742 emit_jump (if_false_label);
7743 emit_label (if_true_label);
7744 }
7745 }
7746 }
7747 \f
7748 /* Generate code for a comparison expression EXP
7749 (including code to compute the values to be compared)
7750 and set (CC0) according to the result.
7751 SIGNED_CODE should be the rtx operation for this comparison for
7752 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7753
7754 We force a stack adjustment unless there are currently
7755 things pushed on the stack that aren't yet used. */
7756
7757 static rtx
7758 compare (exp, signed_code, unsigned_code)
7759 register tree exp;
7760 enum rtx_code signed_code, unsigned_code;
7761 {
7762 register rtx op0
7763 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7764 register rtx op1
7765 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7766 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7767 register enum machine_mode mode = TYPE_MODE (type);
7768 int unsignedp = TREE_UNSIGNED (type);
7769 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7770
7771 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7772 ((mode == BLKmode)
7773 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7774 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7775 }
7776
7777 /* Like compare but expects the values to compare as two rtx's.
7778 The decision as to signed or unsigned comparison must be made by the caller.
7779
7780 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7781 compared.
7782
7783 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7784 size of MODE should be used. */
7785
7786 rtx
7787 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7788 register rtx op0, op1;
7789 enum rtx_code code;
7790 int unsignedp;
7791 enum machine_mode mode;
7792 rtx size;
7793 int align;
7794 {
7795 rtx tem;
7796
7797 /* If one operand is constant, make it the second one. Only do this
7798 if the other operand is not constant as well. */
7799
7800 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7801 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
7802 {
7803 tem = op0;
7804 op0 = op1;
7805 op1 = tem;
7806 code = swap_condition (code);
7807 }
7808
7809 if (flag_force_mem)
7810 {
7811 op0 = force_not_mem (op0);
7812 op1 = force_not_mem (op1);
7813 }
7814
7815 do_pending_stack_adjust ();
7816
7817 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7818 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7819 return tem;
7820
7821 #if 0
7822 /* There's no need to do this now that combine.c can eliminate lots of
7823 sign extensions. This can be less efficient in certain cases on other
7824 machines. */
7825
7826 /* If this is a signed equality comparison, we can do it as an
7827 unsigned comparison since zero-extension is cheaper than sign
7828 extension and comparisons with zero are done as unsigned. This is
7829 the case even on machines that can do fast sign extension, since
7830 zero-extension is easier to combine with other operations than
7831 sign-extension is. If we are comparing against a constant, we must
7832 convert it to what it would look like unsigned. */
7833 if ((code == EQ || code == NE) && ! unsignedp
7834 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7835 {
7836 if (GET_CODE (op1) == CONST_INT
7837 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7838 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
7839 unsignedp = 1;
7840 }
7841 #endif
7842
7843 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7844
7845 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7846 }
7847 \f
7848 /* Generate code to calculate EXP using a store-flag instruction
7849 and return an rtx for the result. EXP is either a comparison
7850 or a TRUTH_NOT_EXPR whose operand is a comparison.
7851
7852 If TARGET is nonzero, store the result there if convenient.
7853
7854 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7855 cheap.
7856
7857 Return zero if there is no suitable set-flag instruction
7858 available on this machine.
7859
7860 Once expand_expr has been called on the arguments of the comparison,
7861 we are committed to doing the store flag, since it is not safe to
7862 re-evaluate the expression. We emit the store-flag insn by calling
7863 emit_store_flag, but only expand the arguments if we have a reason
7864 to believe that emit_store_flag will be successful. If we think that
7865 it will, but it isn't, we have to simulate the store-flag with a
7866 set/jump/set sequence. */
7867
7868 static rtx
7869 do_store_flag (exp, target, mode, only_cheap)
7870 tree exp;
7871 rtx target;
7872 enum machine_mode mode;
7873 int only_cheap;
7874 {
7875 enum rtx_code code;
7876 tree arg0, arg1, type;
7877 tree tem;
7878 enum machine_mode operand_mode;
7879 int invert = 0;
7880 int unsignedp;
7881 rtx op0, op1;
7882 enum insn_code icode;
7883 rtx subtarget = target;
7884 rtx result, label, pattern, jump_pat;
7885
7886 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7887 result at the end. We can't simply invert the test since it would
7888 have already been inverted if it were valid. This case occurs for
7889 some floating-point comparisons. */
7890
7891 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7892 invert = 1, exp = TREE_OPERAND (exp, 0);
7893
7894 arg0 = TREE_OPERAND (exp, 0);
7895 arg1 = TREE_OPERAND (exp, 1);
7896 type = TREE_TYPE (arg0);
7897 operand_mode = TYPE_MODE (type);
7898 unsignedp = TREE_UNSIGNED (type);
7899
7900 /* We won't bother with BLKmode store-flag operations because it would mean
7901 passing a lot of information to emit_store_flag. */
7902 if (operand_mode == BLKmode)
7903 return 0;
7904
7905 STRIP_NOPS (arg0);
7906 STRIP_NOPS (arg1);
7907
7908 /* Get the rtx comparison code to use. We know that EXP is a comparison
7909 operation of some type. Some comparisons against 1 and -1 can be
7910 converted to comparisons with zero. Do so here so that the tests
7911 below will be aware that we have a comparison with zero. These
7912 tests will not catch constants in the first operand, but constants
7913 are rarely passed as the first operand. */
7914
7915 switch (TREE_CODE (exp))
7916 {
7917 case EQ_EXPR:
7918 code = EQ;
7919 break;
7920 case NE_EXPR:
7921 code = NE;
7922 break;
7923 case LT_EXPR:
7924 if (integer_onep (arg1))
7925 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7926 else
7927 code = unsignedp ? LTU : LT;
7928 break;
7929 case LE_EXPR:
7930 if (! unsignedp && integer_all_onesp (arg1))
7931 arg1 = integer_zero_node, code = LT;
7932 else
7933 code = unsignedp ? LEU : LE;
7934 break;
7935 case GT_EXPR:
7936 if (! unsignedp && integer_all_onesp (arg1))
7937 arg1 = integer_zero_node, code = GE;
7938 else
7939 code = unsignedp ? GTU : GT;
7940 break;
7941 case GE_EXPR:
7942 if (integer_onep (arg1))
7943 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7944 else
7945 code = unsignedp ? GEU : GE;
7946 break;
7947 default:
7948 abort ();
7949 }
7950
7951 /* Put a constant second. */
7952 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7953 {
7954 tem = arg0; arg0 = arg1; arg1 = tem;
7955 code = swap_condition (code);
7956 }
7957
7958 /* If this is an equality or inequality test of a single bit, we can
7959 do this by shifting the bit being tested to the low-order bit and
7960 masking the result with the constant 1. If the condition was EQ,
7961 we xor it with 1. This does not require an scc insn and is faster
7962 than an scc insn even if we have it. */
7963
7964 if ((code == NE || code == EQ)
7965 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7966 && integer_pow2p (TREE_OPERAND (arg0, 1))
7967 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7968 {
7969 tree inner = TREE_OPERAND (arg0, 0);
7970 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7971 NULL_RTX, VOIDmode, 0)));
7972 int ops_unsignedp;
7973
7974 /* If INNER is a right shift of a constant and it plus BITNUM does
7975 not overflow, adjust BITNUM and INNER. */
7976
7977 if (TREE_CODE (inner) == RSHIFT_EXPR
7978 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7979 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7980 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
7981 < TYPE_PRECISION (type)))
7982 {
7983 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7984 inner = TREE_OPERAND (inner, 0);
7985 }
7986
7987 /* If we are going to be able to omit the AND below, we must do our
7988 operations as unsigned. If we must use the AND, we have a choice.
7989 Normally unsigned is faster, but for some machines signed is. */
7990 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
7991 #ifdef BYTE_LOADS_SIGN_EXTEND
7992 : 0
7993 #else
7994 : 1
7995 #endif
7996 );
7997
7998 if (subtarget == 0 || GET_CODE (subtarget) != REG
7999 || GET_MODE (subtarget) != operand_mode
8000 || ! safe_from_p (subtarget, inner))
8001 subtarget = 0;
8002
8003 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
8004
8005 if (bitnum != 0)
8006 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
8007 size_int (bitnum), target, ops_unsignedp);
8008
8009 if (GET_MODE (op0) != mode)
8010 op0 = convert_to_mode (mode, op0, ops_unsignedp);
8011
8012 if ((code == EQ && ! invert) || (code == NE && invert))
8013 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target,
8014 ops_unsignedp, OPTAB_LIB_WIDEN);
8015
8016 /* Put the AND last so it can combine with more things. */
8017 if (bitnum != TYPE_PRECISION (type) - 1)
8018 op0 = expand_and (op0, const1_rtx, target);
8019
8020 return op0;
8021 }
8022
8023 /* Now see if we are likely to be able to do this. Return if not. */
8024 if (! can_compare_p (operand_mode))
8025 return 0;
8026 icode = setcc_gen_code[(int) code];
8027 if (icode == CODE_FOR_nothing
8028 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
8029 {
8030 /* We can only do this if it is one of the special cases that
8031 can be handled without an scc insn. */
8032 if ((code == LT && integer_zerop (arg1))
8033 || (! only_cheap && code == GE && integer_zerop (arg1)))
8034 ;
8035 else if (BRANCH_COST >= 0
8036 && ! only_cheap && (code == NE || code == EQ)
8037 && TREE_CODE (type) != REAL_TYPE
8038 && ((abs_optab->handlers[(int) operand_mode].insn_code
8039 != CODE_FOR_nothing)
8040 || (ffs_optab->handlers[(int) operand_mode].insn_code
8041 != CODE_FOR_nothing)))
8042 ;
8043 else
8044 return 0;
8045 }
8046
8047 preexpand_calls (exp);
8048 if (subtarget == 0 || GET_CODE (subtarget) != REG
8049 || GET_MODE (subtarget) != operand_mode
8050 || ! safe_from_p (subtarget, arg1))
8051 subtarget = 0;
8052
8053 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
8054 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
8055
8056 if (target == 0)
8057 target = gen_reg_rtx (mode);
8058
8059 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
8060 because, if the emit_store_flag does anything it will succeed and
8061 OP0 and OP1 will not be used subsequently. */
8062
8063 result = emit_store_flag (target, code,
8064 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
8065 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
8066 operand_mode, unsignedp, 1);
8067
8068 if (result)
8069 {
8070 if (invert)
8071 result = expand_binop (mode, xor_optab, result, const1_rtx,
8072 result, 0, OPTAB_LIB_WIDEN);
8073 return result;
8074 }
8075
8076 /* If this failed, we have to do this with set/compare/jump/set code. */
8077 if (target == 0 || GET_CODE (target) != REG
8078 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8079 target = gen_reg_rtx (GET_MODE (target));
8080
8081 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8082 result = compare_from_rtx (op0, op1, code, unsignedp,
8083 operand_mode, NULL_RTX, 0);
8084 if (GET_CODE (result) == CONST_INT)
8085 return (((result == const0_rtx && ! invert)
8086 || (result != const0_rtx && invert))
8087 ? const0_rtx : const1_rtx);
8088
8089 label = gen_label_rtx ();
8090 if (bcc_gen_fctn[(int) code] == 0)
8091 abort ();
8092
8093 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8094 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8095 emit_label (label);
8096
8097 return target;
8098 }
8099 \f
8100 /* Generate a tablejump instruction (used for switch statements). */
8101
8102 #ifdef HAVE_tablejump
8103
8104 /* INDEX is the value being switched on, with the lowest value
8105 in the table already subtracted.
8106 MODE is its expected mode (needed if INDEX is constant).
8107 RANGE is the length of the jump table.
8108 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8109
8110 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8111 index value is out of range. */
8112
8113 void
8114 do_tablejump (index, mode, range, table_label, default_label)
8115 rtx index, range, table_label, default_label;
8116 enum machine_mode mode;
8117 {
8118 register rtx temp, vector;
8119
8120 /* Do an unsigned comparison (in the proper mode) between the index
8121 expression and the value which represents the length of the range.
8122 Since we just finished subtracting the lower bound of the range
8123 from the index expression, this comparison allows us to simultaneously
8124 check that the original index expression value is both greater than
8125 or equal to the minimum value of the range and less than or equal to
8126 the maximum value of the range. */
8127
8128 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
8129 emit_jump_insn (gen_bltu (default_label));
8130
8131 /* If index is in range, it must fit in Pmode.
8132 Convert to Pmode so we can index with it. */
8133 if (mode != Pmode)
8134 index = convert_to_mode (Pmode, index, 1);
8135
8136 /* If flag_force_addr were to affect this address
8137 it could interfere with the tricky assumptions made
8138 about addresses that contain label-refs,
8139 which may be valid only very near the tablejump itself. */
8140 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8141 GET_MODE_SIZE, because this indicates how large insns are. The other
8142 uses should all be Pmode, because they are addresses. This code
8143 could fail if addresses and insns are not the same size. */
8144 index = memory_address_noforce
8145 (CASE_VECTOR_MODE,
8146 gen_rtx (PLUS, Pmode,
8147 gen_rtx (MULT, Pmode, index,
8148 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8149 gen_rtx (LABEL_REF, Pmode, table_label)));
8150 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8151 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
8152 RTX_UNCHANGING_P (vector) = 1;
8153 convert_move (temp, vector, 0);
8154
8155 emit_jump_insn (gen_tablejump (temp, table_label));
8156
8157 #ifndef CASE_VECTOR_PC_RELATIVE
8158 /* If we are generating PIC code or if the table is PC-relative, the
8159 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8160 if (! flag_pic)
8161 emit_barrier ();
8162 #endif
8163 }
8164
8165 #endif /* HAVE_tablejump */
This page took 0.44029 seconds and 5 git commands to generate.