[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include "config.h"
22 #include "rtl.h"
23 #include "tree.h"
24 #include "flags.h"
25 #include "function.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "expr.h"
29 #include "insn-config.h"
30 #include "recog.h"
31 #include "output.h"
32 #include "gvarargs.h"
33 #include "typeclass.h"
34
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
36
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first.
39
40 They should if the stack and args grow in opposite directions, but
41 only if we have push insns. */
42
43 #ifdef PUSH_ROUNDING
44
45 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
46 #define PUSH_ARGS_REVERSED /* If it's last to first */
47 #endif
48
49 #endif
50
51 #ifndef STACK_PUSH_CODE
52 #ifdef STACK_GROWS_DOWNWARD
53 #define STACK_PUSH_CODE PRE_DEC
54 #else
55 #define STACK_PUSH_CODE PRE_INC
56 #endif
57 #endif
58
59 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
60 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
61
62 /* If this is nonzero, we do not bother generating VOLATILE
63 around volatile memory references, and we are willing to
64 output indirect addresses. If cse is to follow, we reject
65 indirect addresses so a useful potential cse is generated;
66 if it is used only once, instruction combination will produce
67 the same indirect address eventually. */
68 int cse_not_expected;
69
70 /* Nonzero to generate code for all the subroutines within an
71 expression before generating the upper levels of the expression.
72 Nowadays this is never zero. */
73 int do_preexpand_calls = 1;
74
75 /* Number of units that we should eventually pop off the stack.
76 These are the arguments to function calls that have already returned. */
77 int pending_stack_adjust;
78
79 /* Nonzero means stack pops must not be deferred, and deferred stack
80 pops must not be output. It is nonzero inside a function call,
81 inside a conditional expression, inside a statement expression,
82 and in other cases as well. */
83 int inhibit_defer_pop;
84
85 /* A list of all cleanups which belong to the arguments of
86 function calls being expanded by expand_call. */
87 tree cleanups_this_call;
88
89 /* Nonzero means __builtin_saveregs has already been done in this function.
90 The value is the pseudoreg containing the value __builtin_saveregs
91 returned. */
92 static rtx saveregs_value;
93
94 /* Similarly for __builtin_apply_args. */
95 static rtx apply_args_value;
96
97 /* This structure is used by move_by_pieces to describe the move to
98 be performed. */
99
100 struct move_by_pieces
101 {
102 rtx to;			/* Destination MEM.  */
103 rtx to_addr;			/* Address of the destination.  */
104 int autinc_to;		/* Nonzero if TO_ADDR is an autoincrement address.  */
105 int explicit_inc_to;		/* +1/-1 if explicit add insns do the post-inc/pre-dec.  */
106 rtx from;			/* Source MEM.  */
107 rtx from_addr;		/* Address of the source.  */
108 int autinc_from;		/* Nonzero if FROM_ADDR is an autoincrement address.  */
109 int explicit_inc_from;	/* +1/-1 if explicit add insns do the post-inc/pre-dec.  */
110 int len;			/* Number of bytes left to move.  */
111 int offset;			/* Running byte offset for non-autoincrement addressing.  */
112 int reverse;			/* Nonzero to copy from the highest address downward.  */
113 };
114
115 static rtx enqueue_insn PROTO((rtx, rtx));
116 static int queued_subexp_p PROTO((rtx));
117 static void init_queue PROTO((void));
118 static void move_by_pieces PROTO((rtx, rtx, int, int));
119 static int move_by_pieces_ninsns PROTO((unsigned int, int));
120 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
121 struct move_by_pieces *));
122 static void group_insns PROTO((rtx));
123 static void store_constructor PROTO((tree, rtx));
124 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
125 enum machine_mode, int, int, int));
126 static tree save_noncopied_parts PROTO((tree, tree));
127 static tree init_noncopied_parts PROTO((tree, tree));
128 static int safe_from_p PROTO((rtx, tree));
129 static int fixed_type_p PROTO((tree));
130 static int get_pointer_alignment PROTO((tree, unsigned));
131 static tree string_constant PROTO((tree, tree *));
132 static tree c_strlen PROTO((tree));
133 static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
134 static int apply_args_size PROTO((void));
135 static int apply_result_size PROTO((void));
136 static rtx result_vector PROTO((int, rtx));
137 static rtx expand_builtin_apply_args PROTO((void));
138 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
139 static void expand_builtin_return PROTO((rtx));
140 static rtx expand_increment PROTO((tree, int));
141 static void preexpand_calls PROTO((tree));
142 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
143 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
144 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
145 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
146 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
147 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
148
149 /* Record for each mode whether we can move a register directly to or
150 from an object of that mode in memory. If we can't, we won't try
151 to use that mode directly when accessing a field of that mode. */
152
153 static char direct_load[NUM_MACHINE_MODES];
154 static char direct_store[NUM_MACHINE_MODES];
155
156 /* MOVE_RATIO is the number of move instructions that is better than
157 a block move. */
158
159 #ifndef MOVE_RATIO
160 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
161 #define MOVE_RATIO 2
162 #else
163 /* A value of around 6 would minimize code size; infinity would minimize
164 execution time. */
165 #define MOVE_RATIO 15
166 #endif
167 #endif
168
169 /* This array records the insn_code of insns to perform block moves. */
170 static enum insn_code movstr_optab[NUM_MACHINE_MODES];
171
172 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
173
174 #ifndef SLOW_UNALIGNED_ACCESS
175 #define SLOW_UNALIGNED_ACCESS 0
176 #endif
177
178 /* Register mappings for target machines without register windows. */
179 #ifndef INCOMING_REGNO
180 #define INCOMING_REGNO(OUT) (OUT)
181 #endif
182 #ifndef OUTGOING_REGNO
183 #define OUTGOING_REGNO(IN) (IN)
184 #endif
185 \f
186 /* This is run once per compilation to set up which modes can be used
187 directly in memory and to initialize the block move optab. */
188
189 void
190 init_expr_once ()
191 {
192 rtx insn, pat;
193 enum machine_mode mode;
194 /* Try indexing by frame ptr and try by stack ptr.
195 It is known that on the Convex the stack ptr isn't a valid index.
196 With luck, one or the other is valid on any machine. */
197 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
198 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
199
200 start_sequence ();
201 insn = emit_insn (gen_rtx (SET, 0, 0));
202 pat = PATTERN (insn);
203
204 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
205 mode = (enum machine_mode) ((int) mode + 1))
206 {
207 int regno;
208 rtx reg;
209 int num_clobbers;
210
211 direct_load[(int) mode] = direct_store[(int) mode] = 0;
212 PUT_MODE (mem, mode);
213 PUT_MODE (mem1, mode);
214
215 /* See if there is some register that can be used in this mode and
216 directly loaded or stored from memory. */
217
218 if (mode != VOIDmode && mode != BLKmode)
219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
220 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
221 regno++)
222 {
223 if (! HARD_REGNO_MODE_OK (regno, mode))
224 continue;
225
226 reg = gen_rtx (REG, mode, regno);
227
228 SET_SRC (pat) = mem;
229 SET_DEST (pat) = reg;
230 if (recog (pat, insn, &num_clobbers) >= 0)
231 direct_load[(int) mode] = 1;
232
233 SET_SRC (pat) = mem1;
234 SET_DEST (pat) = reg;
235 if (recog (pat, insn, &num_clobbers) >= 0)
236 direct_load[(int) mode] = 1;
237
238 SET_SRC (pat) = reg;
239 SET_DEST (pat) = mem;
240 if (recog (pat, insn, &num_clobbers) >= 0)
241 direct_store[(int) mode] = 1;
242
243 SET_SRC (pat) = reg;
244 SET_DEST (pat) = mem1;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_store[(int) mode] = 1;
247 }
248
249 movstr_optab[(int) mode] = CODE_FOR_nothing;
250 }
251
252 end_sequence ();
253
254 #ifdef HAVE_movstrqi
255 if (HAVE_movstrqi)
256 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
257 #endif
258 #ifdef HAVE_movstrhi
259 if (HAVE_movstrhi)
260 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
261 #endif
262 #ifdef HAVE_movstrsi
263 if (HAVE_movstrsi)
264 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
265 #endif
266 #ifdef HAVE_movstrdi
267 if (HAVE_movstrdi)
268 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
269 #endif
270 #ifdef HAVE_movstrti
271 if (HAVE_movstrti)
272 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
273 #endif
274 }
275
276 /* This is run at the start of compiling a function. */
277
278 void
279 init_expr ()
280 {
281 init_queue ();
282
283 pending_stack_adjust = 0;
284 inhibit_defer_pop = 0;
285 cleanups_this_call = 0;
286 saveregs_value = 0;
287 apply_args_value = 0;
288 forced_labels = 0;
289 }
290
291 /* Save all variables describing the current status into the structure *P.
292 This is used before starting a nested function. */
293
294 void
295 save_expr_status (p)
296 struct function *p;
297 {
298 /* Instead of saving the postincrement queue, empty it. */
299 emit_queue ();
300
301 p->pending_stack_adjust = pending_stack_adjust;
302 p->inhibit_defer_pop = inhibit_defer_pop;
303 p->cleanups_this_call = cleanups_this_call;
304 p->saveregs_value = saveregs_value;
305 p->apply_args_value = apply_args_value;
306 p->forced_labels = forced_labels;
307
308 pending_stack_adjust = 0;
309 inhibit_defer_pop = 0;
310 cleanups_this_call = 0;
311 saveregs_value = 0;
312 apply_args_value = 0;
313 forced_labels = 0;
314 }
315
316 /* Restore all variables describing the current status from the structure *P.
317 This is used after a nested function. */
318
319 void
320 restore_expr_status (p)
321 struct function *p;
322 {
323 pending_stack_adjust = p->pending_stack_adjust;
324 inhibit_defer_pop = p->inhibit_defer_pop;
325 cleanups_this_call = p->cleanups_this_call;
326 saveregs_value = p->saveregs_value;
327 apply_args_value = p->apply_args_value;
328 forced_labels = p->forced_labels;
329 }
330 \f
331 /* Manage the queue of increment instructions to be output
332 for POSTINCREMENT_EXPR expressions, etc. */
333
334 static rtx pending_chain;
335
336 /* Queue up to increment (or change) VAR later. BODY says how:
337 BODY should be the same thing you would pass to emit_insn
338 to increment right away. It will go to emit_insn later on.
339
340 The value is a QUEUED expression to be used in place of VAR
341 where you want to guarantee the pre-incrementation value of VAR. */
342
343 static rtx
344 enqueue_insn (var, body)
345 rtx var, body;
346 {
347 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
348 var, NULL_RTX, NULL_RTX, body, pending_chain);
349 return pending_chain;
350 }
351
352 /* Use protect_from_queue to convert a QUEUED expression
353 into something that you can put immediately into an instruction.
354 If the queued incrementation has not happened yet,
355 protect_from_queue returns the variable itself.
356 If the incrementation has happened, protect_from_queue returns a temp
357 that contains a copy of the old value of the variable.
358
359 Any time an rtx which might possibly be a QUEUED is to be put
360 into an instruction, it must be passed through protect_from_queue first.
361 QUEUED expressions are not meaningful in instructions.
362
363 Do not pass a value through protect_from_queue and then hold
364 on to it for a while before putting it in an instruction!
365 If the queue is flushed in between, incorrect code will result. */
366
367 rtx
368 protect_from_queue (x, modify)
369 register rtx x;
370 int modify;
371 {
372 register RTX_CODE code = GET_CODE (x);
373
374 #if 0 /* A QUEUED can hang around after the queue is forced out. */
375 /* Shortcut for most common case. */
376 if (pending_chain == 0)
377 return x;
378 #endif
379
380 if (code != QUEUED)
381 {
382 /* A special hack for read access to (MEM (QUEUED ...))
383 to facilitate use of autoincrement.
384 Make a copy of the contents of the memory location
385 rather than a copy of the address, but not
386 if the value is of mode BLKmode. */
387 if (code == MEM && GET_MODE (x) != BLKmode
388 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
389 {
390 register rtx y = XEXP (x, 0);
391 XEXP (x, 0) = QUEUED_VAR (y);
392 if (QUEUED_INSN (y))
393 {
394 register rtx temp = gen_reg_rtx (GET_MODE (x));
395 emit_insn_before (gen_move_insn (temp, x),
396 QUEUED_INSN (y));
397 return temp;
398 }
399 return x;
400 }
401 /* Otherwise, recursively protect the subexpressions of all
402 the kinds of rtx's that can contain a QUEUED. */
403 if (code == MEM)
404 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
405 else if (code == PLUS || code == MULT)
406 {
407 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
408 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
409 }
410 return x;
411 }
412 /* If the increment has not happened, use the variable itself. */
413 if (QUEUED_INSN (x) == 0)
414 return QUEUED_VAR (x);
415 /* If the increment has happened and a pre-increment copy exists,
416 use that copy. */
417 if (QUEUED_COPY (x) != 0)
418 return QUEUED_COPY (x);
419 /* The increment has happened but we haven't set up a pre-increment copy.
420 Set one up now, and use it. */
421 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
422 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
423 QUEUED_INSN (x));
424 return QUEUED_COPY (x);
425 }
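
/* A minimal usage sketch (not part of the original file) of the increment
   queue: queue `VAR = VAR + 1', use the pre-increment value through
   protect_from_queue, then flush the queue.  The function name is
   hypothetical.  */
#if 0
static void
example_post_increment (var)
     rtx var;
{
  /* Queue the increment instead of emitting it now.  */
  rtx q = enqueue_insn (var, gen_rtx (SET, VOIDmode, var,
				      gen_rtx (PLUS, GET_MODE (var),
					       var, const1_rtx)));

  /* Every use of the (possibly queued) value must pass through
     protect_from_queue before going into an insn.  */
  rtx old_value = protect_from_queue (q, 0);
  emit_move_insn (gen_reg_rtx (GET_MODE (var)), old_value);

  /* Emit all pending increments; VAR is bumped here.  */
  emit_queue ();
}
#endif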
426
427 /* Return nonzero if X contains a QUEUED expression:
428 if it contains anything that will be altered by a queued increment.
429 We handle only combinations of MEM, PLUS, MINUS and MULT operators
430 since memory addresses generally contain only those. */
431
432 static int
433 queued_subexp_p (x)
434 rtx x;
435 {
436 register enum rtx_code code = GET_CODE (x);
437 switch (code)
438 {
439 case QUEUED:
440 return 1;
441 case MEM:
442 return queued_subexp_p (XEXP (x, 0));
443 case MULT:
444 case PLUS:
445 case MINUS:
446 return queued_subexp_p (XEXP (x, 0))
447 || queued_subexp_p (XEXP (x, 1));
448 }
449 return 0;
450 }
451
452 /* Perform all the pending incrementations. */
453
454 void
455 emit_queue ()
456 {
457 register rtx p;
458 while (p = pending_chain)
459 {
460 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
461 pending_chain = QUEUED_NEXT (p);
462 }
463 }
464
465 static void
466 init_queue ()
467 {
468 if (pending_chain)
469 abort ();
470 }
471 \f
472 /* Copy data from FROM to TO, where the machine modes are not the same.
473 Both modes may be integer, or both may be floating.
474 UNSIGNEDP should be nonzero if FROM is an unsigned type.
475 This causes zero-extension instead of sign-extension. */
476
477 void
478 convert_move (to, from, unsignedp)
479 register rtx to, from;
480 int unsignedp;
481 {
482 enum machine_mode to_mode = GET_MODE (to);
483 enum machine_mode from_mode = GET_MODE (from);
484 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
485 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
486 enum insn_code code;
487 rtx libcall;
488
489 /* rtx code for making an equivalent value. */
490 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
491
492 to = protect_from_queue (to, 1);
493 from = protect_from_queue (from, 0);
494
495 if (to_real != from_real)
496 abort ();
497
498 /* If FROM is a SUBREG that indicates that we have already done at least
499 the required extension, strip it. We don't handle such SUBREGs as
500 TO here. */
501
502 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
503 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
504 >= GET_MODE_SIZE (to_mode))
505 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
506 from = gen_lowpart (to_mode, from), from_mode = to_mode;
507
508 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
509 abort ();
510
511 if (to_mode == from_mode
512 || (from_mode == VOIDmode && CONSTANT_P (from)))
513 {
514 emit_move_insn (to, from);
515 return;
516 }
517
518 if (to_real)
519 {
520 #ifdef HAVE_extendqfhf2
521 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
522 {
523 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
524 return;
525 }
526 #endif
527 #ifdef HAVE_extendqfsf2
528 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
529 {
530 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
531 return;
532 }
533 #endif
534 #ifdef HAVE_extendqfdf2
535 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
536 {
537 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
538 return;
539 }
540 #endif
541 #ifdef HAVE_extendqfxf2
542 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
543 {
544 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
545 return;
546 }
547 #endif
548 #ifdef HAVE_extendqftf2
549 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
550 {
551 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
552 return;
553 }
554 #endif
555
556 #ifdef HAVE_extendhfsf2
557 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
558 {
559 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
560 return;
561 }
562 #endif
563 #ifdef HAVE_extendhfdf2
564 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
565 {
566 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
567 return;
568 }
569 #endif
570 #ifdef HAVE_extendhfxf2
571 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
572 {
573 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
574 return;
575 }
576 #endif
577 #ifdef HAVE_extendhftf2
578 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
579 {
580 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
581 return;
582 }
583 #endif
584
585 #ifdef HAVE_extendsfdf2
586 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
587 {
588 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
589 return;
590 }
591 #endif
592 #ifdef HAVE_extendsfxf2
593 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
594 {
595 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
596 return;
597 }
598 #endif
599 #ifdef HAVE_extendsftf2
600 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
601 {
602 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
603 return;
604 }
605 #endif
606 #ifdef HAVE_extenddfxf2
607 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
608 {
609 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
610 return;
611 }
612 #endif
613 #ifdef HAVE_extenddftf2
614 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
615 {
616 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
617 return;
618 }
619 #endif
620
621 #ifdef HAVE_trunchfqf2
622 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
623 {
624 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
625 return;
626 }
627 #endif
628 #ifdef HAVE_truncsfqf2
629 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
630 {
631 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
632 return;
633 }
634 #endif
635 #ifdef HAVE_truncdfqf2
636 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
637 {
638 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
639 return;
640 }
641 #endif
642 #ifdef HAVE_truncxfqf2
643 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
644 {
645 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
646 return;
647 }
648 #endif
649 #ifdef HAVE_trunctfqf2
650 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
651 {
652 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
653 return;
654 }
655 #endif
656 #ifdef HAVE_truncsfhf2
657 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
658 {
659 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
660 return;
661 }
662 #endif
663 #ifdef HAVE_truncdfhf2
664 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
665 {
666 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
667 return;
668 }
669 #endif
670 #ifdef HAVE_truncxfhf2
671 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
672 {
673 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
674 return;
675 }
676 #endif
677 #ifdef HAVE_trunctfhf2
678 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
679 {
680 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
681 return;
682 }
683 #endif
684 #ifdef HAVE_truncdfsf2
685 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
686 {
687 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
688 return;
689 }
690 #endif
691 #ifdef HAVE_truncxfsf2
692 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
693 {
694 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
695 return;
696 }
697 #endif
698 #ifdef HAVE_trunctfsf2
699 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
700 {
701 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
702 return;
703 }
704 #endif
705 #ifdef HAVE_truncxfdf2
706 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
707 {
708 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
709 return;
710 }
711 #endif
712 #ifdef HAVE_trunctfdf2
713 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
714 {
715 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
716 return;
717 }
718 #endif
719
720 libcall = (rtx) 0;
721 switch (from_mode)
722 {
723 case SFmode:
724 switch (to_mode)
725 {
726 case DFmode:
727 libcall = extendsfdf2_libfunc;
728 break;
729
730 case XFmode:
731 libcall = extendsfxf2_libfunc;
732 break;
733
734 case TFmode:
735 libcall = extendsftf2_libfunc;
736 break;
737 }
738 break;
739
740 case DFmode:
741 switch (to_mode)
742 {
743 case SFmode:
744 libcall = truncdfsf2_libfunc;
745 break;
746
747 case XFmode:
748 libcall = extenddfxf2_libfunc;
749 break;
750
751 case TFmode:
752 libcall = extenddftf2_libfunc;
753 break;
754 }
755 break;
756
757 case XFmode:
758 switch (to_mode)
759 {
760 case SFmode:
761 libcall = truncxfsf2_libfunc;
762 break;
763
764 case DFmode:
765 libcall = truncxfdf2_libfunc;
766 break;
767 }
768 break;
769
770 case TFmode:
771 switch (to_mode)
772 {
773 case SFmode:
774 libcall = trunctfsf2_libfunc;
775 break;
776
777 case DFmode:
778 libcall = trunctfdf2_libfunc;
779 break;
780 }
781 break;
782 }
783
784 if (libcall == (rtx) 0)
785 /* This conversion is not implemented yet. */
786 abort ();
787
788 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
789 emit_move_insn (to, hard_libcall_value (to_mode));
790 return;
791 }
792
793 /* Now both modes are integers. */
794
795 /* Handle expanding beyond a word. */
796 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
797 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
798 {
799 rtx insns;
800 rtx lowpart;
801 rtx fill_value;
802 rtx lowfrom;
803 int i;
804 enum machine_mode lowpart_mode;
805 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
806
807 /* Try converting directly if the insn is supported. */
808 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
809 != CODE_FOR_nothing)
810 {
811 /* If FROM is a SUBREG, put it into a register. Do this
812 so that we always generate the same set of insns for
813 better cse'ing; if an intermediate assignment occurred,
814 we won't be doing the operation directly on the SUBREG. */
815 if (optimize > 0 && GET_CODE (from) == SUBREG)
816 from = force_reg (from_mode, from);
817 emit_unop_insn (code, to, from, equiv_code);
818 return;
819 }
820 /* Next, try converting via full word. */
821 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
822 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
823 != CODE_FOR_nothing))
824 {
825 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
826 emit_unop_insn (code, to,
827 gen_lowpart (word_mode, to), equiv_code);
828 return;
829 }
830
831 /* No special multiword conversion insn; do it by hand. */
832 start_sequence ();
833
834 /* Get a copy of FROM widened to a word, if necessary. */
835 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
836 lowpart_mode = word_mode;
837 else
838 lowpart_mode = from_mode;
839
840 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
841
842 lowpart = gen_lowpart (lowpart_mode, to);
843 emit_move_insn (lowpart, lowfrom);
844
845 /* Compute the value to put in each remaining word. */
846 if (unsignedp)
847 fill_value = const0_rtx;
848 else
849 {
850 #ifdef HAVE_slt
851 if (HAVE_slt
852 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
853 && STORE_FLAG_VALUE == -1)
854 {
855 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
856 lowpart_mode, 0, 0);
857 fill_value = gen_reg_rtx (word_mode);
858 emit_insn (gen_slt (fill_value));
859 }
860 else
861 #endif
862 {
863 fill_value
864 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
865 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
866 NULL_RTX, 0);
867 fill_value = convert_to_mode (word_mode, fill_value, 1);
868 }
869 }
870
871 /* Fill the remaining words. */
872 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
873 {
874 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
875 rtx subword = operand_subword (to, index, 1, to_mode);
876
877 if (subword == 0)
878 abort ();
879
880 if (fill_value != subword)
881 emit_move_insn (subword, fill_value);
882 }
883
884 insns = get_insns ();
885 end_sequence ();
886
887 emit_no_conflict_block (insns, to, from, NULL_RTX,
888 gen_rtx (equiv_code, to_mode, from));
889 return;
890 }
891
892 /* Truncating multi-word to a word or less. */
893 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
894 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
895 {
896 convert_move (to, gen_lowpart (word_mode, from), 0);
897 return;
898 }
899
900 /* Handle pointer conversion. */ /* SPEE 900220 */
901 if (to_mode == PSImode)
902 {
903 if (from_mode != SImode)
904 from = convert_to_mode (SImode, from, unsignedp);
905
906 #ifdef HAVE_truncsipsi
907 if (HAVE_truncsipsi)
908 {
909 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
910 return;
911 }
912 #endif /* HAVE_truncsipsi */
913 abort ();
914 }
915
916 if (from_mode == PSImode)
917 {
918 if (to_mode != SImode)
919 {
920 from = convert_to_mode (SImode, from, unsignedp);
921 from_mode = SImode;
922 }
923 else
924 {
925 #ifdef HAVE_extendpsisi
926 if (HAVE_extendpsisi)
927 {
928 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
929 return;
930 }
931 #endif /* HAVE_extendpsisi */
932 abort ();
933 }
934 }
935
936 /* Now follow all the conversions between integers
937 no more than a word long. */
938
939 /* For truncation, usually we can just refer to FROM in a narrower mode. */
940 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
941 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
942 GET_MODE_BITSIZE (from_mode)))
943 {
944 if (!((GET_CODE (from) == MEM
945 && ! MEM_VOLATILE_P (from)
946 && direct_load[(int) to_mode]
947 && ! mode_dependent_address_p (XEXP (from, 0)))
948 || GET_CODE (from) == REG
949 || GET_CODE (from) == SUBREG))
950 from = force_reg (from_mode, from);
951 emit_move_insn (to, gen_lowpart (to_mode, from));
952 return;
953 }
954
955 /* Handle extension. */
956 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
957 {
958 /* Convert directly if that works. */
959 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
960 != CODE_FOR_nothing)
961 {
962 /* If FROM is a SUBREG, put it into a register. Do this
963 so that we always generate the same set of insns for
964 better cse'ing; if an intermediate assignment occurred,
965 we won't be doing the operation directly on the SUBREG. */
966 if (optimize > 0 && GET_CODE (from) == SUBREG)
967 from = force_reg (from_mode, from);
968 emit_unop_insn (code, to, from, equiv_code);
969 return;
970 }
971 else
972 {
973 enum machine_mode intermediate;
974
975 /* Search for a mode to convert via. */
976 for (intermediate = from_mode; intermediate != VOIDmode;
977 intermediate = GET_MODE_WIDER_MODE (intermediate))
978 if ((can_extend_p (to_mode, intermediate, unsignedp)
979 != CODE_FOR_nothing)
980 && (can_extend_p (intermediate, from_mode, unsignedp)
981 != CODE_FOR_nothing))
982 {
983 convert_move (to, convert_to_mode (intermediate, from,
984 unsignedp), unsignedp);
985 return;
986 }
987
988 /* No suitable intermediate mode. */
989 abort ();
990 }
991 }
992
993 /* Support special truncate insns for certain modes. */
994
995 if (from_mode == DImode && to_mode == SImode)
996 {
997 #ifdef HAVE_truncdisi2
998 if (HAVE_truncdisi2)
999 {
1000 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1001 return;
1002 }
1003 #endif
1004 convert_move (to, force_reg (from_mode, from), unsignedp);
1005 return;
1006 }
1007
1008 if (from_mode == DImode && to_mode == HImode)
1009 {
1010 #ifdef HAVE_truncdihi2
1011 if (HAVE_truncdihi2)
1012 {
1013 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1014 return;
1015 }
1016 #endif
1017 convert_move (to, force_reg (from_mode, from), unsignedp);
1018 return;
1019 }
1020
1021 if (from_mode == DImode && to_mode == QImode)
1022 {
1023 #ifdef HAVE_truncdiqi2
1024 if (HAVE_truncdiqi2)
1025 {
1026 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1027 return;
1028 }
1029 #endif
1030 convert_move (to, force_reg (from_mode, from), unsignedp);
1031 return;
1032 }
1033
1034 if (from_mode == SImode && to_mode == HImode)
1035 {
1036 #ifdef HAVE_truncsihi2
1037 if (HAVE_truncsihi2)
1038 {
1039 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1040 return;
1041 }
1042 #endif
1043 convert_move (to, force_reg (from_mode, from), unsignedp);
1044 return;
1045 }
1046
1047 if (from_mode == SImode && to_mode == QImode)
1048 {
1049 #ifdef HAVE_truncsiqi2
1050 if (HAVE_truncsiqi2)
1051 {
1052 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1053 return;
1054 }
1055 #endif
1056 convert_move (to, force_reg (from_mode, from), unsignedp);
1057 return;
1058 }
1059
1060 if (from_mode == HImode && to_mode == QImode)
1061 {
1062 #ifdef HAVE_trunchiqi2
1063 if (HAVE_trunchiqi2)
1064 {
1065 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1066 return;
1067 }
1068 #endif
1069 convert_move (to, force_reg (from_mode, from), unsignedp);
1070 return;
1071 }
1072
1073 /* Handle truncation of volatile memrefs, and so on;
1074 the things that couldn't be truncated directly,
1075 and for which there was no special instruction. */
1076 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1077 {
1078 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1079 emit_move_insn (to, temp);
1080 return;
1081 }
1082
1083 /* Mode combination is not recognized. */
1084 abort ();
1085 }
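
/* A minimal sketch (not part of the original file): sign-extend a QImode
   value into a fresh SImode register with convert_move.  The function name
   is hypothetical.  */
#if 0
static rtx
example_widen_qi_to_si (from)
     rtx from;			/* a QImode register or memory reference */
{
  rtx to = gen_reg_rtx (SImode);
  convert_move (to, from, 0);	/* UNSIGNEDP == 0: sign extension */
  return to;
}
#endif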
1086
1087 /* Return an rtx for a value that would result
1088 from converting X to mode MODE.
1089 Both X and MODE may be floating, or both integer.
1090 UNSIGNEDP is nonzero if X is an unsigned value.
1091 This can be done by referring to a part of X in place
1092 or by copying to a new temporary with conversion.
1093
1094 This function *must not* call protect_from_queue
1095 except when putting X into an insn (in which case convert_move does it). */
1096
1097 rtx
1098 convert_to_mode (mode, x, unsignedp)
1099 enum machine_mode mode;
1100 rtx x;
1101 int unsignedp;
1102 {
1103 register rtx temp;
1104
1105 /* If FROM is a SUBREG that indicates that we have already done at least
1106 the required extension, strip it. */
1107
1108 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1109 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1110 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1111 x = gen_lowpart (mode, x);
1112
1113 if (mode == GET_MODE (x))
1114 return x;
1115
1116 /* There is one case that we must handle specially: If we are converting
1117 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1118 we are to interpret the constant as unsigned, gen_lowpart will do
1119 the wrong thing if the constant appears negative. What we want to do is
1120 make the high-order word of the constant zero, not all ones. */
1121
1122 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1123 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1124 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1125 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1126
1127 /* We can do this with a gen_lowpart if both desired and current modes
1128 are integer, and this is either a constant integer, a register, or a
1129 non-volatile MEM. Except for the constant case, we must be narrowing
1130 the operand. */
1131
1132 if (GET_CODE (x) == CONST_INT
1133 || (GET_MODE_CLASS (mode) == MODE_INT
1134 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1135 && (GET_CODE (x) == CONST_DOUBLE
1136 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1137 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1138 && direct_load[(int) mode]
1139 || GET_CODE (x) == REG)))))
1140 return gen_lowpart (mode, x);
1141
1142 temp = gen_reg_rtx (mode);
1143 convert_move (temp, x, unsignedp);
1144 return temp;
1145 }
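
/* A minimal sketch (not part of the original file): obtain X as an unsigned
   SImode value; convert_to_mode reuses X via gen_lowpart when it can and
   otherwise copies through a new pseudo.  The function name is
   hypothetical.  */
#if 0
static rtx
example_as_unsigned_si (x)
     rtx x;
{
  return convert_to_mode (SImode, x, 1);	/* UNSIGNEDP == 1 */
}
#endif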
1146 \f
1147 /* Generate several move instructions to copy LEN bytes
1148 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1149 The caller must pass FROM and TO
1150 through protect_from_queue before calling.
1151 ALIGN (in bytes) is maximum alignment we can assume. */
1152
1153 static void
1154 move_by_pieces (to, from, len, align)
1155 rtx to, from;
1156 int len, align;
1157 {
1158 struct move_by_pieces data;
1159 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1160 int max_size = MOVE_MAX + 1;
1161
1162 data.offset = 0;
1163 data.to_addr = to_addr;
1164 data.from_addr = from_addr;
1165 data.to = to;
1166 data.from = from;
1167 data.autinc_to
1168 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1169 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1170 data.autinc_from
1171 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1172 || GET_CODE (from_addr) == POST_INC
1173 || GET_CODE (from_addr) == POST_DEC);
1174
1175 data.explicit_inc_from = 0;
1176 data.explicit_inc_to = 0;
1177 data.reverse
1178 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1179 if (data.reverse) data.offset = len;
1180 data.len = len;
1181
1182 /* If copying requires more than two move insns,
1183 copy addresses to registers (to make displacements shorter)
1184 and use post-increment if available. */
1185 if (!(data.autinc_from && data.autinc_to)
1186 && move_by_pieces_ninsns (len, align) > 2)
1187 {
1188 #ifdef HAVE_PRE_DECREMENT
1189 if (data.reverse && ! data.autinc_from)
1190 {
1191 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1192 data.autinc_from = 1;
1193 data.explicit_inc_from = -1;
1194 }
1195 #endif
1196 #ifdef HAVE_POST_INCREMENT
1197 if (! data.autinc_from)
1198 {
1199 data.from_addr = copy_addr_to_reg (from_addr);
1200 data.autinc_from = 1;
1201 data.explicit_inc_from = 1;
1202 }
1203 #endif
1204 if (!data.autinc_from && CONSTANT_P (from_addr))
1205 data.from_addr = copy_addr_to_reg (from_addr);
1206 #ifdef HAVE_PRE_DECREMENT
1207 if (data.reverse && ! data.autinc_to)
1208 {
1209 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1210 data.autinc_to = 1;
1211 data.explicit_inc_to = -1;
1212 }
1213 #endif
1214 #ifdef HAVE_POST_INCREMENT
1215 if (! data.reverse && ! data.autinc_to)
1216 {
1217 data.to_addr = copy_addr_to_reg (to_addr);
1218 data.autinc_to = 1;
1219 data.explicit_inc_to = 1;
1220 }
1221 #endif
1222 if (!data.autinc_to && CONSTANT_P (to_addr))
1223 data.to_addr = copy_addr_to_reg (to_addr);
1224 }
1225
1226 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1227 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1228 align = MOVE_MAX;
1229
1230 /* First move what we can in the largest integer mode, then go to
1231 successively smaller modes. */
1232
1233 while (max_size > 1)
1234 {
1235 enum machine_mode mode = VOIDmode, tmode;
1236 enum insn_code icode;
1237
1238 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1239 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1240 if (GET_MODE_SIZE (tmode) < max_size)
1241 mode = tmode;
1242
1243 if (mode == VOIDmode)
1244 break;
1245
1246 icode = mov_optab->handlers[(int) mode].insn_code;
1247 if (icode != CODE_FOR_nothing
1248 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1249 GET_MODE_SIZE (mode)))
1250 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1251
1252 max_size = GET_MODE_SIZE (mode);
1253 }
1254
1255 /* The code above should have handled everything. */
1256 if (data.len != 0)
1257 abort ();
1258 }
1259
1260 /* Return number of insns required to move L bytes by pieces.
1261 ALIGN (in bytes) is maximum alignment we can assume. */
1262
1263 static int
1264 move_by_pieces_ninsns (l, align)
1265 unsigned int l;
1266 int align;
1267 {
1268 register int n_insns = 0;
1269 int max_size = MOVE_MAX + 1;
1270
1271 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1272 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1273 align = MOVE_MAX;
1274
1275 while (max_size > 1)
1276 {
1277 enum machine_mode mode = VOIDmode, tmode;
1278 enum insn_code icode;
1279
1280 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1281 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1282 if (GET_MODE_SIZE (tmode) < max_size)
1283 mode = tmode;
1284
1285 if (mode == VOIDmode)
1286 break;
1287
1288 icode = mov_optab->handlers[(int) mode].insn_code;
1289 if (icode != CODE_FOR_nothing
1290 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1291 GET_MODE_SIZE (mode)))
1292 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1293
1294 max_size = GET_MODE_SIZE (mode);
1295 }
1296
1297 return n_insns;
1298 }
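
/* A worked example (not from the original file), assuming MOVE_MAX is 4 and
   that QImode, HImode and SImode all have move patterns: for an 11-byte copy
   with 4-byte alignment the loop above counts 2 SImode moves, 1 HImode move
   and 1 QImode move, so move_by_pieces_ninsns (11, 4) returns 4.
   emit_block_move compares this count with MOVE_RATIO to choose between
   move_by_pieces and a movstr pattern or library call.  */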
1299
1300 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1301 with move instructions for mode MODE. GENFUN is the gen_... function
1302 to make a move insn for that mode. DATA has all the other info. */
1303
1304 static void
1305 move_by_pieces_1 (genfun, mode, data)
1306 rtx (*genfun) ();
1307 enum machine_mode mode;
1308 struct move_by_pieces *data;
1309 {
1310 register int size = GET_MODE_SIZE (mode);
1311 register rtx to1, from1;
1312
1313 while (data->len >= size)
1314 {
1315 if (data->reverse) data->offset -= size;
1316
1317 to1 = (data->autinc_to
1318 ? gen_rtx (MEM, mode, data->to_addr)
1319 : change_address (data->to, mode,
1320 plus_constant (data->to_addr, data->offset)));
1321 from1 =
1322 (data->autinc_from
1323 ? gen_rtx (MEM, mode, data->from_addr)
1324 : change_address (data->from, mode,
1325 plus_constant (data->from_addr, data->offset)));
1326
1327 #ifdef HAVE_PRE_DECREMENT
1328 if (data->explicit_inc_to < 0)
1329 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1330 if (data->explicit_inc_from < 0)
1331 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1332 #endif
1333
1334 emit_insn ((*genfun) (to1, from1));
1335 #ifdef HAVE_POST_INCREMENT
1336 if (data->explicit_inc_to > 0)
1337 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1338 if (data->explicit_inc_from > 0)
1339 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1340 #endif
1341
1342 if (! data->reverse) data->offset += size;
1343
1344 data->len -= size;
1345 }
1346 }
1347 \f
1348 /* Emit code to move a block Y to a block X.
1349 This may be done with string-move instructions,
1350 with multiple scalar move instructions, or with a library call.
1351
1352 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1353 with mode BLKmode.
1354 SIZE is an rtx that says how long they are.
1355 ALIGN is the maximum alignment we can assume they have,
1356 measured in bytes. */
1357
1358 void
1359 emit_block_move (x, y, size, align)
1360 rtx x, y;
1361 rtx size;
1362 int align;
1363 {
1364 if (GET_MODE (x) != BLKmode)
1365 abort ();
1366
1367 if (GET_MODE (y) != BLKmode)
1368 abort ();
1369
1370 x = protect_from_queue (x, 1);
1371 y = protect_from_queue (y, 0);
1372 size = protect_from_queue (size, 0);
1373
1374 if (GET_CODE (x) != MEM)
1375 abort ();
1376 if (GET_CODE (y) != MEM)
1377 abort ();
1378 if (size == 0)
1379 abort ();
1380
1381 if (GET_CODE (size) == CONST_INT
1382 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1383 move_by_pieces (x, y, INTVAL (size), align);
1384 else
1385 {
1386 /* Try the most limited insn first, because there's no point
1387 including more than one in the machine description unless
1388 the more limited one has some advantage. */
1389
1390 rtx opalign = GEN_INT (align);
1391 enum machine_mode mode;
1392
1393 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1394 mode = GET_MODE_WIDER_MODE (mode))
1395 {
1396 enum insn_code code = movstr_optab[(int) mode];
1397
1398 if (code != CODE_FOR_nothing
1399 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1400 here because if SIZE is less than the mode mask, as it is
1401 returned by the macro, it will definitely be less than the
1402 actual mode mask. */
1403 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1404 && (insn_operand_predicate[(int) code][0] == 0
1405 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1406 && (insn_operand_predicate[(int) code][1] == 0
1407 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1408 && (insn_operand_predicate[(int) code][3] == 0
1409 || (*insn_operand_predicate[(int) code][3]) (opalign,
1410 VOIDmode)))
1411 {
1412 rtx op2;
1413 rtx last = get_last_insn ();
1414 rtx pat;
1415
1416 op2 = convert_to_mode (mode, size, 1);
1417 if (insn_operand_predicate[(int) code][2] != 0
1418 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1419 op2 = copy_to_mode_reg (mode, op2);
1420
1421 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1422 if (pat)
1423 {
1424 emit_insn (pat);
1425 return;
1426 }
1427 else
1428 delete_insns_since (last);
1429 }
1430 }
1431
1432 #ifdef TARGET_MEM_FUNCTIONS
1433 emit_library_call (memcpy_libfunc, 0,
1434 VOIDmode, 3, XEXP (x, 0), Pmode,
1435 XEXP (y, 0), Pmode,
1436 convert_to_mode (Pmode, size, 1), Pmode);
1437 #else
1438 emit_library_call (bcopy_libfunc, 0,
1439 VOIDmode, 3, XEXP (y, 0), Pmode,
1440 XEXP (x, 0), Pmode,
1441 convert_to_mode (Pmode, size, 1), Pmode);
1442 #endif
1443 }
1444 }
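
/* A minimal sketch (not part of the original file): copy a 32-byte aggregate
   between two BLKmode memory references, assuming word (4-byte) alignment.
   The names are hypothetical.  */
#if 0
static void
example_block_copy (dst_addr, src_addr)
     rtx dst_addr, src_addr;	/* Pmode addresses in registers */
{
  rtx dst = gen_rtx (MEM, BLKmode, dst_addr);
  rtx src = gen_rtx (MEM, BLKmode, src_addr);

  emit_block_move (dst, src, GEN_INT (32), 4);
}
#endif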
1445 \f
1446 /* Copy all or part of a value X into registers starting at REGNO.
1447 The number of registers to be filled is NREGS. */
1448
1449 void
1450 move_block_to_reg (regno, x, nregs, mode)
1451 int regno;
1452 rtx x;
1453 int nregs;
1454 enum machine_mode mode;
1455 {
1456 int i;
1457 rtx pat, last;
1458
1459 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1460 x = validize_mem (force_const_mem (mode, x));
1461
1462 /* See if the machine can do this with a load multiple insn. */
1463 #ifdef HAVE_load_multiple
1464 last = get_last_insn ();
1465 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1466 GEN_INT (nregs));
1467 if (pat)
1468 {
1469 emit_insn (pat);
1470 return;
1471 }
1472 else
1473 delete_insns_since (last);
1474 #endif
1475
1476 for (i = 0; i < nregs; i++)
1477 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1478 operand_subword_force (x, i, mode));
1479 }
1480
1481 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1482 The number of registers to be filled is NREGS. */
1483
1484 void
1485 move_block_from_reg (regno, x, nregs)
1486 int regno;
1487 rtx x;
1488 int nregs;
1489 {
1490 int i;
1491 rtx pat, last;
1492
1493 /* See if the machine can do this with a store multiple insn. */
1494 #ifdef HAVE_store_multiple
1495 last = get_last_insn ();
1496 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1497 GEN_INT (nregs));
1498 if (pat)
1499 {
1500 emit_insn (pat);
1501 return;
1502 }
1503 else
1504 delete_insns_since (last);
1505 #endif
1506
1507 for (i = 0; i < nregs; i++)
1508 {
1509 rtx tem = operand_subword (x, i, 1, BLKmode);
1510
1511 if (tem == 0)
1512 abort ();
1513
1514 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1515 }
1516 }
1517
1518 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1519
1520 void
1521 use_regs (regno, nregs)
1522 int regno;
1523 int nregs;
1524 {
1525 int i;
1526
1527 for (i = 0; i < nregs; i++)
1528 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1529 }
1530
1531 /* Mark the instructions since PREV as a libcall block.
1532 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1533
1534 static void
1535 group_insns (prev)
1536 rtx prev;
1537 {
1538 rtx insn_first;
1539 rtx insn_last;
1540
1541 /* Find the instructions to mark */
1542 if (prev)
1543 insn_first = NEXT_INSN (prev);
1544 else
1545 insn_first = get_insns ();
1546
1547 insn_last = get_last_insn ();
1548
1549 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1550 REG_NOTES (insn_last));
1551
1552 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1553 REG_NOTES (insn_first));
1554 }
1555 \f
1556 /* Write zeros through the storage of OBJECT.
1557 If OBJECT has BLKmode, SIZE is its length in bytes. */
1558
1559 void
1560 clear_storage (object, size)
1561 rtx object;
1562 int size;
1563 {
1564 if (GET_MODE (object) == BLKmode)
1565 {
1566 #ifdef TARGET_MEM_FUNCTIONS
1567 emit_library_call (memset_libfunc, 0,
1568 VOIDmode, 3,
1569 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1570 GEN_INT (size), Pmode);
1571 #else
1572 emit_library_call (bzero_libfunc, 0,
1573 VOIDmode, 2,
1574 XEXP (object, 0), Pmode,
1575 GEN_INT (size), Pmode);
1576 #endif
1577 }
1578 else
1579 emit_move_insn (object, const0_rtx);
1580 }
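
/* A minimal sketch (not part of the original file): zero a 64-byte BLKmode
   object whose address is in ADDR.  The name is hypothetical.  */
#if 0
static void
example_clear_block (addr)
     rtx addr;
{
  clear_storage (gen_rtx (MEM, BLKmode, addr), 64);
}
#endif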
1581
1582 /* Generate code to copy Y into X.
1583 Both Y and X must have the same mode, except that
1584 Y can be a constant with VOIDmode.
1585 This mode cannot be BLKmode; use emit_block_move for that.
1586
1587 Return the last instruction emitted. */
1588
1589 rtx
1590 emit_move_insn (x, y)
1591 rtx x, y;
1592 {
1593 enum machine_mode mode = GET_MODE (x);
1594 enum machine_mode submode;
1595 enum mode_class class = GET_MODE_CLASS (mode);
1596 int i;
1597
1598 x = protect_from_queue (x, 1);
1599 y = protect_from_queue (y, 0);
1600
1601 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1602 abort ();
1603
1604 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1605 y = force_const_mem (mode, y);
1606
1607 /* If X or Y are memory references, verify that their addresses are valid
1608 for the machine. */
1609 if (GET_CODE (x) == MEM
1610 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1611 && ! push_operand (x, GET_MODE (x)))
1612 || (flag_force_addr
1613 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1614 x = change_address (x, VOIDmode, XEXP (x, 0));
1615
1616 if (GET_CODE (y) == MEM
1617 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1618 || (flag_force_addr
1619 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1620 y = change_address (y, VOIDmode, XEXP (y, 0));
1621
1622 if (mode == BLKmode)
1623 abort ();
1624
1625 return emit_move_insn_1 (x, y);
1626 }
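
/* A minimal sketch (not part of the original file): load the constant 42
   into a new SImode pseudo.  The mode comes from the destination, since a
   CONST_INT carries no mode of its own.  The name is hypothetical.  */
#if 0
static rtx
example_load_42 ()
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif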
1627
1628 /* Low level part of emit_move_insn.
1629 Called just like emit_move_insn, but assumes X and Y
1630 are basically valid. */
1631
1632 rtx
1633 emit_move_insn_1 (x, y)
1634 rtx x, y;
1635 {
1636 enum machine_mode mode = GET_MODE (x);
1637 enum machine_mode submode;
1638 enum mode_class class = GET_MODE_CLASS (mode);
1639 int i;
1640
1641 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1642 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1643 (class == MODE_COMPLEX_INT
1644 ? MODE_INT : MODE_FLOAT),
1645 0);
1646
1647 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1648 return
1649 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1650
1651 /* Expand complex moves by moving real part and imag part, if possible. */
1652 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1653 && submode != BLKmode
1654 && (mov_optab->handlers[(int) submode].insn_code
1655 != CODE_FOR_nothing))
1656 {
1657 /* Don't split destination if it is a stack push. */
1658 int stack = push_operand (x, GET_MODE (x));
1659 rtx prev = get_last_insn ();
1660
1661 /* Tell flow that the whole of the destination is being set. */
1662 if (GET_CODE (x) == REG)
1663 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1664
1665 /* If this is a stack, push the highpart first, so it
1666 will be in the argument order.
1667
1668 In that case, change_address is used only to convert
1669 the mode, not to change the address. */
1670 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1671 ((stack ? change_address (x, submode, (rtx) 0)
1672 : gen_highpart (submode, x)),
1673 gen_highpart (submode, y)));
1674 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1675 ((stack ? change_address (x, submode, (rtx) 0)
1676 : gen_lowpart (submode, x)),
1677 gen_lowpart (submode, y)));
1678
1679 group_insns (prev);
1680
1681 return get_last_insn ();
1682 }
1683
1684 /* This will handle any multi-word mode that lacks a move_insn pattern.
1685 However, you will get better code if you define such patterns,
1686 even if they must turn into multiple assembler instructions. */
1687 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1688 {
1689 rtx last_insn = 0;
1690 rtx prev_insn = get_last_insn ();
1691
1692 for (i = 0;
1693 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1694 i++)
1695 {
1696 rtx xpart = operand_subword (x, i, 1, mode);
1697 rtx ypart = operand_subword (y, i, 1, mode);
1698
1699 /* If we can't get a part of Y, put Y into memory if it is a
1700 constant. Otherwise, force it into a register. If we still
1701 can't get a part of Y, abort. */
1702 if (ypart == 0 && CONSTANT_P (y))
1703 {
1704 y = force_const_mem (mode, y);
1705 ypart = operand_subword (y, i, 1, mode);
1706 }
1707 else if (ypart == 0)
1708 ypart = operand_subword_force (y, i, mode);
1709
1710 if (xpart == 0 || ypart == 0)
1711 abort ();
1712
1713 last_insn = emit_move_insn (xpart, ypart);
1714 }
1715 /* Mark these insns as a libcall block. */
1716 group_insns (prev_insn);
1717
1718 return last_insn;
1719 }
1720 else
1721 abort ();
1722 }
1723 \f
1724 /* Pushing data onto the stack. */
1725
1726 /* Push a block of length SIZE (perhaps variable)
1727 and return an rtx to address the beginning of the block.
1728 Note that it is not possible for the value returned to be a QUEUED.
1729 The value may be virtual_outgoing_args_rtx.
1730
1731 EXTRA is the number of bytes of padding to push in addition to SIZE.
1732 BELOW nonzero means this padding comes at low addresses;
1733 otherwise, the padding comes at high addresses. */
1734
1735 rtx
1736 push_block (size, extra, below)
1737 rtx size;
1738 int extra, below;
1739 {
1740 register rtx temp;
1741 if (CONSTANT_P (size))
1742 anti_adjust_stack (plus_constant (size, extra));
1743 else if (GET_CODE (size) == REG && extra == 0)
1744 anti_adjust_stack (size);
1745 else
1746 {
1747 rtx temp = copy_to_mode_reg (Pmode, size);
1748 if (extra != 0)
1749 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1750 temp, 0, OPTAB_LIB_WIDEN);
1751 anti_adjust_stack (temp);
1752 }
1753
1754 #ifdef STACK_GROWS_DOWNWARD
1755 temp = virtual_outgoing_args_rtx;
1756 if (extra != 0 && below)
1757 temp = plus_constant (temp, extra);
1758 #else
1759 if (GET_CODE (size) == CONST_INT)
1760 temp = plus_constant (virtual_outgoing_args_rtx,
1761 - INTVAL (size) - (below ? 0 : extra));
1762 else if (extra != 0 && !below)
1763 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1764 negate_rtx (Pmode, plus_constant (size, extra)));
1765 else
1766 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1767 negate_rtx (Pmode, size));
1768 #endif
1769
1770 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1771 }
1772
1773 rtx
1774 gen_push_operand ()
1775 {
1776 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1777 }
1778
1779 /* Generate code to push X onto the stack, assuming it has mode MODE and
1780 type TYPE.
1781 MODE is redundant except when X is a CONST_INT (since they don't
1782 carry mode info).
1783 SIZE is an rtx for the size of data to be copied (in bytes),
1784 needed only if X is BLKmode.
1785
1786 ALIGN (in bytes) is maximum alignment we can assume.
1787
1788 If PARTIAL and REG are both nonzero, then copy that many of the first
1789 words of X into registers starting with REG, and push the rest of X.
1790 The amount of space pushed is decreased by PARTIAL words,
1791 rounded *down* to a multiple of PARM_BOUNDARY.
1792 REG must be a hard register in this case.
1793 If REG is zero but PARTIAL is not, take any all others actions for an
1794 argument partially in registers, but do not actually load any
1795 registers.
1796
1797 EXTRA is the amount in bytes of extra space to leave next to this arg.
1798 This is ignored if an argument block has already been allocated.
1799
1800 On a machine that lacks real push insns, ARGS_ADDR is the address of
1801 the bottom of the argument block for this call. We use indexing off there
1802 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1803 argument block has not been preallocated.
1804
1805 ARGS_SO_FAR is the size of args previously pushed for this call. */
1806
1807 void
1808 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1809 args_addr, args_so_far)
1810 register rtx x;
1811 enum machine_mode mode;
1812 tree type;
1813 rtx size;
1814 int align;
1815 int partial;
1816 rtx reg;
1817 int extra;
1818 rtx args_addr;
1819 rtx args_so_far;
1820 {
1821 rtx xinner;
1822 enum direction stack_direction
1823 #ifdef STACK_GROWS_DOWNWARD
1824 = downward;
1825 #else
1826 = upward;
1827 #endif
1828
1829 /* Decide where to pad the argument: `downward' for below,
1830 `upward' for above, or `none' for don't pad it.
1831 Default is below for small data on big-endian machines; else above. */
1832 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1833
1834 /* Invert direction if stack is post-update. */
1835 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1836 if (where_pad != none)
1837 where_pad = (where_pad == downward ? upward : downward);
1838
1839 xinner = x = protect_from_queue (x, 0);
1840
1841 if (mode == BLKmode)
1842 {
1843 /* Copy a block into the stack, entirely or partially. */
1844
1845 register rtx temp;
1846 int used = partial * UNITS_PER_WORD;
1847 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1848 int skip;
1849
1850 if (size == 0)
1851 abort ();
1852
1853 used -= offset;
1854
1855 /* USED is now the # of bytes we need not copy to the stack
1856 because registers will take care of them. */
1857
1858 if (partial != 0)
1859 xinner = change_address (xinner, BLKmode,
1860 plus_constant (XEXP (xinner, 0), used));
1861
1862 /* If the partial register-part of the arg counts in its stack size,
1863 skip the part of stack space corresponding to the registers.
1864 Otherwise, start copying to the beginning of the stack space,
1865 by setting SKIP to 0. */
1866 #ifndef REG_PARM_STACK_SPACE
1867 skip = 0;
1868 #else
1869 skip = used;
1870 #endif
1871
1872 #ifdef PUSH_ROUNDING
1873 /* Do it with several push insns if that doesn't take lots of insns
1874 and if there is no difficulty with push insns that skip bytes
1875 on the stack for alignment purposes. */
1876 if (args_addr == 0
1877 && GET_CODE (size) == CONST_INT
1878 && skip == 0
1879 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1880 < MOVE_RATIO)
1881 /* Here we avoid the case of a structure whose weak alignment
1882 forces many pushes of a small amount of data,
1883 and such small pushes do rounding that causes trouble. */
1884 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1885 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1886 || PUSH_ROUNDING (align) == align)
1887 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1888 {
1889 /* Push padding now if padding above and stack grows down,
1890 or if padding below and stack grows up.
1891 But if space already allocated, this has already been done. */
1892 if (extra && args_addr == 0
1893 && where_pad != none && where_pad != stack_direction)
1894 anti_adjust_stack (GEN_INT (extra));
1895
1896 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1897 INTVAL (size) - used, align);
1898 }
1899 else
1900 #endif /* PUSH_ROUNDING */
1901 {
1902 /* Otherwise make space on the stack and copy the data
1903 to the address of that space. */
1904
1905 /* Deduct words put into registers from the size we must copy. */
1906 if (partial != 0)
1907 {
1908 if (GET_CODE (size) == CONST_INT)
1909 size = GEN_INT (INTVAL (size) - used);
1910 else
1911 size = expand_binop (GET_MODE (size), sub_optab, size,
1912 GEN_INT (used), NULL_RTX, 0,
1913 OPTAB_LIB_WIDEN);
1914 }
1915
1916 /* Get the address of the stack space.
1917 In this case, we do not deal with EXTRA separately.
1918 A single stack adjust will do. */
1919 if (! args_addr)
1920 {
1921 temp = push_block (size, extra, where_pad == downward);
1922 extra = 0;
1923 }
1924 else if (GET_CODE (args_so_far) == CONST_INT)
1925 temp = memory_address (BLKmode,
1926 plus_constant (args_addr,
1927 skip + INTVAL (args_so_far)));
1928 else
1929 temp = memory_address (BLKmode,
1930 plus_constant (gen_rtx (PLUS, Pmode,
1931 args_addr, args_so_far),
1932 skip));
1933
1934 /* TEMP is the address of the block. Copy the data there. */
1935 if (GET_CODE (size) == CONST_INT
1936 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1937 < MOVE_RATIO))
1938 {
1939 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1940 INTVAL (size), align);
1941 goto ret;
1942 }
1943 /* Try the most limited insn first, because there's no point
1944 including more than one in the machine description unless
1945 the more limited one has some advantage. */
1946 #ifdef HAVE_movstrqi
1947 if (HAVE_movstrqi
1948 && GET_CODE (size) == CONST_INT
1949 && ((unsigned) INTVAL (size)
1950 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1951 {
1952 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1953 xinner, size, GEN_INT (align));
1954 if (pat != 0)
1955 {
1956 emit_insn (pat);
1957 goto ret;
1958 }
1959 }
1960 #endif
1961 #ifdef HAVE_movstrhi
1962 if (HAVE_movstrhi
1963 && GET_CODE (size) == CONST_INT
1964 && ((unsigned) INTVAL (size)
1965 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1966 {
1967 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1968 xinner, size, GEN_INT (align));
1969 if (pat != 0)
1970 {
1971 emit_insn (pat);
1972 goto ret;
1973 }
1974 }
1975 #endif
1976 #ifdef HAVE_movstrsi
1977 if (HAVE_movstrsi)
1978 {
1979 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1980 xinner, size, GEN_INT (align));
1981 if (pat != 0)
1982 {
1983 emit_insn (pat);
1984 goto ret;
1985 }
1986 }
1987 #endif
1988 #ifdef HAVE_movstrdi
1989 if (HAVE_movstrdi)
1990 {
1991 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1992 xinner, size, GEN_INT (align));
1993 if (pat != 0)
1994 {
1995 emit_insn (pat);
1996 goto ret;
1997 }
1998 }
1999 #endif
2000
2001 #ifndef ACCUMULATE_OUTGOING_ARGS
2002 /* If the source is referenced relative to the stack pointer,
2003 copy it to another register to stabilize it. We do not need
2004 to do this if we know that we won't be changing sp. */
2005
2006 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2007 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2008 temp = copy_to_reg (temp);
2009 #endif
2010
2011 /* Make inhibit_defer_pop nonzero around the library call
2012 to force it to pop the bcopy-arguments right away. */
2013 NO_DEFER_POP;
2014 #ifdef TARGET_MEM_FUNCTIONS
2015 emit_library_call (memcpy_libfunc, 0,
2016 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2017 size, Pmode);
2018 #else
2019 emit_library_call (bcopy_libfunc, 0,
2020 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2021 size, Pmode);
2022 #endif
2023 OK_DEFER_POP;
2024 }
2025 }
2026 else if (partial > 0)
2027 {
2028 /* Scalar partly in registers. */
2029
2030 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2031 int i;
2032 int not_stack;
2033 /* # words of start of argument
2034 that we must make space for but need not store. */
2035 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2036 int args_offset = INTVAL (args_so_far);
2037 int skip;
2038
2039 /* Push padding now if padding above and stack grows down,
2040 or if padding below and stack grows up.
2041 But if space already allocated, this has already been done. */
2042 if (extra && args_addr == 0
2043 && where_pad != none && where_pad != stack_direction)
2044 anti_adjust_stack (GEN_INT (extra));
2045
2046 /* If we make space by pushing it, we might as well push
2047 the real data. Otherwise, we can leave OFFSET nonzero
2048 and leave the space uninitialized. */
2049 if (args_addr == 0)
2050 offset = 0;
2051
2052 /* Now NOT_STACK gets the number of words that we don't need to
2053 allocate on the stack. */
2054 not_stack = partial - offset;
2055
2056 /* If the partial register-part of the arg counts in its stack size,
2057 skip the part of stack space corresponding to the registers.
2058 Otherwise, start copying to the beginning of the stack space,
2059 by setting SKIP to 0. */
2060 #ifndef REG_PARM_STACK_SPACE
2061 skip = 0;
2062 #else
2063 skip = not_stack;
2064 #endif
2065
2066 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2067 x = validize_mem (force_const_mem (mode, x));
2068
2069 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2070 SUBREGs of such registers are not allowed. */
2071 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2072 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2073 x = copy_to_reg (x);
2074
2075 /* Loop over all the words allocated on the stack for this arg. */
2076 /* We can do it by words, because any scalar bigger than a word
2077 has a size a multiple of a word. */
2078 #ifndef PUSH_ARGS_REVERSED
2079 for (i = not_stack; i < size; i++)
2080 #else
2081 for (i = size - 1; i >= not_stack; i--)
2082 #endif
2083 if (i >= not_stack + offset)
2084 emit_push_insn (operand_subword_force (x, i, mode),
2085 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2086 0, args_addr,
2087 GEN_INT (args_offset + ((i - not_stack + skip)
2088 * UNITS_PER_WORD)));
2089 }
2090 else
2091 {
2092 rtx addr;
2093
2094 /* Push padding now if padding above and stack grows down,
2095 or if padding below and stack grows up.
2096 But if space already allocated, this has already been done. */
2097 if (extra && args_addr == 0
2098 && where_pad != none && where_pad != stack_direction)
2099 anti_adjust_stack (GEN_INT (extra));
2100
2101 #ifdef PUSH_ROUNDING
2102 if (args_addr == 0)
2103 addr = gen_push_operand ();
2104 else
2105 #endif
2106 if (GET_CODE (args_so_far) == CONST_INT)
2107 addr
2108 = memory_address (mode,
2109 plus_constant (args_addr, INTVAL (args_so_far)));
2110 else
2111 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2112 args_so_far));
2113
2114 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2115 }
2116
2117 ret:
2118 /* If part should go in registers, copy that part
2119 into the appropriate registers. Do this now, at the end,
2120 since mem-to-mem copies above may do function calls. */
2121 if (partial > 0 && reg != 0)
2122 move_block_to_reg (REGNO (reg), x, partial, mode);
2123
2124 if (extra && args_addr == 0 && where_pad == stack_direction)
2125 anti_adjust_stack (GEN_INT (extra));
2126 }
2127 \f
2128 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2129 (emitting the queue unless NO_QUEUE is nonzero),
2130 for a value of mode OUTMODE,
2131 with NARGS different arguments, passed as alternating rtx values
2132 and machine_modes to convert them to.
2133 The rtx values should have been passed through protect_from_queue already.
2134
2135 NO_QUEUE will be true if and only if the library call is a `const' call
2136 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2137 to the variable is_const in expand_call.
2138
2139 NO_QUEUE must be true for const calls, because if it isn't, then
2140 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2141 and will be lost if the libcall sequence is optimized away.
2142
2143 NO_QUEUE must be false for non-const calls, because if it isn't, the
2144 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2145 optimized. For instance, the instruction scheduler may incorrectly
2146 move memory references across the non-const call. */
2147
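/* For illustration, the block-move code earlier in this file calls memcpy
   this way: NO_QUEUE of 0, a VOIDmode result, and three rtx/mode pairs.
   DEST_ADDR, SRC_ADDR and SIZE_RTX stand for made-up locals already valid
   as Pmode operands:

	emit_library_call (memcpy_libfunc, 0, VOIDmode, 3,
			   dest_addr, Pmode, src_addr, Pmode,
			   size_rtx, Pmode);  */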
2148 void
2149 emit_library_call (va_alist)
2150 va_dcl
2151 {
2152 va_list p;
2153 /* Total size in bytes of all the stack-parms scanned so far. */
2154 struct args_size args_size;
2155 /* Size of arguments before any adjustments (such as rounding). */
2156 struct args_size original_args_size;
2157 register int argnum;
2158 enum machine_mode outmode;
2159 int nargs;
2160 rtx fun;
2161 rtx orgfun;
2162 int inc;
2163 int count;
2164 rtx argblock = 0;
2165 CUMULATIVE_ARGS args_so_far;
2166 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2167 struct args_size offset; struct args_size size; };
2168 struct arg *argvec;
2169 int old_inhibit_defer_pop = inhibit_defer_pop;
2170 int no_queue = 0;
2171 rtx use_insns;
2172
2173 va_start (p);
2174 orgfun = fun = va_arg (p, rtx);
2175 no_queue = va_arg (p, int);
2176 outmode = va_arg (p, enum machine_mode);
2177 nargs = va_arg (p, int);
2178
2179 /* Copy all the libcall-arguments out of the varargs data
2180 and into a vector ARGVEC.
2181
2182 Compute how to pass each argument. We only support a very small subset
2183 of the full argument passing conventions to limit complexity here since
2184 library functions shouldn't have many args. */
2185
2186 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2187
2188 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2189
2190 args_size.constant = 0;
2191 args_size.var = 0;
2192
2193 for (count = 0; count < nargs; count++)
2194 {
2195 rtx val = va_arg (p, rtx);
2196 enum machine_mode mode = va_arg (p, enum machine_mode);
2197
2198 /* We cannot convert the arg value to the mode the library wants here;
2199 must do it earlier where we know the signedness of the arg. */
2200 if (mode == BLKmode
2201 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2202 abort ();
2203
2204 /* On some machines, there's no way to pass a float to a library fcn.
2205 Pass it as a double instead. */
2206 #ifdef LIBGCC_NEEDS_DOUBLE
2207 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2208 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2209 #endif
2210
2211 /* There's no need to call protect_from_queue, because
2212 either emit_move_insn or emit_push_insn will do that. */
2213
2214 /* Make sure it is a reasonable operand for a move or push insn. */
2215 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2216 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2217 val = force_operand (val, NULL_RTX);
2218
2219 argvec[count].value = val;
2220 argvec[count].mode = mode;
2221
2222 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2223 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2224 abort ();
2225 #endif
2226
2227 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2228 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2229 abort ();
2230 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2231 argvec[count].partial
2232 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2233 #else
2234 argvec[count].partial = 0;
2235 #endif
2236
2237 locate_and_pad_parm (mode, NULL_TREE,
2238 argvec[count].reg && argvec[count].partial == 0,
2239 NULL_TREE, &args_size, &argvec[count].offset,
2240 &argvec[count].size);
2241
2242 if (argvec[count].size.var)
2243 abort ();
2244
2245 #ifndef REG_PARM_STACK_SPACE
2246 if (argvec[count].partial)
2247 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2248 #endif
2249
2250 if (argvec[count].reg == 0 || argvec[count].partial != 0
2251 #ifdef REG_PARM_STACK_SPACE
2252 || 1
2253 #endif
2254 )
2255 args_size.constant += argvec[count].size.constant;
2256
2257 #ifdef ACCUMULATE_OUTGOING_ARGS
2258 /* If this arg is actually passed on the stack, it might be
2259 clobbering something we already put there (this library call might
2260 be inside the evaluation of an argument to a function whose call
2261 requires the stack). This will only occur when the library call
2262 has sufficient args to run out of argument registers. Abort in
2263 this case; if this ever occurs, code must be added to save and
2264 restore the arg slot. */
2265
2266 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2267 abort ();
2268 #endif
2269
2270 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2271 }
2272 va_end (p);
2273
2274 /* If this machine requires an external definition for library
2275 functions, write one out. */
2276 assemble_external_libcall (fun);
2277
2278 original_args_size = args_size;
2279 #ifdef STACK_BOUNDARY
2280 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2281 / STACK_BYTES) * STACK_BYTES);
2282 #endif
2283
2284 #ifdef REG_PARM_STACK_SPACE
2285 args_size.constant = MAX (args_size.constant,
2286 REG_PARM_STACK_SPACE (NULL_TREE));
2287 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2288 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2289 #endif
2290 #endif
2291
2292 #ifdef ACCUMULATE_OUTGOING_ARGS
2293 if (args_size.constant > current_function_outgoing_args_size)
2294 current_function_outgoing_args_size = args_size.constant;
2295 args_size.constant = 0;
2296 #endif
2297
2298 #ifndef PUSH_ROUNDING
2299 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2300 #endif
2301
2302 #ifdef PUSH_ARGS_REVERSED
2303 #ifdef STACK_BOUNDARY
2304 /* If we push args individually in reverse order, perform stack alignment
2305 before the first push (the last arg). */
2306 if (argblock == 0)
2307 anti_adjust_stack (GEN_INT (args_size.constant
2308 - original_args_size.constant));
2309 #endif
2310 #endif
2311
2312 #ifdef PUSH_ARGS_REVERSED
2313 inc = -1;
2314 argnum = nargs - 1;
2315 #else
2316 inc = 1;
2317 argnum = 0;
2318 #endif
2319
2320 /* Push the args that need to be pushed. */
2321
2322 for (count = 0; count < nargs; count++, argnum += inc)
2323 {
2324 register enum machine_mode mode = argvec[argnum].mode;
2325 register rtx val = argvec[argnum].value;
2326 rtx reg = argvec[argnum].reg;
2327 int partial = argvec[argnum].partial;
2328
2329 if (! (reg != 0 && partial == 0))
2330 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2331 argblock, GEN_INT (argvec[argnum].offset.constant));
2332 NO_DEFER_POP;
2333 }
2334
2335 #ifndef PUSH_ARGS_REVERSED
2336 #ifdef STACK_BOUNDARY
2337 /* If we pushed args in forward order, perform stack alignment
2338 after pushing the last arg. */
2339 if (argblock == 0)
2340 anti_adjust_stack (GEN_INT (args_size.constant
2341 - original_args_size.constant));
2342 #endif
2343 #endif
2344
2345 #ifdef PUSH_ARGS_REVERSED
2346 argnum = nargs - 1;
2347 #else
2348 argnum = 0;
2349 #endif
2350
2351 /* Now load any reg parms into their regs. */
2352
2353 for (count = 0; count < nargs; count++, argnum += inc)
2354 {
2355 register enum machine_mode mode = argvec[argnum].mode;
2356 register rtx val = argvec[argnum].value;
2357 rtx reg = argvec[argnum].reg;
2358 int partial = argvec[argnum].partial;
2359
2360 if (reg != 0 && partial == 0)
2361 emit_move_insn (reg, val);
2362 NO_DEFER_POP;
2363 }
2364
2365 /* For version 1.37, try deleting this entirely. */
2366 if (! no_queue)
2367 emit_queue ();
2368
2369 /* Any regs containing parms remain in use through the call. */
2370 start_sequence ();
2371 for (count = 0; count < nargs; count++)
2372 if (argvec[count].reg != 0)
2373 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2374
2375 use_insns = get_insns ();
2376 end_sequence ();
2377
2378 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2379
2380 /* Don't allow popping to be deferred, since then
2381 cse'ing of library calls could delete a call and leave the pop. */
2382 NO_DEFER_POP;
2383
2384 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2385 will set inhibit_defer_pop to that value. */
2386
2387 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2388 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2389 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2390 old_inhibit_defer_pop + 1, use_insns, no_queue);
2391
2392 /* Now restore inhibit_defer_pop to its actual original value. */
2393 OK_DEFER_POP;
2394 }
2395 \f
2396 /* Like emit_library_call except that an extra argument, VALUE,
2397 comes second and says where to store the result.
2398 (If VALUE is zero, the result comes in the function value register.) */
2399
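/* For illustration, with LIBFUNC, TARGET, OP0 and OP1 as made-up locals,
   a `const' two-operand routine returning a DImode result into TARGET
   could be called as:

	emit_library_call_value (libfunc, target, 1, DImode, 2,
				 op0, DImode, op1, DImode);  */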
2400 void
2401 emit_library_call_value (va_alist)
2402 va_dcl
2403 {
2404 va_list p;
2405 /* Total size in bytes of all the stack-parms scanned so far. */
2406 struct args_size args_size;
2407 /* Size of arguments before any adjustments (such as rounding). */
2408 struct args_size original_args_size;
2409 register int argnum;
2410 enum machine_mode outmode;
2411 int nargs;
2412 rtx fun;
2413 rtx orgfun;
2414 int inc;
2415 int count;
2416 rtx argblock = 0;
2417 CUMULATIVE_ARGS args_so_far;
2418 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2419 struct args_size offset; struct args_size size; };
2420 struct arg *argvec;
2421 int old_inhibit_defer_pop = inhibit_defer_pop;
2422 int no_queue = 0;
2423 rtx use_insns;
2424 rtx value;
2425 rtx mem_value = 0;
2426
2427 va_start (p);
2428 orgfun = fun = va_arg (p, rtx);
2429 value = va_arg (p, rtx);
2430 no_queue = va_arg (p, int);
2431 outmode = va_arg (p, enum machine_mode);
2432 nargs = va_arg (p, int);
2433
2434 /* If this kind of value comes back in memory,
2435 decide where in memory it should come back. */
2436 if (RETURN_IN_MEMORY (type_for_mode (outmode, 0)))
2437 {
2438 if (GET_CODE (value) == MEM)
2439 mem_value = value;
2440 else
2441 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2442 }
2443
2444 /* ??? Unfinished: must pass the memory address as an argument. */
2445
2446 /* Copy all the libcall-arguments out of the varargs data
2447 and into a vector ARGVEC.
2448
2449 Compute how to pass each argument. We only support a very small subset
2450 of the full argument passing conventions to limit complexity here since
2451 library functions shouldn't have many args. */
2452
2453 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2454
2455 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2456
2457 args_size.constant = 0;
2458 args_size.var = 0;
2459
2460 count = 0;
2461
2462 /* If there's a structure value address to be passed,
2463 either pass it in the special place, or pass it as an extra argument. */
2464 if (mem_value)
2465 {
2466 rtx addr = XEXP (mem_value, 0);
2467
2468 if (! struct_value_rtx)
2469 {
2470 nargs++;
2471
2472 /* Make sure it is a reasonable operand for a move or push insn. */
2473 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2474 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2475 addr = force_operand (addr, NULL_RTX);
2476
2477 argvec[count].value = addr;
2478 argvec[count].mode = outmode;
2479 argvec[count].partial = 0;
2480
2481 argvec[count].reg = FUNCTION_ARG (args_so_far, outmode, NULL_TREE, 1);
2482 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2483 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, outmode, NULL_TREE, 1))
2484 abort ();
2485 #endif
2486
2487 locate_and_pad_parm (outmode, NULL_TREE,
2488 argvec[count].reg && argvec[count].partial == 0,
2489 NULL_TREE, &args_size, &argvec[count].offset,
2490 &argvec[count].size);
2491
2492
2493 if (argvec[count].reg == 0 || argvec[count].partial != 0
2494 #ifdef REG_PARM_STACK_SPACE
2495 || 1
2496 #endif
2497 )
2498 args_size.constant += argvec[count].size.constant;
2499
2500 FUNCTION_ARG_ADVANCE (args_so_far, outmode, (tree)0, 1);
2501 }
2502 }
2503
2504 for (; count < nargs; count++)
2505 {
2506 rtx val = va_arg (p, rtx);
2507 enum machine_mode mode = va_arg (p, enum machine_mode);
2508
2509 /* We cannot convert the arg value to the mode the library wants here;
2510 must do it earlier where we know the signedness of the arg. */
2511 if (mode == BLKmode
2512 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2513 abort ();
2514
2515 /* On some machines, there's no way to pass a float to a library fcn.
2516 Pass it as a double instead. */
2517 #ifdef LIBGCC_NEEDS_DOUBLE
2518 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2519 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2520 #endif
2521
2522 /* There's no need to call protect_from_queue, because
2523 either emit_move_insn or emit_push_insn will do that. */
2524
2525 /* Make sure it is a reasonable operand for a move or push insn. */
2526 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2527 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2528 val = force_operand (val, NULL_RTX);
2529
2530 argvec[count].value = val;
2531 argvec[count].mode = mode;
2532
2533 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2534 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2535 abort ();
2536 #endif
2537
2538 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2539 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2540 abort ();
2541 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2542 argvec[count].partial
2543 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2544 #else
2545 argvec[count].partial = 0;
2546 #endif
2547
2548 locate_and_pad_parm (mode, NULL_TREE,
2549 argvec[count].reg && argvec[count].partial == 0,
2550 NULL_TREE, &args_size, &argvec[count].offset,
2551 &argvec[count].size);
2552
2553 if (argvec[count].size.var)
2554 abort ();
2555
2556 #ifndef REG_PARM_STACK_SPACE
2557 if (argvec[count].partial)
2558 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2559 #endif
2560
2561 if (argvec[count].reg == 0 || argvec[count].partial != 0
2562 #ifdef REG_PARM_STACK_SPACE
2563 || 1
2564 #endif
2565 )
2566 args_size.constant += argvec[count].size.constant;
2567
2568 #ifdef ACCUMULATE_OUTGOING_ARGS
2569 /* If this arg is actually passed on the stack, it might be
2570 clobbering something we already put there (this library call might
2571 be inside the evaluation of an argument to a function whose call
2572 requires the stack). This will only occur when the library call
2573 has sufficient args to run out of argument registers. Abort in
2574 this case; if this ever occurs, code must be added to save and
2575 restore the arg slot. */
2576
2577 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2578 abort ();
2579 #endif
2580
2581 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2582 }
2583 va_end (p);
2584
2585 /* If this machine requires an external definition for library
2586 functions, write one out. */
2587 assemble_external_libcall (fun);
2588
2589 original_args_size = args_size;
2590 #ifdef STACK_BOUNDARY
2591 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2592 / STACK_BYTES) * STACK_BYTES);
2593 #endif
2594
2595 #ifdef REG_PARM_STACK_SPACE
2596 args_size.constant = MAX (args_size.constant,
2597 REG_PARM_STACK_SPACE (NULL_TREE));
2598 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2599 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2600 #endif
2601 #endif
2602
2603 #ifdef ACCUMULATE_OUTGOING_ARGS
2604 if (args_size.constant > current_function_outgoing_args_size)
2605 current_function_outgoing_args_size = args_size.constant;
2606 args_size.constant = 0;
2607 #endif
2608
2609 #ifndef PUSH_ROUNDING
2610 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2611 #endif
2612
2613 #ifdef PUSH_ARGS_REVERSED
2614 #ifdef STACK_BOUNDARY
2615 /* If we push args individually in reverse order, perform stack alignment
2616 before the first push (the last arg). */
2617 if (argblock == 0)
2618 anti_adjust_stack (GEN_INT (args_size.constant
2619 - original_args_size.constant));
2620 #endif
2621 #endif
2622
2623 #ifdef PUSH_ARGS_REVERSED
2624 inc = -1;
2625 argnum = nargs - 1;
2626 #else
2627 inc = 1;
2628 argnum = 0;
2629 #endif
2630
2631 /* Push the args that need to be pushed. */
2632
2633 for (count = 0; count < nargs; count++, argnum += inc)
2634 {
2635 register enum machine_mode mode = argvec[argnum].mode;
2636 register rtx val = argvec[argnum].value;
2637 rtx reg = argvec[argnum].reg;
2638 int partial = argvec[argnum].partial;
2639
2640 if (! (reg != 0 && partial == 0))
2641 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2642 argblock, GEN_INT (argvec[argnum].offset.constant));
2643 NO_DEFER_POP;
2644 }
2645
2646 #ifndef PUSH_ARGS_REVERSED
2647 #ifdef STACK_BOUNDARY
2648 /* If we pushed args in forward order, perform stack alignment
2649 after pushing the last arg. */
2650 if (argblock == 0)
2651 anti_adjust_stack (GEN_INT (args_size.constant
2652 - original_args_size.constant));
2653 #endif
2654 #endif
2655
2656 #ifdef PUSH_ARGS_REVERSED
2657 argnum = nargs - 1;
2658 #else
2659 argnum = 0;
2660 #endif
2661
2662 /* Now load any reg parms into their regs. */
2663
2664 if (mem_value != 0 && struct_value_rtx != 0)
2665 emit_move_insn (struct_value_rtx, XEXP (mem_value, 0));
2666
2667 for (count = 0; count < nargs; count++, argnum += inc)
2668 {
2669 register enum machine_mode mode = argvec[argnum].mode;
2670 register rtx val = argvec[argnum].value;
2671 rtx reg = argvec[argnum].reg;
2672 int partial = argvec[argnum].partial;
2673
2674 if (reg != 0 && partial == 0)
2675 emit_move_insn (reg, val);
2676 NO_DEFER_POP;
2677 }
2678
2679 #if 0
2680 /* For version 1.37, try deleting this entirely. */
2681 if (! no_queue)
2682 emit_queue ();
2683 #endif
2684
2685 /* Any regs containing parms remain in use through the call. */
2686 start_sequence ();
2687 for (count = 0; count < nargs; count++)
2688 if (argvec[count].reg != 0)
2689 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2690
2691 use_insns = get_insns ();
2692 end_sequence ();
2693
2694 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2695
2696 /* Don't allow popping to be deferred, since then
2697 cse'ing of library calls could delete a call and leave the pop. */
2698 NO_DEFER_POP;
2699
2700 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2701 will set inhibit_defer_pop to that value. */
2702
2703 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2704 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2705 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2706 old_inhibit_defer_pop + 1, use_insns, no_queue);
2707
2708 /* Now restore inhibit_defer_pop to its actual original value. */
2709 OK_DEFER_POP;
2710
2711 /* Copy the value to the right place. */
2712 if (outmode != VOIDmode)
2713 {
2714 if (mem_value)
2715 {
2716 if (value == 0)
2717 value = hard_libcall_value (outmode);
2718 if (value != mem_value)
2719 emit_move_insn (value, mem_value);
2720 }
2721 else if (value != 0)
2722 emit_move_insn (value, hard_libcall_value (outmode));
2723 }
2724 }
2725 \f
2726 /* Expand an assignment that stores the value of FROM into TO.
2727 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2728 (This may contain a QUEUED rtx.)
2729 Otherwise, the returned value is not meaningful.
2730
2731 SUGGEST_REG is no longer actually used.
2732 It used to mean, copy the value through a register
2733 and return that register, if that is possible.
2734 But now we do this if WANT_VALUE.
2735
2736 If the value stored is a constant, we return the constant. */
2737
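/* For illustration, a front end expanding the C statement `s.f = x;' with
   its value unused could call this as follows, where TO_TREE and FROM_TREE
   are made-up names for the trees of `s.f' and `x':

	expand_assignment (to_tree, from_tree, 0, 0);

   Passing WANT_VALUE of 1 instead asks for an rtx usable as the value of
   the whole assignment expression.  */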
2738 rtx
2739 expand_assignment (to, from, want_value, suggest_reg)
2740 tree to, from;
2741 int want_value;
2742 int suggest_reg;
2743 {
2744 register rtx to_rtx = 0;
2745 rtx result;
2746
2747 /* Don't crash if the lhs of the assignment was erroneous. */
2748
2749 if (TREE_CODE (to) == ERROR_MARK)
2750 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2751
2752 /* Assignment of a structure component needs special treatment
2753 if the structure component's rtx is not simply a MEM.
2754 Assignment of an array element at a constant index
2755 has the same problem. */
2756
2757 if (TREE_CODE (to) == COMPONENT_REF
2758 || TREE_CODE (to) == BIT_FIELD_REF
2759 || (TREE_CODE (to) == ARRAY_REF
2760 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2761 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2762 {
2763 enum machine_mode mode1;
2764 int bitsize;
2765 int bitpos;
2766 tree offset;
2767 int unsignedp;
2768 int volatilep = 0;
2769 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2770 &mode1, &unsignedp, &volatilep);
2771
2772 /* If we are going to use store_bit_field and extract_bit_field,
2773 make sure to_rtx will be safe for multiple use. */
2774
2775 if (mode1 == VOIDmode && want_value)
2776 tem = stabilize_reference (tem);
2777
2778 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2779 if (offset != 0)
2780 {
2781 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2782
2783 if (GET_CODE (to_rtx) != MEM)
2784 abort ();
2785 to_rtx = change_address (to_rtx, VOIDmode,
2786 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2787 force_reg (Pmode, offset_rtx)));
2788 }
2789 if (volatilep)
2790 {
2791 if (GET_CODE (to_rtx) == MEM)
2792 MEM_VOLATILE_P (to_rtx) = 1;
2793 #if 0 /* This was turned off because, when a field is volatile
2794 in an object which is not volatile, the object may be in a register,
2795 and then we would abort over here. */
2796 else
2797 abort ();
2798 #endif
2799 }
2800
2801 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2802 (want_value
2803 /* Spurious cast makes HPUX compiler happy. */
2804 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2805 : VOIDmode),
2806 unsignedp,
2807 /* Required alignment of containing datum. */
2808 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2809 int_size_in_bytes (TREE_TYPE (tem)));
2810 preserve_temp_slots (result);
2811 free_temp_slots ();
2812
2813 return result;
2814 }
2815
2816 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2817 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2818
2819 if (to_rtx == 0)
2820 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2821
2822 /* Don't move directly into a return register. */
2823 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2824 {
2825 rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2826 emit_move_insn (to_rtx, temp);
2827 preserve_temp_slots (to_rtx);
2828 free_temp_slots ();
2829 return to_rtx;
2830 }
2831
2832 /* In case we are returning the contents of an object which overlaps
2833 the place the value is being stored, use a safe function when copying
2834 a value through a pointer into a structure value return block. */
2835 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2836 && current_function_returns_struct
2837 && !current_function_returns_pcc_struct)
2838 {
2839 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2840 rtx size = expr_size (from);
2841
2842 #ifdef TARGET_MEM_FUNCTIONS
2843 emit_library_call (memcpy_libfunc, 0,
2844 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2845 XEXP (from_rtx, 0), Pmode,
2846 size, Pmode);
2847 #else
2848 emit_library_call (bcopy_libfunc, 0,
2849 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2850 XEXP (to_rtx, 0), Pmode,
2851 size, Pmode);
2852 #endif
2853
2854 preserve_temp_slots (to_rtx);
2855 free_temp_slots ();
2856 return to_rtx;
2857 }
2858
2859 /* Compute FROM and store the value in the rtx we got. */
2860
2861 result = store_expr (from, to_rtx, want_value);
2862 preserve_temp_slots (result);
2863 free_temp_slots ();
2864 return result;
2865 }
2866
2867 /* Generate code for computing expression EXP,
2868 and storing the value into TARGET.
2869 Returns TARGET or an equivalent value.
2870 TARGET may contain a QUEUED rtx.
2871
2872 If SUGGEST_REG is nonzero, copy the value through a register
2873 and return that register, if that is possible.
2874
2875 If the value stored is a constant, we return the constant. */
2876
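/* For illustration, expand_assignment above uses this routine as
   `store_expr (from, to_rtx, want_value)'.  A hypothetical direct use that
   stores an initializer tree INIT into the rtl of a declaration DECL and
   discards the value would be:

	store_expr (init, DECL_RTL (decl), 0);  */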
2877 rtx
2878 store_expr (exp, target, suggest_reg)
2879 register tree exp;
2880 register rtx target;
2881 int suggest_reg;
2882 {
2883 register rtx temp;
2884 int dont_return_target = 0;
2885
2886 if (TREE_CODE (exp) == COMPOUND_EXPR)
2887 {
2888 /* Perform first part of compound expression, then assign from second
2889 part. */
2890 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2891 emit_queue ();
2892 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2893 }
2894 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2895 {
2896 /* For conditional expression, get safe form of the target. Then
2897 test the condition, doing the appropriate assignment on either
2898 side. This avoids the creation of unnecessary temporaries.
2899 For non-BLKmode, it is more efficient not to do this. */
2900
2901 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2902
2903 emit_queue ();
2904 target = protect_from_queue (target, 1);
2905
2906 NO_DEFER_POP;
2907 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2908 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2909 emit_queue ();
2910 emit_jump_insn (gen_jump (lab2));
2911 emit_barrier ();
2912 emit_label (lab1);
2913 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2914 emit_queue ();
2915 emit_label (lab2);
2916 OK_DEFER_POP;
2917 return target;
2918 }
2919 else if (suggest_reg && GET_CODE (target) == MEM
2920 && GET_MODE (target) != BLKmode)
2921 /* If target is in memory and caller wants value in a register instead,
2922 arrange that. Pass TARGET as target for expand_expr so that,
2923 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2924 We know expand_expr will not use the target in that case. */
2925 {
2926 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2927 GET_MODE (target), 0);
2928 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2929 temp = copy_to_reg (temp);
2930 dont_return_target = 1;
2931 }
2932 else if (queued_subexp_p (target))
2933 /* If target contains a postincrement, it is not safe
2934 to use as the returned value. It would access the wrong
2935 place by the time the queued increment gets output.
2936 So copy the value through a temporary and use that temp
2937 as the result. */
2938 {
2939 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2940 {
2941 /* Expand EXP into a new pseudo. */
2942 temp = gen_reg_rtx (GET_MODE (target));
2943 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2944 }
2945 else
2946 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2947 dont_return_target = 1;
2948 }
2949 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2950 /* If this is a scalar in a register that is stored in a wider mode
2951 than the declared mode, compute the result into its declared mode
2952 and then convert to the wider mode. Our value is the computed
2953 expression. */
2954 {
2955 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2956 convert_move (SUBREG_REG (target), temp,
2957 SUBREG_PROMOTED_UNSIGNED_P (target));
2958 return temp;
2959 }
2960 else
2961 {
2962 temp = expand_expr (exp, target, GET_MODE (target), 0);
2963 /* DO return TARGET if it's a specified hardware register.
2964 expand_return relies on this. */
2965 if (!(target && GET_CODE (target) == REG
2966 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2967 && CONSTANT_P (temp))
2968 dont_return_target = 1;
2969 }
2970
2971 /* If value was not generated in the target, store it there.
2972 Convert the value to TARGET's type first if necessary. */
2973
2974 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2975 {
2976 target = protect_from_queue (target, 1);
2977 if (GET_MODE (temp) != GET_MODE (target)
2978 && GET_MODE (temp) != VOIDmode)
2979 {
2980 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2981 if (dont_return_target)
2982 {
2983 /* In this case, we will return TEMP,
2984 so make sure it has the proper mode.
2985 But don't forget to store the value into TARGET. */
2986 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2987 emit_move_insn (target, temp);
2988 }
2989 else
2990 convert_move (target, temp, unsignedp);
2991 }
2992
2993 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2994 {
2995 /* Handle copying a string constant into an array.
2996 The string constant may be shorter than the array.
2997 So copy just the string's actual length, and clear the rest. */
2998 rtx size;
2999
3000 /* Get the size of the data type of the string,
3001 which is actually the size of the target. */
3002 size = expr_size (exp);
3003 if (GET_CODE (size) == CONST_INT
3004 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3005 emit_block_move (target, temp, size,
3006 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3007 else
3008 {
3009 /* Compute the size of the data to copy from the string. */
3010 tree copy_size
3011 = fold (build (MIN_EXPR, sizetype,
3012 size_binop (CEIL_DIV_EXPR,
3013 TYPE_SIZE (TREE_TYPE (exp)),
3014 size_int (BITS_PER_UNIT)),
3015 convert (sizetype,
3016 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
3017 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3018 VOIDmode, 0);
3019 rtx label = 0;
3020
3021 /* Copy that much. */
3022 emit_block_move (target, temp, copy_size_rtx,
3023 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3024
3025 /* Figure out how much is left in TARGET
3026 that we have to clear. */
3027 if (GET_CODE (copy_size_rtx) == CONST_INT)
3028 {
3029 temp = plus_constant (XEXP (target, 0),
3030 TREE_STRING_LENGTH (exp));
3031 size = plus_constant (size,
3032 - TREE_STRING_LENGTH (exp));
3033 }
3034 else
3035 {
3036 enum machine_mode size_mode = Pmode;
3037
3038 temp = force_reg (Pmode, XEXP (target, 0));
3039 temp = expand_binop (size_mode, add_optab, temp,
3040 copy_size_rtx, NULL_RTX, 0,
3041 OPTAB_LIB_WIDEN);
3042
3043 size = expand_binop (size_mode, sub_optab, size,
3044 copy_size_rtx, NULL_RTX, 0,
3045 OPTAB_LIB_WIDEN);
3046
3047 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3048 GET_MODE (size), 0, 0);
3049 label = gen_label_rtx ();
3050 emit_jump_insn (gen_blt (label));
3051 }
3052
3053 if (size != const0_rtx)
3054 {
3055 #ifdef TARGET_MEM_FUNCTIONS
3056 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3057 temp, Pmode, const0_rtx, Pmode, size, Pmode);
3058 #else
3059 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3060 temp, Pmode, size, Pmode);
3061 #endif
3062 }
3063 if (label)
3064 emit_label (label);
3065 }
3066 }
3067 else if (GET_MODE (temp) == BLKmode)
3068 emit_block_move (target, temp, expr_size (exp),
3069 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3070 else
3071 emit_move_insn (target, temp);
3072 }
3073 if (dont_return_target)
3074 return temp;
3075 return target;
3076 }
3077 \f
3078 /* Store the value of constructor EXP into the rtx TARGET.
3079 TARGET is either a REG or a MEM. */
3080
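/* For illustration, an aggregate initializer such as `struct s v = { 1, 2 };'
   is represented by a CONSTRUCTOR tree; with CTOR and DECL as made-up
   locals, and assuming V's rtl is a MEM or REG, it could be stored by:

	store_constructor (ctor, DECL_RTL (decl));  */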
3081 static void
3082 store_constructor (exp, target)
3083 tree exp;
3084 rtx target;
3085 {
3086 tree type = TREE_TYPE (exp);
3087
3088 /* We know our target cannot conflict, since safe_from_p has been called. */
3089 #if 0
3090 /* Don't try copying piece by piece into a hard register
3091 since that is vulnerable to being clobbered by EXP.
3092 Instead, construct in a pseudo register and then copy it all. */
3093 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3094 {
3095 rtx temp = gen_reg_rtx (GET_MODE (target));
3096 store_constructor (exp, temp);
3097 emit_move_insn (target, temp);
3098 return;
3099 }
3100 #endif
3101
3102 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
3103 {
3104 register tree elt;
3105
3106 /* Inform later passes that the whole union value is dead. */
3107 if (TREE_CODE (type) == UNION_TYPE)
3108 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3109
3110 /* If we are building a static constructor into a register,
3111 set the initial value as zero so we can fold the value into
3112 a constant. */
3113 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
3114 emit_move_insn (target, const0_rtx);
3115
3116 /* If the constructor has fewer fields than the structure,
3117 clear the whole structure first. */
3118 else if (list_length (CONSTRUCTOR_ELTS (exp))
3119 != list_length (TYPE_FIELDS (type)))
3120 clear_storage (target, int_size_in_bytes (type));
3121 else
3122 /* Inform later passes that the old value is dead. */
3123 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3124
3125 /* Store each element of the constructor into
3126 the corresponding field of TARGET. */
3127
3128 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3129 {
3130 register tree field = TREE_PURPOSE (elt);
3131 register enum machine_mode mode;
3132 int bitsize;
3133 int bitpos;
3134 int unsignedp;
3135
3136 /* Just ignore missing fields.
3137 We cleared the whole structure, above,
3138 if any fields are missing. */
3139 if (field == 0)
3140 continue;
3141
3142 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3143 unsignedp = TREE_UNSIGNED (field);
3144 mode = DECL_MODE (field);
3145 if (DECL_BIT_FIELD (field))
3146 mode = VOIDmode;
3147
3148 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
3149 /* ??? This case remains to be written. */
3150 abort ();
3151
3152 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
3153
3154 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3155 /* The alignment of TARGET is
3156 at least what its type requires. */
3157 VOIDmode, 0,
3158 TYPE_ALIGN (type) / BITS_PER_UNIT,
3159 int_size_in_bytes (type));
3160 }
3161 }
3162 else if (TREE_CODE (type) == ARRAY_TYPE)
3163 {
3164 register tree elt;
3165 register int i;
3166 tree domain = TYPE_DOMAIN (type);
3167 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3168 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3169 tree elttype = TREE_TYPE (type);
3170
3171 /* If the constructor has fewer elements than the array,
3172 clear the whole array first. Similarly if this is a
3173 static constructor of a non-BLKmode object. */
3174
3175 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3176 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3177 clear_storage (target, maxelt - minelt + 1);
3178 else
3179 /* Inform later passes that the old value is dead. */
3180 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3181
3182 /* Store each element of the constructor into
3183 the corresponding element of TARGET, determined
3184 by counting the elements. */
3185 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3186 elt;
3187 elt = TREE_CHAIN (elt), i++)
3188 {
3189 register enum machine_mode mode;
3190 int bitsize;
3191 int bitpos;
3192 int unsignedp;
3193
3194 mode = TYPE_MODE (elttype);
3195 bitsize = GET_MODE_BITSIZE (mode);
3196 unsignedp = TREE_UNSIGNED (elttype);
3197
3198 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3199
3200 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3201 /* The alignment of TARGET is
3202 at least what its type requires. */
3203 VOIDmode, 0,
3204 TYPE_ALIGN (type) / BITS_PER_UNIT,
3205 int_size_in_bytes (type));
3206 }
3207 }
3208
3209 else
3210 abort ();
3211 }
3212
3213 /* Store the value of EXP (an expression tree)
3214 into a subfield of TARGET which has mode MODE and occupies
3215 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3216 If MODE is VOIDmode, it means that we are storing into a bit-field.
3217
3218 If VALUE_MODE is VOIDmode, return nothing in particular.
3219 UNSIGNEDP is not used in this case.
3220
3221 Otherwise, return an rtx for the value stored. This rtx
3222 has mode VALUE_MODE if that is convenient to do.
3223 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3224
3225 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3226 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3227
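/* For illustration, storing EXP into an 8-bit field that begins 16 bits
   into a 4-byte, word-aligned structure whose rtl is TO_RTX, with no value
   wanted back, could look like (TO_RTX and EXP are made-up names):

	store_field (to_rtx, 8, 16, VOIDmode, exp, VOIDmode, 0,
		     UNITS_PER_WORD, 4);

   MODE of VOIDmode marks a bit-field store; VALUE_MODE of VOIDmode says
   the caller does not want the stored value returned.  */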
3228 static rtx
3229 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3230 unsignedp, align, total_size)
3231 rtx target;
3232 int bitsize, bitpos;
3233 enum machine_mode mode;
3234 tree exp;
3235 enum machine_mode value_mode;
3236 int unsignedp;
3237 int align;
3238 int total_size;
3239 {
3240 HOST_WIDE_INT width_mask = 0;
3241
3242 if (bitsize < HOST_BITS_PER_WIDE_INT)
3243 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3244
3245 /* If we are storing into an unaligned field of an aligned union that is
3246 in a register, we may have the mode of TARGET being an integer mode but
3247 MODE == BLKmode. In that case, get an aligned object whose size and
3248 alignment are the same as TARGET and store TARGET into it (we can avoid
3249 the store if the field being stored is the entire width of TARGET). Then
3250 call ourselves recursively to store the field into a BLKmode version of
3251 that object. Finally, load from the object into TARGET. This is not
3252 very efficient in general, but should only be slightly more expensive
3253 than the otherwise-required unaligned accesses. Perhaps this can be
3254 cleaned up later. */
3255
3256 if (mode == BLKmode
3257 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3258 {
3259 rtx object = assign_stack_temp (GET_MODE (target),
3260 GET_MODE_SIZE (GET_MODE (target)), 0);
3261 rtx blk_object = copy_rtx (object);
3262
3263 PUT_MODE (blk_object, BLKmode);
3264
3265 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3266 emit_move_insn (object, target);
3267
3268 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3269 align, total_size);
3270
3271 emit_move_insn (target, object);
3272
3273 return target;
3274 }
3275
3276 /* If the structure is in a register or if the component
3277 is a bit field, we cannot use addressing to access it.
3278 Use bit-field techniques or SUBREG to store in it. */
3279
3280 if (mode == VOIDmode
3281 || (mode != BLKmode && ! direct_store[(int) mode])
3282 || GET_CODE (target) == REG
3283 || GET_CODE (target) == SUBREG)
3284 {
3285 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3286 /* Store the value in the bitfield. */
3287 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3288 if (value_mode != VOIDmode)
3289 {
3290 /* The caller wants an rtx for the value. */
3291 /* If possible, avoid refetching from the bitfield itself. */
3292 if (width_mask != 0
3293 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3294 {
3295 tree count;
3296 enum machine_mode tmode;
3297
3298 if (unsignedp)
3299 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3300 tmode = GET_MODE (temp);
3301 if (tmode == VOIDmode)
3302 tmode = value_mode;
3303 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3304 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3305 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3306 }
3307 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3308 NULL_RTX, value_mode, 0, align,
3309 total_size);
3310 }
3311 return const0_rtx;
3312 }
3313 else
3314 {
3315 rtx addr = XEXP (target, 0);
3316 rtx to_rtx;
3317
3318 /* If a value is wanted, it must be the lhs;
3319 so make the address stable for multiple use. */
3320
3321 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3322 && ! CONSTANT_ADDRESS_P (addr)
3323 /* A frame-pointer reference is already stable. */
3324 && ! (GET_CODE (addr) == PLUS
3325 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3326 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3327 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3328 addr = copy_to_reg (addr);
3329
3330 /* Now build a reference to just the desired component. */
3331
3332 to_rtx = change_address (target, mode,
3333 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3334 MEM_IN_STRUCT_P (to_rtx) = 1;
3335
3336 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3337 }
3338 }
3339 \f
3340 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3341 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3342 ARRAY_REFs and find the ultimate containing object, which we return.
3343
3344 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3345 bit position, and *PUNSIGNEDP to the signedness of the field.
3346 If the position of the field is variable, we store a tree
3347 giving the variable offset (in units) in *POFFSET.
3348 This offset is in addition to the bit position.
3349 If the position is not variable, we store 0 in *POFFSET.
3350
3351 If any of the extraction expressions is volatile,
3352 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3353
3354 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3355 is a mode that can be used to access the field. In that case, *PBITSIZE
3356 is redundant.
3357
3358 If the field describes a variable-sized object, *PMODE is set to
3359 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3360 this case, but the address of the object can be found. */
3361
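/* For illustration, expand_assignment above calls this as

	tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep);

   For a reference like `s.f', where F is a hypothetical non-bit-field int
   member placed 4 bytes into S on a 32-bit target, TEM is the tree for S,
   *PBITSIZE is 32, *PBITPOS is 32, *POFFSET is 0, and *PMODE is SImode.  */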
3362 tree
3363 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3364 punsignedp, pvolatilep)
3365 tree exp;
3366 int *pbitsize;
3367 int *pbitpos;
3368 tree *poffset;
3369 enum machine_mode *pmode;
3370 int *punsignedp;
3371 int *pvolatilep;
3372 {
3373 tree size_tree = 0;
3374 enum machine_mode mode = VOIDmode;
3375 tree offset = integer_zero_node;
3376
3377 if (TREE_CODE (exp) == COMPONENT_REF)
3378 {
3379 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3380 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3381 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3382 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3383 }
3384 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3385 {
3386 size_tree = TREE_OPERAND (exp, 1);
3387 *punsignedp = TREE_UNSIGNED (exp);
3388 }
3389 else
3390 {
3391 mode = TYPE_MODE (TREE_TYPE (exp));
3392 *pbitsize = GET_MODE_BITSIZE (mode);
3393 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3394 }
3395
3396 if (size_tree)
3397 {
3398 if (TREE_CODE (size_tree) != INTEGER_CST)
3399 mode = BLKmode, *pbitsize = -1;
3400 else
3401 *pbitsize = TREE_INT_CST_LOW (size_tree);
3402 }
3403
3404 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3405 and find the ultimate containing object. */
3406
3407 *pbitpos = 0;
3408
3409 while (1)
3410 {
3411 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3412 {
3413 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3414 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3415 : TREE_OPERAND (exp, 2));
3416
3417 if (TREE_CODE (pos) == PLUS_EXPR)
3418 {
3419 tree constant, var;
3420 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3421 {
3422 constant = TREE_OPERAND (pos, 0);
3423 var = TREE_OPERAND (pos, 1);
3424 }
3425 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3426 {
3427 constant = TREE_OPERAND (pos, 1);
3428 var = TREE_OPERAND (pos, 0);
3429 }
3430 else
3431 abort ();
3432
3433 *pbitpos += TREE_INT_CST_LOW (constant);
3434 offset = size_binop (PLUS_EXPR, offset,
3435 size_binop (FLOOR_DIV_EXPR, var,
3436 size_int (BITS_PER_UNIT)));
3437 }
3438 else if (TREE_CODE (pos) == INTEGER_CST)
3439 *pbitpos += TREE_INT_CST_LOW (pos);
3440 else
3441 {
3442 /* Assume here that the offset is a multiple of a unit.
3443 If not, there should be an explicitly added constant. */
3444 offset = size_binop (PLUS_EXPR, offset,
3445 size_binop (FLOOR_DIV_EXPR, pos,
3446 size_int (BITS_PER_UNIT)));
3447 }
3448 }
3449
3450 else if (TREE_CODE (exp) == ARRAY_REF)
3451 {
3452 /* This code is based on the code in case ARRAY_REF in expand_expr
3453 below. We assume here that the size of an array element is
3454 always an integral multiple of BITS_PER_UNIT. */
3455
3456 tree index = TREE_OPERAND (exp, 1);
3457 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3458 tree low_bound
3459 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3460 tree index_type = TREE_TYPE (index);
3461
3462 if (! integer_zerop (low_bound))
3463 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3464
3465 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3466 {
3467 index = convert (type_for_size (POINTER_SIZE, 0), index);
3468 index_type = TREE_TYPE (index);
3469 }
3470
3471 index = fold (build (MULT_EXPR, index_type, index,
3472 TYPE_SIZE (TREE_TYPE (exp))));
3473
3474 if (TREE_CODE (index) == INTEGER_CST
3475 && TREE_INT_CST_HIGH (index) == 0)
3476 *pbitpos += TREE_INT_CST_LOW (index);
3477 else
3478 offset = size_binop (PLUS_EXPR, offset,
3479 size_binop (FLOOR_DIV_EXPR, index,
3480 size_int (BITS_PER_UNIT)));
3481 }
3482 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3483 && ! ((TREE_CODE (exp) == NOP_EXPR
3484 || TREE_CODE (exp) == CONVERT_EXPR)
3485 && (TYPE_MODE (TREE_TYPE (exp))
3486 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3487 break;
3488
3489 /* If any reference in the chain is volatile, the effect is volatile. */
3490 if (TREE_THIS_VOLATILE (exp))
3491 *pvolatilep = 1;
3492 exp = TREE_OPERAND (exp, 0);
3493 }
3494
3495 /* If this was a bit-field, see if there is a mode that allows direct
3496 access in case EXP is in memory. */
3497 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
3498 {
3499 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3500 if (mode == BLKmode)
3501 mode = VOIDmode;
3502 }
3503
3504 if (integer_zerop (offset))
3505 offset = 0;
3506
3507 *pmode = mode;
3508 *poffset = offset;
3509 #if 0
3510 /* We aren't finished fixing the callers to really handle nonzero offset. */
3511 if (offset != 0)
3512 abort ();
3513 #endif
3514
3515 return exp;
3516 }
3517 \f
3518 /* Given an rtx VALUE that may contain additions and multiplications,
3519 return an equivalent value that just refers to a register or memory.
3520 This is done by generating instructions to perform the arithmetic
3521 and returning a pseudo-register containing the value.
3522
3523 The returned value may be a REG, SUBREG, MEM or constant. */
3524
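/* For illustration, with BASE a made-up pseudo register in Pmode,

	addr = force_operand (gen_rtx (PLUS, Pmode, base, GEN_INT (20)),
			      NULL_RTX);

   emits an add insn and returns a pseudo (or other valid operand) holding
   BASE plus 20, suitable for use as a memory address.  */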
3525 rtx
3526 force_operand (value, target)
3527 rtx value, target;
3528 {
3529 register optab binoptab = 0;
3530 /* Use a temporary to force order of execution of calls to
3531 `force_operand'. */
3532 rtx tmp;
3533 register rtx op2;
3534 /* Use subtarget as the target for operand 0 of a binary operation. */
3535 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3536
3537 if (GET_CODE (value) == PLUS)
3538 binoptab = add_optab;
3539 else if (GET_CODE (value) == MINUS)
3540 binoptab = sub_optab;
3541 else if (GET_CODE (value) == MULT)
3542 {
3543 op2 = XEXP (value, 1);
3544 if (!CONSTANT_P (op2)
3545 && !(GET_CODE (op2) == REG && op2 != subtarget))
3546 subtarget = 0;
3547 tmp = force_operand (XEXP (value, 0), subtarget);
3548 return expand_mult (GET_MODE (value), tmp,
3549 force_operand (op2, NULL_RTX),
3550 target, 0);
3551 }
3552
3553 if (binoptab)
3554 {
3555 op2 = XEXP (value, 1);
3556 if (!CONSTANT_P (op2)
3557 && !(GET_CODE (op2) == REG && op2 != subtarget))
3558 subtarget = 0;
3559 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3560 {
3561 binoptab = add_optab;
3562 op2 = negate_rtx (GET_MODE (value), op2);
3563 }
3564
3565 /* Check for an addition with OP2 a constant integer and our first
3566 operand a PLUS of a virtual register and something else. In that
3567 case, we want to emit the sum of the virtual register and the
3568 constant first and then add the other value. This allows virtual
3569 register instantiation to simply modify the constant rather than
3570 creating another one around this addition. */
3571 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3572 && GET_CODE (XEXP (value, 0)) == PLUS
3573 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3574 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3575 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3576 {
3577 rtx temp = expand_binop (GET_MODE (value), binoptab,
3578 XEXP (XEXP (value, 0), 0), op2,
3579 subtarget, 0, OPTAB_LIB_WIDEN);
3580 return expand_binop (GET_MODE (value), binoptab, temp,
3581 force_operand (XEXP (XEXP (value, 0), 1), 0),
3582 target, 0, OPTAB_LIB_WIDEN);
3583 }
3584
3585 tmp = force_operand (XEXP (value, 0), subtarget);
3586 return expand_binop (GET_MODE (value), binoptab, tmp,
3587 force_operand (op2, NULL_RTX),
3588 target, 0, OPTAB_LIB_WIDEN);
3589 /* We give UNSIGNEDP = 0 to expand_binop
3590 because the only operations we are expanding here are signed ones. */
3591 }
3592 return value;
3593 }
3594 \f
3595 /* Subroutine of expand_expr:
3596 save the non-copied parts (LIST) of an expr (LHS), and return a list
3597 which can restore these values to their previous values,
3598 should something modify their storage. */
3599
3600 static tree
3601 save_noncopied_parts (lhs, list)
3602 tree lhs;
3603 tree list;
3604 {
3605 tree tail;
3606 tree parts = 0;
3607
3608 for (tail = list; tail; tail = TREE_CHAIN (tail))
3609 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3610 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3611 else
3612 {
3613 tree part = TREE_VALUE (tail);
3614 tree part_type = TREE_TYPE (part);
3615 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3616 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3617 int_size_in_bytes (part_type), 0);
3618 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3619 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3620 parts = tree_cons (to_be_saved,
3621 build (RTL_EXPR, part_type, NULL_TREE,
3622 (tree) target),
3623 parts);
3624 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3625 }
3626 return parts;
3627 }
3628
3629 /* Subroutine of expand_expr:
3630 record the non-copied parts (LIST) of an expr (LHS), and return a list
3631 which specifies the initial values of these parts. */
3632
3633 static tree
3634 init_noncopied_parts (lhs, list)
3635 tree lhs;
3636 tree list;
3637 {
3638 tree tail;
3639 tree parts = 0;
3640
3641 for (tail = list; tail; tail = TREE_CHAIN (tail))
3642 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3643 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3644 else
3645 {
3646 tree part = TREE_VALUE (tail);
3647 tree part_type = TREE_TYPE (part);
3648 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3649 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3650 }
3651 return parts;
3652 }
3653
3654 /* Subroutine of expand_expr: return nonzero iff there is no way that
3655 EXP can reference X, which is being modified. */
3656
3657 static int
3658 safe_from_p (x, exp)
3659 rtx x;
3660 tree exp;
3661 {
3662 rtx exp_rtl = 0;
3663 int i, nops;
3664
3665 if (x == 0)
3666 return 1;
3667
3668 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
3669 find the underlying pseudo. */
3670 if (GET_CODE (x) == SUBREG)
3671 {
3672 x = SUBREG_REG (x);
3673 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3674 return 0;
3675 }
3676
3677 /* If X is a location in the outgoing argument area, it is always safe. */
3678 if (GET_CODE (x) == MEM
3679 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3680 || (GET_CODE (XEXP (x, 0)) == PLUS
3681 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3682 return 1;
3683
3684 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3685 {
3686 case 'd':
3687 exp_rtl = DECL_RTL (exp);
3688 break;
3689
3690 case 'c':
3691 return 1;
3692
3693 case 'x':
3694 if (TREE_CODE (exp) == TREE_LIST)
3695 return ((TREE_VALUE (exp) == 0
3696 || safe_from_p (x, TREE_VALUE (exp)))
3697 && (TREE_CHAIN (exp) == 0
3698 || safe_from_p (x, TREE_CHAIN (exp))));
3699 else
3700 return 0;
3701
3702 case '1':
3703 return safe_from_p (x, TREE_OPERAND (exp, 0));
3704
3705 case '2':
3706 case '<':
3707 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3708 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3709
3710 case 'e':
3711 case 'r':
3712 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3713 the expression. If it is set, we conflict iff we are that rtx or
3714 both are in memory. Otherwise, we check all operands of the
3715 expression recursively. */
3716
3717 switch (TREE_CODE (exp))
3718 {
3719 case ADDR_EXPR:
3720 return staticp (TREE_OPERAND (exp, 0));
3721
3722 case INDIRECT_REF:
3723 if (GET_CODE (x) == MEM)
3724 return 0;
3725 break;
3726
3727 case CALL_EXPR:
3728 exp_rtl = CALL_EXPR_RTL (exp);
3729 if (exp_rtl == 0)
3730 {
3731 /* Assume that the call will clobber all hard registers and
3732 all of memory. */
3733 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3734 || GET_CODE (x) == MEM)
3735 return 0;
3736 }
3737
3738 break;
3739
3740 case RTL_EXPR:
3741 exp_rtl = RTL_EXPR_RTL (exp);
3742 if (exp_rtl == 0)
3743 /* We don't know what this can modify. */
3744 return 0;
3745
3746 break;
3747
3748 case WITH_CLEANUP_EXPR:
3749 exp_rtl = RTL_EXPR_RTL (exp);
3750 break;
3751
3752 case SAVE_EXPR:
3753 exp_rtl = SAVE_EXPR_RTL (exp);
3754 break;
3755
3756 case BIND_EXPR:
3757 /* The only operand we look at is operand 1. The rest aren't
3758 part of the expression. */
3759 return safe_from_p (x, TREE_OPERAND (exp, 1));
3760
3761 case METHOD_CALL_EXPR:
3762 /* This takes an rtx argument, but shouldn't appear here. */
3763 abort ();
3764 }
3765
3766 /* If we have an rtx, we do not need to scan our operands. */
3767 if (exp_rtl)
3768 break;
3769
3770 nops = tree_code_length[(int) TREE_CODE (exp)];
3771 for (i = 0; i < nops; i++)
3772 if (TREE_OPERAND (exp, i) != 0
3773 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3774 return 0;
3775 }
3776
3777 /* If we have an rtl, find any enclosed object. Then see if we conflict
3778 with it. */
3779 if (exp_rtl)
3780 {
3781 if (GET_CODE (exp_rtl) == SUBREG)
3782 {
3783 exp_rtl = SUBREG_REG (exp_rtl);
3784 if (GET_CODE (exp_rtl) == REG
3785 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3786 return 0;
3787 }
3788
3789 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3790 are memory and EXP is not readonly. */
3791 return ! (rtx_equal_p (x, exp_rtl)
3792 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3793 && ! TREE_READONLY (exp)));
3794 }
3795
3796 /* If we reach here, it is safe. */
3797 return 1;
3798 }
3799
3800 /* Subroutine of expand_expr: return nonzero iff EXP is an
3801 expression whose type is statically determinable. */
3802
3803 static int
3804 fixed_type_p (exp)
3805 tree exp;
3806 {
3807 if (TREE_CODE (exp) == PARM_DECL
3808 || TREE_CODE (exp) == VAR_DECL
3809 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3810 || TREE_CODE (exp) == COMPONENT_REF
3811 || TREE_CODE (exp) == ARRAY_REF)
3812 return 1;
3813 return 0;
3814 }
3815 \f
3816 /* expand_expr: generate code for computing expression EXP.
3817 An rtx for the computed value is returned. The value is never null.
3818 In the case of a void EXP, const0_rtx is returned.
3819
3820 The value may be stored in TARGET if TARGET is nonzero.
3821 TARGET is just a suggestion; callers must assume that
3822 the rtx returned may not be the same as TARGET.
3823
3824 If TARGET is CONST0_RTX, it means that the value will be ignored.
3825
3826 If TMODE is not VOIDmode, it suggests generating the
3827 result in mode TMODE. But this is done only when convenient.
3828 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3829 TMODE is just a suggestion; callers must assume that
3830 the rtx returned may not have mode TMODE.
3831
3832 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3833 with a constant address even if that address is not normally legitimate.
3834 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3835
3836 If MODIFIER is EXPAND_SUM then when EXP is an addition
3837 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3838 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3839 products as above, or REG or MEM, or constant.
3840 Ordinarily in such cases we would output mul or add instructions
3841 and then return a pseudo reg containing the sum.
3842
3843 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3844 it also marks a label as absolutely required (it can't be dead).
3845 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3846 This is used for outputting expressions used in initializers. */
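/* A hedged illustration of EXPAND_SUM (example names and values, not from
   this file): expanding `p + 12' with modifier == EXPAND_SUM may simply
   return

       (plus (reg p) (const_int 12))

   leaving it to the caller, typically via memory_address, to decide whether
   that sum is already a legitimate address or must be copied to a pseudo. */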
3847
3848 rtx
3849 expand_expr (exp, target, tmode, modifier)
3850 register tree exp;
3851 rtx target;
3852 enum machine_mode tmode;
3853 enum expand_modifier modifier;
3854 {
3855 register rtx op0, op1, temp;
3856 tree type = TREE_TYPE (exp);
3857 int unsignedp = TREE_UNSIGNED (type);
3858 register enum machine_mode mode = TYPE_MODE (type);
3859 register enum tree_code code = TREE_CODE (exp);
3860 optab this_optab;
3861 /* Use subtarget as the target for operand 0 of a binary operation. */
3862 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3863 rtx original_target = target;
3864 int ignore = target == const0_rtx;
3865 tree context;
3866
3867 /* Don't use hard regs as subtargets, because the combiner
3868 can only handle pseudo regs. */
3869 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3870 subtarget = 0;
3871 /* Avoid subtargets inside loops,
3872 since they hide some invariant expressions. */
3873 if (preserve_subexpressions_p ())
3874 subtarget = 0;
3875
3876 if (ignore) target = 0, original_target = 0;
3877
3878 /* If we will do cse, generate all results into pseudo registers
3879 since 1) that allows cse to find more things
3880 and 2) otherwise cse could produce an insn the machine
3881 cannot support. */
3882
3883 if (! cse_not_expected && mode != BLKmode && target
3884 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3885 target = subtarget;
3886
3887 /* Ensure we reference a volatile object even if value is ignored. */
3888 if (ignore && TREE_THIS_VOLATILE (exp)
3889 && TREE_CODE (exp) != FUNCTION_DECL
3890 && mode != VOIDmode && mode != BLKmode)
3891 {
3892 target = gen_reg_rtx (mode);
3893 temp = expand_expr (exp, target, VOIDmode, modifier);
3894 if (temp != target)
3895 emit_move_insn (target, temp);
3896 return target;
3897 }
3898
3899 switch (code)
3900 {
3901 case LABEL_DECL:
3902 {
3903 tree function = decl_function_context (exp);
3904 /* Handle using a label in a containing function. */
3905 if (function != current_function_decl && function != 0)
3906 {
3907 struct function *p = find_function_data (function);
3908 /* Allocate in the memory associated with the function
3909 that the label is in. */
3910 push_obstacks (p->function_obstack,
3911 p->function_maybepermanent_obstack);
3912
3913 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3914 label_rtx (exp), p->forced_labels);
3915 pop_obstacks ();
3916 }
3917 else if (modifier == EXPAND_INITIALIZER)
3918 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3919 label_rtx (exp), forced_labels);
3920 temp = gen_rtx (MEM, FUNCTION_MODE,
3921 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3922 if (function != current_function_decl && function != 0)
3923 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3924 return temp;
3925 }
3926
3927 case PARM_DECL:
3928 if (DECL_RTL (exp) == 0)
3929 {
3930 error_with_decl (exp, "prior parameter's size depends on `%s'");
3931 return CONST0_RTX (mode);
3932 }
3933
3934 case FUNCTION_DECL:
3935 case VAR_DECL:
3936 case RESULT_DECL:
3937 if (DECL_RTL (exp) == 0)
3938 abort ();
3939 /* Ensure the variable is marked as used
3940 even if it doesn't go through a parser. */
3941 TREE_USED (exp) = 1;
3942 /* Handle variables inherited from containing functions. */
3943 context = decl_function_context (exp);
3944
3945 /* We treat inline_function_decl as an alias for the current function
3946 because that is the inline function whose vars, types, etc.
3947 are being merged into the current function.
3948 See expand_inline_function. */
3949 if (context != 0 && context != current_function_decl
3950 && context != inline_function_decl
3951 /* If var is static, we don't need a static chain to access it. */
3952 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3953 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3954 {
3955 rtx addr;
3956
3957 /* Mark as non-local and addressable. */
3958 DECL_NONLOCAL (exp) = 1;
3959 mark_addressable (exp);
3960 if (GET_CODE (DECL_RTL (exp)) != MEM)
3961 abort ();
3962 addr = XEXP (DECL_RTL (exp), 0);
3963 if (GET_CODE (addr) == MEM)
3964 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3965 else
3966 addr = fix_lexical_addr (addr, exp);
3967 return change_address (DECL_RTL (exp), mode, addr);
3968 }
3969
3970 /* This is the case of an array whose size is to be determined
3971 from its initializer, while the initializer is still being parsed.
3972 See expand_decl. */
3973 if (GET_CODE (DECL_RTL (exp)) == MEM
3974 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3975 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3976 XEXP (DECL_RTL (exp), 0));
3977 if (GET_CODE (DECL_RTL (exp)) == MEM
3978 && modifier != EXPAND_CONST_ADDRESS
3979 && modifier != EXPAND_SUM
3980 && modifier != EXPAND_INITIALIZER)
3981 {
3982 /* DECL_RTL probably contains a constant address.
3983 On RISC machines where a constant address isn't valid,
3984 make some insns to get that address into a register. */
3985 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3986 || (flag_force_addr
3987 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3988 return change_address (DECL_RTL (exp), VOIDmode,
3989 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3990 }
3991
3992 /* If the mode of DECL_RTL does not match that of the decl, it
3993 must be a promoted value. We return a SUBREG of the wanted mode,
3994 but mark it so that we know that it was already extended. */
3995
3996 if (GET_CODE (DECL_RTL (exp)) == REG
3997 && GET_MODE (DECL_RTL (exp)) != mode)
3998 {
3999 enum machine_mode decl_mode = DECL_MODE (exp);
4000
4001 /* Get the signedness used for this variable. Ensure we get the
4002 same mode we got when the variable was declared. */
4003
4004 PROMOTE_MODE (decl_mode, unsignedp, type);
4005
4006 if (decl_mode != GET_MODE (DECL_RTL (exp)))
4007 abort ();
4008
4009 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4010 SUBREG_PROMOTED_VAR_P (temp) = 1;
4011 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4012 return temp;
4013 }
4014
4015 return DECL_RTL (exp);
4016
4017 case INTEGER_CST:
4018 return immed_double_const (TREE_INT_CST_LOW (exp),
4019 TREE_INT_CST_HIGH (exp),
4020 mode);
4021
4022 case CONST_DECL:
4023 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4024
4025 case REAL_CST:
4026 /* If optimized, generate immediate CONST_DOUBLE
4027 which will be turned into memory by reload if necessary.
4028
4029 We used to force a register so that loop.c could see it. But
4030 this does not allow gen_* patterns to perform optimizations with
4031 the constants. It also produces two insns in cases like "x = 1.0;".
4032 On most machines, floating-point constants are not permitted in
4033 many insns, so we'd end up copying it to a register in any case.
4034
4035 Now, we do the copying in expand_binop, if appropriate. */
4036 return immed_real_const (exp);
4037
4038 case COMPLEX_CST:
4039 case STRING_CST:
4040 if (! TREE_CST_RTL (exp))
4041 output_constant_def (exp);
4042
4043 /* TREE_CST_RTL probably contains a constant address.
4044 On RISC machines where a constant address isn't valid,
4045 make some insns to get that address into a register. */
4046 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4047 && modifier != EXPAND_CONST_ADDRESS
4048 && modifier != EXPAND_INITIALIZER
4049 && modifier != EXPAND_SUM
4050 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
4051 return change_address (TREE_CST_RTL (exp), VOIDmode,
4052 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4053 return TREE_CST_RTL (exp);
4054
4055 case SAVE_EXPR:
4056 context = decl_function_context (exp);
4057 /* We treat inline_function_decl as an alias for the current function
4058 because that is the inline function whose vars, types, etc.
4059 are being merged into the current function.
4060 See expand_inline_function. */
4061 if (context == current_function_decl || context == inline_function_decl)
4062 context = 0;
4063
4064 /* If this is non-local, handle it. */
4065 if (context)
4066 {
4067 temp = SAVE_EXPR_RTL (exp);
4068 if (temp && GET_CODE (temp) == REG)
4069 {
4070 put_var_into_stack (exp);
4071 temp = SAVE_EXPR_RTL (exp);
4072 }
4073 if (temp == 0 || GET_CODE (temp) != MEM)
4074 abort ();
4075 return change_address (temp, mode,
4076 fix_lexical_addr (XEXP (temp, 0), exp));
4077 }
4078 if (SAVE_EXPR_RTL (exp) == 0)
4079 {
4080 if (mode == BLKmode)
4081 temp
4082 = assign_stack_temp (mode,
4083 int_size_in_bytes (TREE_TYPE (exp)), 0);
4084 else
4085 {
4086 enum machine_mode var_mode = mode;
4087
4088 if (TREE_CODE (type) == INTEGER_TYPE
4089 || TREE_CODE (type) == ENUMERAL_TYPE
4090 || TREE_CODE (type) == BOOLEAN_TYPE
4091 || TREE_CODE (type) == CHAR_TYPE
4092 || TREE_CODE (type) == REAL_TYPE
4093 || TREE_CODE (type) == POINTER_TYPE
4094 || TREE_CODE (type) == OFFSET_TYPE)
4095 {
4096 PROMOTE_MODE (var_mode, unsignedp, type);
4097 }
4098
4099 temp = gen_reg_rtx (var_mode);
4100 }
4101
4102 SAVE_EXPR_RTL (exp) = temp;
4103 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4104 if (!optimize && GET_CODE (temp) == REG)
4105 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4106 save_expr_regs);
4107 }
4108
4109 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4110 must be a promoted value. We return a SUBREG of the wanted mode,
4111 but mark it so that we know that it was already extended. Note
4112 that `unsignedp' was modified above in this case. */
4113
4114 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4115 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4116 {
4117 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4118 SUBREG_PROMOTED_VAR_P (temp) = 1;
4119 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4120 return temp;
4121 }
4122
4123 return SAVE_EXPR_RTL (exp);
4124
4125 case EXIT_EXPR:
4126 /* Exit the current loop if the body-expression is true. */
4127 {
4128 rtx label = gen_label_rtx ();
4129 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
4130 expand_exit_loop (NULL_PTR);
4131 emit_label (label);
4132 }
4133 return const0_rtx;
4134
4135 case LOOP_EXPR:
4136 expand_start_loop (1);
4137 expand_expr_stmt (TREE_OPERAND (exp, 0));
4138 expand_end_loop ();
4139
4140 return const0_rtx;
4141
4142 case BIND_EXPR:
4143 {
4144 tree vars = TREE_OPERAND (exp, 0);
4145 int vars_need_expansion = 0;
4146
4147 /* Need to open a binding contour here because
4148 if there are any cleanups they must be contained here. */
4149 expand_start_bindings (0);
4150
4151 /* Mark the corresponding BLOCK for output in its proper place. */
4152 if (TREE_OPERAND (exp, 2) != 0
4153 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4154 insert_block (TREE_OPERAND (exp, 2));
4155
4156 /* If VARS have not yet been expanded, expand them now. */
4157 while (vars)
4158 {
4159 if (DECL_RTL (vars) == 0)
4160 {
4161 vars_need_expansion = 1;
4162 expand_decl (vars);
4163 }
4164 expand_decl_init (vars);
4165 vars = TREE_CHAIN (vars);
4166 }
4167
4168 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4169
4170 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4171
4172 return temp;
4173 }
4174
4175 case RTL_EXPR:
4176 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4177 abort ();
4178 emit_insns (RTL_EXPR_SEQUENCE (exp));
4179 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4180 return RTL_EXPR_RTL (exp);
4181
4182 case CONSTRUCTOR:
4183 /* All elts simple constants => refer to a constant in memory. But
4184 if this is a non-BLKmode mode, let it store a field at a time
4185 since that should make a CONST_INT or CONST_DOUBLE when we
4186 fold. */
4187 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
4188 {
4189 rtx constructor = output_constant_def (exp);
4190 if (modifier != EXPAND_CONST_ADDRESS
4191 && modifier != EXPAND_INITIALIZER
4192 && modifier != EXPAND_SUM
4193 && !memory_address_p (GET_MODE (constructor),
4194 XEXP (constructor, 0)))
4195 constructor = change_address (constructor, VOIDmode,
4196 XEXP (constructor, 0));
4197 return constructor;
4198 }
4199
4200 if (ignore)
4201 {
4202 tree elt;
4203 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4204 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4205 return const0_rtx;
4206 }
4207 else
4208 {
4209 if (target == 0 || ! safe_from_p (target, exp))
4210 {
4211 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4212 target = gen_reg_rtx (mode);
4213 else
4214 {
4215 enum tree_code c = TREE_CODE (type);
4216 target
4217 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4218 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
4219 MEM_IN_STRUCT_P (target) = 1;
4220 }
4221 }
4222 store_constructor (exp, target);
4223 return target;
4224 }
4225
4226 case INDIRECT_REF:
4227 {
4228 tree exp1 = TREE_OPERAND (exp, 0);
4229 tree exp2;
4230
4231 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4232 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4233 This code has the same general effect as simply doing
4234 expand_expr on the save expr, except that the expression PTR
4235 is computed for use as a memory address. This means different
4236 code, suitable for indexing, may be generated. */
4237 if (TREE_CODE (exp1) == SAVE_EXPR
4238 && SAVE_EXPR_RTL (exp1) == 0
4239 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4240 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4241 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4242 {
4243 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4244 VOIDmode, EXPAND_SUM);
4245 op0 = memory_address (mode, temp);
4246 op0 = copy_all_regs (op0);
4247 SAVE_EXPR_RTL (exp1) = op0;
4248 }
4249 else
4250 {
4251 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4252 op0 = memory_address (mode, op0);
4253 }
4254
4255 temp = gen_rtx (MEM, mode, op0);
4256 /* If address was computed by addition,
4257 mark this as an element of an aggregate. */
4258 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4259 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4260 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4261 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4262 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4263 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4264 || (TREE_CODE (exp1) == ADDR_EXPR
4265 && (exp2 = TREE_OPERAND (exp1, 0))
4266 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4267 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
4268 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
4269 MEM_IN_STRUCT_P (temp) = 1;
4270 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4271 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4272 a location is accessed through a pointer to const does not mean
4273 that the value there can never change. */
4274 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4275 #endif
4276 return temp;
4277 }
4278
4279 case ARRAY_REF:
4280 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4281 abort ();
4282
4283 {
4284 tree array = TREE_OPERAND (exp, 0);
4285 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4286 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4287 tree index = TREE_OPERAND (exp, 1);
4288 tree index_type = TREE_TYPE (index);
4289 int i;
4290
4291 /* Optimize the special-case of a zero lower bound. */
4292 if (! integer_zerop (low_bound))
4293 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4294
4295 if (TREE_CODE (index) != INTEGER_CST
4296 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4297 {
4298 /* Nonconstant array index or nonconstant element size.
4299 Generate the tree for *(&array+index) and expand that,
4300 except do it in a language-independent way
4301 and don't complain about non-lvalue arrays.
4302 `mark_addressable' should already have been called
4303 for any array for which this case will be reached. */
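/* For illustration only (hypothetical source): a reference a[i] with a
   run-time index i is rebuilt as an INDIRECT_REF of

       &a  +  i * size_in_bytes (element type)

   i.e. the array's address plus a byte offset, and that new tree is what
   actually gets expanded below. */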
4304
4305 /* Don't forget the const or volatile flag from the array
4306 element. */
4307 tree variant_type = build_type_variant (type,
4308 TREE_READONLY (exp),
4309 TREE_THIS_VOLATILE (exp));
4310 tree array_adr = build1 (ADDR_EXPR,
4311 build_pointer_type (variant_type), array);
4312 tree elt;
4313
4314 /* Convert the integer argument to a type the same size as a
4315 pointer so the multiply won't overflow spuriously. */
4316 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4317 index = convert (type_for_size (POINTER_SIZE, 0), index);
4318
4319 /* Don't think the address has side effects
4320 just because the array does.
4321 (In some cases the address might have side effects,
4322 and we fail to record that fact here. However, it should not
4323 matter, since expand_expr should not care.) */
4324 TREE_SIDE_EFFECTS (array_adr) = 0;
4325
4326 elt = build1 (INDIRECT_REF, type,
4327 fold (build (PLUS_EXPR,
4328 TYPE_POINTER_TO (variant_type),
4329 array_adr,
4330 fold (build (MULT_EXPR,
4331 TYPE_POINTER_TO (variant_type),
4332 index,
4333 size_in_bytes (type))))));
4334
4335 /* Volatility, etc., of new expression is same as old
4336 expression. */
4337 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4338 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4339 TREE_READONLY (elt) = TREE_READONLY (exp);
4340
4341 return expand_expr (elt, target, tmode, modifier);
4342 }
4343
4344 /* Fold an expression like: "foo"[2].
4345 This is not done in fold so it won't happen inside &. */
4346
4347 if (TREE_CODE (array) == STRING_CST
4348 && TREE_CODE (index) == INTEGER_CST
4349 && !TREE_INT_CST_HIGH (index)
4350 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
4351 {
4352 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
4353 {
4354 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
4355 TREE_TYPE (exp) = integer_type_node;
4356 return expand_expr (exp, target, tmode, modifier);
4357 }
4358 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
4359 {
4360 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
4361 TREE_TYPE (exp) = integer_type_node;
4362 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
4363 exp),
4364 target, tmode, modifier);
4365 }
4366 }
4367
4368 /* If this is a constant index into a constant array,
4369 just get the value from the array. Handle both the cases when
4370 we have an explicit constructor and when our operand is a variable
4371 that was declared const. */
4372
4373 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4374 {
4375 if (TREE_CODE (index) == INTEGER_CST
4376 && TREE_INT_CST_HIGH (index) == 0)
4377 {
4378 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4379
4380 i = TREE_INT_CST_LOW (index);
4381 while (elem && i--)
4382 elem = TREE_CHAIN (elem);
4383 if (elem)
4384 return expand_expr (fold (TREE_VALUE (elem)), target,
4385 tmode, modifier);
4386 }
4387 }
4388
4389 else if (optimize >= 1
4390 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4391 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4392 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4393 {
4394 if (TREE_CODE (index) == INTEGER_CST
4395 && TREE_INT_CST_HIGH (index) == 0)
4396 {
4397 tree init = DECL_INITIAL (array);
4398
4399 i = TREE_INT_CST_LOW (index);
4400 if (TREE_CODE (init) == CONSTRUCTOR)
4401 {
4402 tree elem = CONSTRUCTOR_ELTS (init);
4403
4404 while (elem && i--)
4405 elem = TREE_CHAIN (elem);
4406 if (elem)
4407 return expand_expr (fold (TREE_VALUE (elem)), target,
4408 tmode, modifier);
4409 }
4410 else if (TREE_CODE (init) == STRING_CST
4411 && i < TREE_STRING_LENGTH (init))
4412 {
4413 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
4414 return convert_to_mode (mode, temp, 0);
4415 }
4416 }
4417 }
4418 }
4419
4420 /* Treat array-ref with constant index as a component-ref. */
4421
4422 case COMPONENT_REF:
4423 case BIT_FIELD_REF:
4424 /* If the operand is a CONSTRUCTOR, we can just extract the
4425 appropriate field if it is present. */
4426 if (code != ARRAY_REF
4427 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4428 {
4429 tree elt;
4430
4431 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4432 elt = TREE_CHAIN (elt))
4433 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4434 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4435 }
4436
4437 {
4438 enum machine_mode mode1;
4439 int bitsize;
4440 int bitpos;
4441 tree offset;
4442 int volatilep = 0;
4443 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4444 &mode1, &unsignedp, &volatilep);
4445
4446 /* In some cases, we will be offsetting OP0's address by a constant.
4447 So get it as a sum, if possible. If we will be using it
4448 directly in an insn, we validate it. */
4449 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4450
4451 /* If this is a constant, put it into a register if it is a
4452 legitimate constant and memory if it isn't. */
4453 if (CONSTANT_P (op0))
4454 {
4455 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4456 if (LEGITIMATE_CONSTANT_P (op0))
4457 op0 = force_reg (mode, op0);
4458 else
4459 op0 = validize_mem (force_const_mem (mode, op0));
4460 }
4461
4462 if (offset != 0)
4463 {
4464 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4465
4466 if (GET_CODE (op0) != MEM)
4467 abort ();
4468 op0 = change_address (op0, VOIDmode,
4469 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4470 force_reg (Pmode, offset_rtx)));
4471 }
4472
4473 /* Don't forget about volatility even if this is a bitfield. */
4474 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4475 {
4476 op0 = copy_rtx (op0);
4477 MEM_VOLATILE_P (op0) = 1;
4478 }
4479
4480 if (mode1 == VOIDmode
4481 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4482 && modifier != EXPAND_CONST_ADDRESS
4483 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4484 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
4485 {
4486 /* In cases where an aligned union has an unaligned object
4487 as a field, we might be extracting a BLKmode value from
4488 an integer-mode (e.g., SImode) object. Handle this case
4489 by doing the extract into an object as wide as the field
4490 (which we know to be the width of a basic mode), then
4491 storing into memory, and changing the mode to BLKmode. */
4492 enum machine_mode ext_mode = mode;
4493
4494 if (ext_mode == BLKmode)
4495 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4496
4497 if (ext_mode == BLKmode)
4498 abort ();
4499
4500 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4501 unsignedp, target, ext_mode, ext_mode,
4502 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
4503 int_size_in_bytes (TREE_TYPE (tem)));
4504 if (mode == BLKmode)
4505 {
4506 rtx new = assign_stack_temp (ext_mode,
4507 bitsize / BITS_PER_UNIT, 0);
4508
4509 emit_move_insn (new, op0);
4510 op0 = copy_rtx (new);
4511 PUT_MODE (op0, BLKmode);
4512 }
4513
4514 return op0;
4515 }
4516
4517 /* Get a reference to just this component. */
4518 if (modifier == EXPAND_CONST_ADDRESS
4519 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4520 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4521 (bitpos / BITS_PER_UNIT)));
4522 else
4523 op0 = change_address (op0, mode1,
4524 plus_constant (XEXP (op0, 0),
4525 (bitpos / BITS_PER_UNIT)));
4526 MEM_IN_STRUCT_P (op0) = 1;
4527 MEM_VOLATILE_P (op0) |= volatilep;
4528 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4529 return op0;
4530 if (target == 0)
4531 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4532 convert_move (target, op0, unsignedp);
4533 return target;
4534 }
4535
4536 case OFFSET_REF:
4537 {
4538 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
4539 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4540 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4541 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4542 MEM_IN_STRUCT_P (temp) = 1;
4543 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4544 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4545 a location is accessed through a pointer to const does not mean
4546 that the value there can never change. */
4547 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4548 #endif
4549 return temp;
4550 }
4551
4552 /* Intended for a reference to a buffer of a file-object in Pascal.
4553 But it's not certain that a special tree code will really be
4554 necessary for these. INDIRECT_REF might work for them. */
4555 case BUFFER_REF:
4556 abort ();
4557
4558 /* IN_EXPR: Inlined pascal set IN expression.
4559
4560 Algorithm:
4561 rlo = set_low - (set_low%bits_per_word);
4562 the_word = set [ (index - rlo)/bits_per_word ];
4563 bit_index = index % bits_per_word;
4564 bitmask = 1 << bit_index;
4565 return !!(the_word & bitmask); */
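/* A minimal C sketch of that algorithm, using illustrative names
   (in_set, set, set_low, index) that are not part of this file:

       static int
       in_set (const unsigned char *set, int set_low, int index)
       {
         const int bits_per_word = 8;
         int rlo = set_low - (set_low % bits_per_word);
         unsigned char the_word = set[(index - rlo) / bits_per_word];
         int bit_index = index % bits_per_word;
         return (the_word >> bit_index) & 1;
       }

   The expansion below generates this computation in RTL, using
   BITS_PER_UNIT as the word size and adding range checks against the
   set's bounds. */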
4566 case IN_EXPR:
4567 preexpand_calls (exp);
4568 {
4569 tree set = TREE_OPERAND (exp, 0);
4570 tree index = TREE_OPERAND (exp, 1);
4571 tree set_type = TREE_TYPE (set);
4572
4573 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4574 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4575
4576 rtx index_val;
4577 rtx lo_r;
4578 rtx hi_r;
4579 rtx rlow;
4580 rtx diff, quo, rem, addr, bit, result;
4581 rtx setval, setaddr;
4582 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4583
4584 if (target == 0)
4585 target = gen_reg_rtx (mode);
4586
4587 /* If domain is empty, answer is no. */
4588 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4589 return const0_rtx;
4590
4591 index_val = expand_expr (index, 0, VOIDmode, 0);
4592 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4593 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4594 setval = expand_expr (set, 0, VOIDmode, 0);
4595 setaddr = XEXP (setval, 0);
4596
4597 /* Compare index against bounds, if they are constant. */
4598 if (GET_CODE (index_val) == CONST_INT
4599 && GET_CODE (lo_r) == CONST_INT
4600 && INTVAL (index_val) < INTVAL (lo_r))
4601 return const0_rtx;
4602
4603 if (GET_CODE (index_val) == CONST_INT
4604 && GET_CODE (hi_r) == CONST_INT
4605 && INTVAL (hi_r) < INTVAL (index_val))
4606 return const0_rtx;
4607
4608 /* If we get here, we have to generate the code for both cases
4609 (in range and out of range). */
4610
4611 op0 = gen_label_rtx ();
4612 op1 = gen_label_rtx ();
4613
4614 if (! (GET_CODE (index_val) == CONST_INT
4615 && GET_CODE (lo_r) == CONST_INT))
4616 {
4617 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4618 GET_MODE (index_val), 0, 0);
4619 emit_jump_insn (gen_blt (op1));
4620 }
4621
4622 if (! (GET_CODE (index_val) == CONST_INT
4623 && GET_CODE (hi_r) == CONST_INT))
4624 {
4625 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4626 GET_MODE (index_val), 0, 0);
4627 emit_jump_insn (gen_bgt (op1));
4628 }
4629
4630 /* Calculate the element number of bit zero in the first word
4631 of the set. */
4632 if (GET_CODE (lo_r) == CONST_INT)
4633 rlow = GEN_INT (INTVAL (lo_r)
4634 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4635 else
4636 rlow = expand_binop (index_mode, and_optab, lo_r,
4637 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4638 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4639
4640 diff = expand_binop (index_mode, sub_optab,
4641 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4642
4643 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4644 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4645 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4646 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4647 addr = memory_address (byte_mode,
4648 expand_binop (index_mode, add_optab,
4649 diff, setaddr, NULL_RTX, 0,
4650 OPTAB_LIB_WIDEN));
4651 /* Extract the bit we want to examine */
4652 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4653 gen_rtx (MEM, byte_mode, addr),
4654 make_tree (TREE_TYPE (index), rem),
4655 NULL_RTX, 1);
4656 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4657 GET_MODE (target) == byte_mode ? target : 0,
4658 1, OPTAB_LIB_WIDEN);
4659
4660 if (result != target)
4661 convert_move (target, result, 1);
4662
4663 /* Output the code to handle the out-of-range case. */
4664 emit_jump (op0);
4665 emit_label (op1);
4666 emit_move_insn (target, const0_rtx);
4667 emit_label (op0);
4668 return target;
4669 }
4670
4671 case WITH_CLEANUP_EXPR:
4672 if (RTL_EXPR_RTL (exp) == 0)
4673 {
4674 RTL_EXPR_RTL (exp)
4675 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4676 cleanups_this_call
4677 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4678 /* That's it for this cleanup. */
4679 TREE_OPERAND (exp, 2) = 0;
4680 }
4681 return RTL_EXPR_RTL (exp);
4682
4683 case CALL_EXPR:
4684 /* Check for a built-in function. */
4685 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4686 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4687 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4688 return expand_builtin (exp, target, subtarget, tmode, ignore);
4689 /* If this call was expanded already by preexpand_calls,
4690 just return the result we got. */
4691 if (CALL_EXPR_RTL (exp) != 0)
4692 return CALL_EXPR_RTL (exp);
4693 return expand_call (exp, target, ignore);
4694
4695 case NON_LVALUE_EXPR:
4696 case NOP_EXPR:
4697 case CONVERT_EXPR:
4698 case REFERENCE_EXPR:
4699 if (TREE_CODE (type) == VOID_TYPE || ignore)
4700 {
4701 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4702 return const0_rtx;
4703 }
4704 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4705 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4706 if (TREE_CODE (type) == UNION_TYPE)
4707 {
4708 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4709 if (target == 0)
4710 {
4711 if (mode == BLKmode)
4712 {
4713 if (TYPE_SIZE (type) == 0
4714 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4715 abort ();
4716 target = assign_stack_temp (BLKmode,
4717 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4718 + BITS_PER_UNIT - 1)
4719 / BITS_PER_UNIT, 0);
4720 }
4721 else
4722 target = gen_reg_rtx (mode);
4723 }
4724 if (GET_CODE (target) == MEM)
4725 /* Store data into beginning of memory target. */
4726 store_expr (TREE_OPERAND (exp, 0),
4727 change_address (target, TYPE_MODE (valtype), 0), 0);
4728
4729 else if (GET_CODE (target) == REG)
4730 /* Store this field into a union of the proper type. */
4731 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4732 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4733 VOIDmode, 0, 1,
4734 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4735 else
4736 abort ();
4737
4738 /* Return the entire union. */
4739 return target;
4740 }
4741 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4742 if (GET_MODE (op0) == mode)
4743 return op0;
4744 /* If arg is a constant integer being extended from a narrower mode,
4745 we must really truncate to get the extended bits right. Otherwise
4746 (unsigned long) (unsigned char) ("\377"[0])
4747 would come out as ffffffff. */
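/* A worked instance of the masking done just below (hypothetical numbers):
   with width == 8 and val == -1,

       unsigned source type:   val &= (1 << 8) - 1;      leaves val == 0xff
       signed source type:     val |= ~((1 << 8) - 1);   leaves val == -1

   so the CONST_INT handed back carries the properly zero- or sign-extended
   value for the wider mode. */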
4748 if (GET_MODE (op0) == VOIDmode
4749 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4750 < GET_MODE_BITSIZE (mode)))
4751 {
4752 /* MODE must be narrower than HOST_BITS_PER_INT. */
4753 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4754
4755 if (width < HOST_BITS_PER_WIDE_INT)
4756 {
4757 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4758 : CONST_DOUBLE_LOW (op0));
4759 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4760 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4761 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4762 else
4763 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4764
4765 op0 = GEN_INT (val);
4766 }
4767 else
4768 {
4769 op0 = (simplify_unary_operation
4770 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4771 ? ZERO_EXTEND : SIGN_EXTEND),
4772 mode, op0,
4773 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4774 if (op0 == 0)
4775 abort ();
4776 }
4777 }
4778 if (GET_MODE (op0) == VOIDmode)
4779 return op0;
4780 if (modifier == EXPAND_INITIALIZER)
4781 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4782 if (flag_force_mem && GET_CODE (op0) == MEM)
4783 op0 = copy_to_reg (op0);
4784
4785 if (target == 0)
4786 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4787 else
4788 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4789 return target;
4790
4791 case PLUS_EXPR:
4792 /* We come here from MINUS_EXPR when the second operand is a constant. */
4793 plus_expr:
4794 this_optab = add_optab;
4795
4796 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4797 something else, make sure we add the register to the constant and
4798 then to the other thing. This case can occur during strength
4799 reduction and doing it this way will produce better code if the
4800 frame pointer or argument pointer is eliminated.
4801
4802 fold-const.c will ensure that the constant is always in the inner
4803 PLUS_EXPR, so the only case we need to do anything about is if
4804 sp, ap, or fp is our second argument, in which case we must swap
4805 the innermost first argument and our second argument. */
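/* Sketch of that swap (the shapes are illustrative): an expression

       (X + C) + FP,   with C a constant and FP the frame pointer RTL_EXPR,

   is rearranged into (FP + C) + X, so FP + C can later collapse into a
   single constant-offset address once the frame pointer is eliminated. */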
4806
4807 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4808 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4809 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4810 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4811 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4812 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4813 {
4814 tree t = TREE_OPERAND (exp, 1);
4815
4816 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4817 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4818 }
4819
4820 /* If the result is to be Pmode and we are adding an integer to
4821 something, we might be forming a constant. So try to use
4822 plus_constant. If it produces a sum and we can't accept it,
4823 use force_operand. This allows P = &ARR[const] to generate
4824 efficient code on machines where a SYMBOL_REF is not a valid
4825 address.
4826
4827 If this is an EXPAND_SUM call, always return the sum. */
4828 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4829 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4830 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4831 || mode == Pmode))
4832 {
4833 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4834 EXPAND_SUM);
4835 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4836 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4837 op1 = force_operand (op1, target);
4838 return op1;
4839 }
4840
4841 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4842 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4843 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4844 || mode == Pmode))
4845 {
4846 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4847 EXPAND_SUM);
4848 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4849 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4850 op0 = force_operand (op0, target);
4851 return op0;
4852 }
4853
4854 /* No sense saving up arithmetic to be done
4855 if it's all in the wrong mode to form part of an address.
4856 And force_operand won't know whether to sign-extend or
4857 zero-extend. */
4858 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4859 || mode != Pmode) goto binop;
4860
4861 preexpand_calls (exp);
4862 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4863 subtarget = 0;
4864
4865 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4866 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4867
4868 /* Make sure any term that's a sum with a constant comes last. */
4869 if (GET_CODE (op0) == PLUS
4870 && CONSTANT_P (XEXP (op0, 1)))
4871 {
4872 temp = op0;
4873 op0 = op1;
4874 op1 = temp;
4875 }
4876 /* If adding to a sum including a constant,
4877 associate it to put the constant outside. */
4878 if (GET_CODE (op1) == PLUS
4879 && CONSTANT_P (XEXP (op1, 1)))
4880 {
4881 rtx constant_term = const0_rtx;
4882
4883 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4884 if (temp != 0)
4885 op0 = temp;
4886 /* Ensure that MULT comes first if there is one. */
4887 else if (GET_CODE (op0) == MULT)
4888 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4889 else
4890 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4891
4892 /* Let's also eliminate constants from op0 if possible. */
4893 op0 = eliminate_constant_term (op0, &constant_term);
4894
4895 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4896 their sum should be a constant. Form it into OP1, since the
4897 result we want will then be OP0 + OP1. */
4898
4899 temp = simplify_binary_operation (PLUS, mode, constant_term,
4900 XEXP (op1, 1));
4901 if (temp != 0)
4902 op1 = temp;
4903 else
4904 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4905 }
4906
4907 /* Put a constant term last and put a multiplication first. */
4908 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4909 temp = op1, op1 = op0, op0 = temp;
4910
4911 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4912 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4913
4914 case MINUS_EXPR:
4915 /* Handle difference of two symbolic constants,
4916 for the sake of an initializer. */
4917 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4918 && really_constant_p (TREE_OPERAND (exp, 0))
4919 && really_constant_p (TREE_OPERAND (exp, 1)))
4920 {
4921 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4922 VOIDmode, modifier);
4923 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4924 VOIDmode, modifier);
4925 return gen_rtx (MINUS, mode, op0, op1);
4926 }
4927 /* Convert A - const to A + (-const). */
4928 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4929 {
4930 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4931 fold (build1 (NEGATE_EXPR, type,
4932 TREE_OPERAND (exp, 1))));
4933 goto plus_expr;
4934 }
4935 this_optab = sub_optab;
4936 goto binop;
4937
4938 case MULT_EXPR:
4939 preexpand_calls (exp);
4940 /* If first operand is constant, swap them.
4941 Thus the following special case checks need only
4942 check the second operand. */
4943 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4944 {
4945 register tree t1 = TREE_OPERAND (exp, 0);
4946 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4947 TREE_OPERAND (exp, 1) = t1;
4948 }
4949
4950 /* Attempt to return something suitable for generating an
4951 indexed address, for machines that support that. */
4952
4953 if (modifier == EXPAND_SUM && mode == Pmode
4954 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4955 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4956 {
4957 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4958
4959 /* Apply distributive law if OP0 is x+c. */
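/* e.g. (illustrative constants): (reg + 4) * 8 is returned as
   (reg * 8) + 32, keeping the constant term outermost so the whole sum
   can still be used as an address. */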
4960 if (GET_CODE (op0) == PLUS
4961 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4962 return gen_rtx (PLUS, mode,
4963 gen_rtx (MULT, mode, XEXP (op0, 0),
4964 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4965 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4966 * INTVAL (XEXP (op0, 1))));
4967
4968 if (GET_CODE (op0) != REG)
4969 op0 = force_operand (op0, NULL_RTX);
4970 if (GET_CODE (op0) != REG)
4971 op0 = copy_to_mode_reg (mode, op0);
4972
4973 return gen_rtx (MULT, mode, op0,
4974 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4975 }
4976
4977 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4978 subtarget = 0;
4979
4980 /* Check for multiplying things that have been extended
4981 from a narrower type. If this machine supports multiplying
4982 in that narrower type with a result in the desired type,
4983 do it that way, and avoid the explicit type-conversion. */
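/* Illustrative C-level case (assumed operand widths, not from this file):
   with 16-bit shorts on a machine that has a 16x16 -> 32 widening multiply,

       short a, b;
       int p = (int) a * (int) b;

   can use smul_widen_optab (or umul_widen_optab for unsigned operands)
   directly, instead of extending both operands and doing a full multiply. */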
4984 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4985 && TREE_CODE (type) == INTEGER_TYPE
4986 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4987 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4988 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4989 && int_fits_type_p (TREE_OPERAND (exp, 1),
4990 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4991 /* Don't use a widening multiply if a shift will do. */
4992 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4993 > HOST_BITS_PER_WIDE_INT)
4994 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4995 ||
4996 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4997 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4998 ==
4999 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5000 /* If both operands are extended, they must either both
5001 be zero-extended or both be sign-extended. */
5002 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5003 ==
5004 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5005 {
5006 enum machine_mode innermode
5007 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5008 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5009 ? umul_widen_optab : smul_widen_optab);
5010 if (mode == GET_MODE_WIDER_MODE (innermode)
5011 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5012 {
5013 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5014 NULL_RTX, VOIDmode, 0);
5015 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5016 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5017 VOIDmode, 0);
5018 else
5019 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5020 NULL_RTX, VOIDmode, 0);
5021 goto binop2;
5022 }
5023 }
5024 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5025 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5026 return expand_mult (mode, op0, op1, target, unsignedp);
5027
5028 case TRUNC_DIV_EXPR:
5029 case FLOOR_DIV_EXPR:
5030 case CEIL_DIV_EXPR:
5031 case ROUND_DIV_EXPR:
5032 case EXACT_DIV_EXPR:
5033 preexpand_calls (exp);
5034 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5035 subtarget = 0;
5036 /* Possible optimization: compute the dividend with EXPAND_SUM;
5037 then, if the divisor is constant, we can optimize the case
5038 where some terms of the dividend have coefficients divisible by it. */
5039 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5040 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5041 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5042
5043 case RDIV_EXPR:
5044 this_optab = flodiv_optab;
5045 goto binop;
5046
5047 case TRUNC_MOD_EXPR:
5048 case FLOOR_MOD_EXPR:
5049 case CEIL_MOD_EXPR:
5050 case ROUND_MOD_EXPR:
5051 preexpand_calls (exp);
5052 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5053 subtarget = 0;
5054 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5055 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5056 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5057
5058 case FIX_ROUND_EXPR:
5059 case FIX_FLOOR_EXPR:
5060 case FIX_CEIL_EXPR:
5061 abort (); /* Not used for C. */
5062
5063 case FIX_TRUNC_EXPR:
5064 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5065 if (target == 0)
5066 target = gen_reg_rtx (mode);
5067 expand_fix (target, op0, unsignedp);
5068 return target;
5069
5070 case FLOAT_EXPR:
5071 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5072 if (target == 0)
5073 target = gen_reg_rtx (mode);
5074 /* expand_float can't figure out what to do if FROM has VOIDmode.
5075 So give it the correct mode. With -O, cse will optimize this. */
5076 if (GET_MODE (op0) == VOIDmode)
5077 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5078 op0);
5079 expand_float (target, op0,
5080 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5081 return target;
5082
5083 case NEGATE_EXPR:
5084 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5085 temp = expand_unop (mode, neg_optab, op0, target, 0);
5086 if (temp == 0)
5087 abort ();
5088 return temp;
5089
5090 case ABS_EXPR:
5091 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5092
5093 /* Handle complex values specially. */
5094 {
5095 enum machine_mode opmode
5096 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5097
5098 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
5099 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
5100 return expand_complex_abs (opmode, op0, target, unsignedp);
5101 }
5102
5103 /* Unsigned abs is simply the operand. Testing here means we don't
5104 risk generating incorrect code below. */
5105 if (TREE_UNSIGNED (type))
5106 return op0;
5107
5108 /* First try to do it with a special abs instruction. */
5109 temp = expand_unop (mode, abs_optab, op0, target, 0);
5110 if (temp != 0)
5111 return temp;
5112
5113 /* If this machine has expensive jumps, we can do integer absolute
5114 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5115 where W is the width of MODE. */
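/* A minimal C sketch of that identity, assuming a 32-bit int and an
   arithmetic right shift of signed values (illustrative only):

       int ext = x >> 31;               all ones if x < 0, otherwise zero
       int absval = (ext ^ x) - ext;    gives x when ext == 0, -x when ext == -1

   which is exactly the shift, xor and subtract sequence generated below. */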
5116
5117 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5118 {
5119 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5120 size_int (GET_MODE_BITSIZE (mode) - 1),
5121 NULL_RTX, 0);
5122
5123 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5124 OPTAB_LIB_WIDEN);
5125 if (temp != 0)
5126 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5127 OPTAB_LIB_WIDEN);
5128
5129 if (temp != 0)
5130 return temp;
5131 }
5132
5133 /* If that does not win, use conditional jump and negate. */
5134 target = original_target;
5135 temp = gen_label_rtx ();
5136 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5137 || (GET_CODE (target) == REG
5138 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5139 target = gen_reg_rtx (mode);
5140 emit_move_insn (target, op0);
5141 emit_cmp_insn (target,
5142 expand_expr (convert (type, integer_zero_node),
5143 NULL_RTX, VOIDmode, 0),
5144 GE, NULL_RTX, mode, 0, 0);
5145 NO_DEFER_POP;
5146 emit_jump_insn (gen_bge (temp));
5147 op0 = expand_unop (mode, neg_optab, target, target, 0);
5148 if (op0 != target)
5149 emit_move_insn (target, op0);
5150 emit_label (temp);
5151 OK_DEFER_POP;
5152 return target;
5153
5154 case MAX_EXPR:
5155 case MIN_EXPR:
5156 target = original_target;
5157 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5158 || (GET_CODE (target) == REG
5159 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5160 target = gen_reg_rtx (mode);
5161 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5162 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5163
5164 /* First try to do it with a special MIN or MAX instruction.
5165 If that does not win, use a conditional jump to select the proper
5166 value. */
5167 this_optab = (TREE_UNSIGNED (type)
5168 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5169 : (code == MIN_EXPR ? smin_optab : smax_optab));
5170
5171 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5172 OPTAB_WIDEN);
5173 if (temp != 0)
5174 return temp;
5175
5176 if (target != op0)
5177 emit_move_insn (target, op0);
5178 op0 = gen_label_rtx ();
5179 if (code == MAX_EXPR)
5180 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5181 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5182 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5183 else
5184 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5185 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5186 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5187 if (temp == const0_rtx)
5188 emit_move_insn (target, op1);
5189 else if (temp != const_true_rtx)
5190 {
5191 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5192 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5193 else
5194 abort ();
5195 emit_move_insn (target, op1);
5196 }
5197 emit_label (op0);
5198 return target;
5199
5200 /* ??? Can optimize when the operand of this is a bitwise operation,
5201 by using a different bitwise operation. */
5202 case BIT_NOT_EXPR:
5203 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5204 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5205 if (temp == 0)
5206 abort ();
5207 return temp;
5208
5209 case FFS_EXPR:
5210 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5211 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5212 if (temp == 0)
5213 abort ();
5214 return temp;
5215
5216 /* ??? Can optimize bitwise operations with one arg constant.
5217 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5218 and (a bitwise1 b) bitwise2 b (etc)
5219 but that is probably not worthwhile. */
5220
5221 /* BIT_AND_EXPR is for bitwise anding.
5222 TRUTH_AND_EXPR is for anding two boolean values
5223 when we want in all cases to compute both of them.
5224 In general it is fastest to do TRUTH_AND_EXPR by
5225 computing both operands as actual zero-or-1 values
5226 and then bitwise anding. In cases where there cannot
5227 be any side effects, better code would be made by
5228 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5229 but the question is how to recognize those cases. */
5230
5231 case TRUTH_AND_EXPR:
5232 case BIT_AND_EXPR:
5233 this_optab = and_optab;
5234 goto binop;
5235
5236 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
5237 case TRUTH_OR_EXPR:
5238 case BIT_IOR_EXPR:
5239 this_optab = ior_optab;
5240 goto binop;
5241
5242 case TRUTH_XOR_EXPR:
5243 case BIT_XOR_EXPR:
5244 this_optab = xor_optab;
5245 goto binop;
5246
5247 case LSHIFT_EXPR:
5248 case RSHIFT_EXPR:
5249 case LROTATE_EXPR:
5250 case RROTATE_EXPR:
5251 preexpand_calls (exp);
5252 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5253 subtarget = 0;
5254 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5255 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5256 unsignedp);
5257
5258 /* Could determine the answer when only additive constants differ.
5259 Also, the addition of one can be handled by changing the condition. */
5260 case LT_EXPR:
5261 case LE_EXPR:
5262 case GT_EXPR:
5263 case GE_EXPR:
5264 case EQ_EXPR:
5265 case NE_EXPR:
5266 preexpand_calls (exp);
5267 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5268 if (temp != 0)
5269 return temp;
5270 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5271 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5272 && original_target
5273 && GET_CODE (original_target) == REG
5274 && (GET_MODE (original_target)
5275 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5276 {
5277 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5278 if (temp != original_target)
5279 temp = copy_to_reg (temp);
5280 op1 = gen_label_rtx ();
5281 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5282 GET_MODE (temp), unsignedp, 0);
5283 emit_jump_insn (gen_beq (op1));
5284 emit_move_insn (temp, const1_rtx);
5285 emit_label (op1);
5286 return temp;
5287 }
5288 /* If no set-flag instruction, must generate a conditional
5289 store into a temporary variable. Drop through
5290 and handle this like && and ||. */
5291
5292 case TRUTH_ANDIF_EXPR:
5293 case TRUTH_ORIF_EXPR:
5294 if (target == 0 || ! safe_from_p (target, exp)
5295 /* Make sure we don't have a hard reg (such as function's return
5296 value) live across basic blocks, if not optimizing. */
5297 || (!optimize && GET_CODE (target) == REG
5298 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5299 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5300 emit_clr_insn (target);
5301 op1 = gen_label_rtx ();
5302 jumpifnot (exp, op1);
5303 emit_0_to_1_insn (target);
5304 emit_label (op1);
5305 return target;
5306
5307 case TRUTH_NOT_EXPR:
5308 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5309 /* The parser is careful to generate TRUTH_NOT_EXPR
5310 only with operands that are always zero or one. */
5311 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5312 target, 1, OPTAB_LIB_WIDEN);
5313 if (temp == 0)
5314 abort ();
5315 return temp;
5316
5317 case COMPOUND_EXPR:
5318 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5319 emit_queue ();
5320 return expand_expr (TREE_OPERAND (exp, 1),
5321 (ignore ? const0_rtx : target),
5322 VOIDmode, 0);
5323
5324 case COND_EXPR:
5325 {
5326 /* Note that COND_EXPRs whose type is a structure or union
5327 are required to be constructed to contain assignments of
5328 a temporary variable, so that we can evaluate them here
5329 for side effect only. If type is void, we must do likewise. */
5330
5331 /* If an arm of the branch requires a cleanup,
5332 only that cleanup is performed. */
5333
5334 tree singleton = 0;
5335 tree binary_op = 0, unary_op = 0;
5336 tree old_cleanups = cleanups_this_call;
5337 cleanups_this_call = 0;
5338
5339 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5340 convert it to our mode, if necessary. */
5341 if (integer_onep (TREE_OPERAND (exp, 1))
5342 && integer_zerop (TREE_OPERAND (exp, 2))
5343 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5344 {
5345 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5346 if (GET_MODE (op0) == mode)
5347 return op0;
5348 if (target == 0)
5349 target = gen_reg_rtx (mode);
5350 convert_move (target, op0, unsignedp);
5351 return target;
5352 }
5353
5354 /* If we are not to produce a result, we have no target. Otherwise,
5355 if a target was specified use it; it will not be used as an
5356 intermediate target unless it is safe. If no target, use a
5357 temporary. */
5358
5359 if (mode == VOIDmode || ignore)
5360 temp = 0;
5361 else if (original_target
5362 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5363 temp = original_target;
5364 else if (mode == BLKmode)
5365 {
5366 if (TYPE_SIZE (type) == 0
5367 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5368 abort ();
5369 temp = assign_stack_temp (BLKmode,
5370 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5371 + BITS_PER_UNIT - 1)
5372 / BITS_PER_UNIT, 0);
5373 }
5374 else
5375 temp = gen_reg_rtx (mode);
5376
5377 /* Check for X ? A + B : A. If we have this, we can copy
5378 A to the output and conditionally add B. Similarly for unary
5379 operations. Don't do this if X has side-effects because
5380 those side effects might affect A or B and the "?" operation is
5381 a sequence point in ANSI. (We test for side effects later.) */
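/* For example, at the source level the transformation sketched here is
   roughly

       r = x ? a + b : a;      becomes      r = a;  if (x) r = r + b;

   (the store-flag variant for  x ? a + 1 : a  is handled just below).  */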
5382
5383 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5384 && operand_equal_p (TREE_OPERAND (exp, 2),
5385 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5386 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5387 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5388 && operand_equal_p (TREE_OPERAND (exp, 1),
5389 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5390 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5391 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5392 && operand_equal_p (TREE_OPERAND (exp, 2),
5393 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5394 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5395 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5396 && operand_equal_p (TREE_OPERAND (exp, 1),
5397 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5398 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5399
5400 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5401 operation, do this as A + (X != 0). Similarly for other simple
5402 binary operators. */
5403 if (singleton && binary_op
5404 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5405 && (TREE_CODE (binary_op) == PLUS_EXPR
5406 || TREE_CODE (binary_op) == MINUS_EXPR
5407 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5408 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5409 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5410 && integer_onep (TREE_OPERAND (binary_op, 1))
5411 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5412 {
5413 rtx result;
5414 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5415 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5416 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5417 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5418 : and_optab);
5419
5420 /* If we had X ? A : A + 1, do this as A + (X == 0).
5421
5422 We have to invert the truth value here and then put it
5423 back later if do_store_flag fails. We cannot simply copy
5424 TREE_OPERAND (exp, 0) to another variable and modify that
5425 because invert_truthvalue can modify the tree pointed to
5426 by its argument. */
5427 if (singleton == TREE_OPERAND (exp, 1))
5428 TREE_OPERAND (exp, 0)
5429 = invert_truthvalue (TREE_OPERAND (exp, 0));
5430
5431 result = do_store_flag (TREE_OPERAND (exp, 0),
5432 (safe_from_p (temp, singleton)
5433 ? temp : NULL_RTX),
5434 mode, BRANCH_COST <= 1);
5435
5436 if (result)
5437 {
5438 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5439 return expand_binop (mode, boptab, op1, result, temp,
5440 unsignedp, OPTAB_LIB_WIDEN);
5441 }
5442 else if (singleton == TREE_OPERAND (exp, 1))
5443 TREE_OPERAND (exp, 0)
5444 = invert_truthvalue (TREE_OPERAND (exp, 0));
5445 }
5446
5447 NO_DEFER_POP;
5448 op0 = gen_label_rtx ();
5449
5450 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5451 {
5452 if (temp != 0)
5453 {
5454 /* If the target conflicts with the other operand of the
5455 binary op, we can't use it. Also, we can't use the target
5456 if it is a hard register, because evaluating the condition
5457 might clobber it. */
5458 if ((binary_op
5459 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5460 || (GET_CODE (temp) == REG
5461 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5462 temp = gen_reg_rtx (mode);
5463 store_expr (singleton, temp, 0);
5464 }
5465 else
5466 expand_expr (singleton,
5467 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
5468 if (cleanups_this_call)
5469 {
5470 sorry ("aggregate value in COND_EXPR");
5471 cleanups_this_call = 0;
5472 }
5473 if (singleton == TREE_OPERAND (exp, 1))
5474 jumpif (TREE_OPERAND (exp, 0), op0);
5475 else
5476 jumpifnot (TREE_OPERAND (exp, 0), op0);
5477
5478 if (binary_op && temp == 0)
5479 /* Just touch the other operand. */
5480 expand_expr (TREE_OPERAND (binary_op, 1),
5481 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5482 else if (binary_op)
5483 store_expr (build (TREE_CODE (binary_op), type,
5484 make_tree (type, temp),
5485 TREE_OPERAND (binary_op, 1)),
5486 temp, 0);
5487 else
5488 store_expr (build1 (TREE_CODE (unary_op), type,
5489 make_tree (type, temp)),
5490 temp, 0);
5491 op1 = op0;
5492 }
5493 #if 0
5494 /* This is now done in jump.c and is better done there because it
5495 produces shorter register lifetimes. */
5496
5497 /* Check for both possibilities being either constants or variables
5498 in registers (but not the same as the target!). If so, can
5499 save branches by assigning one, branching, and assigning the
5500 other. */
5501 else if (temp && GET_MODE (temp) != BLKmode
5502 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5503 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5504 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5505 && DECL_RTL (TREE_OPERAND (exp, 1))
5506 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5507 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5508 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5509 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5510 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5511 && DECL_RTL (TREE_OPERAND (exp, 2))
5512 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5513 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5514 {
5515 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5516 temp = gen_reg_rtx (mode);
5517 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5518 jumpifnot (TREE_OPERAND (exp, 0), op0);
5519 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5520 op1 = op0;
5521 }
5522 #endif
5523 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5524 comparison operator. If we have one of these cases, set the
5525 output to A, branch on A (cse will merge these two references),
5526 then set the output to FOO. */
5527 else if (temp
5528 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5529 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5530 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5531 TREE_OPERAND (exp, 1), 0)
5532 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5533 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5534 {
5535 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5536 temp = gen_reg_rtx (mode);
5537 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5538 jumpif (TREE_OPERAND (exp, 0), op0);
5539 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5540 op1 = op0;
5541 }
5542 else if (temp
5543 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5544 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5545 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5546 TREE_OPERAND (exp, 2), 0)
5547 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5548 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5549 {
5550 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5551 temp = gen_reg_rtx (mode);
5552 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5553 jumpifnot (TREE_OPERAND (exp, 0), op0);
5554 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5555 op1 = op0;
5556 }
5557 else
5558 {
5559 op1 = gen_label_rtx ();
5560 jumpifnot (TREE_OPERAND (exp, 0), op0);
5561 if (temp != 0)
5562 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5563 else
5564 expand_expr (TREE_OPERAND (exp, 1),
5565 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5566 if (cleanups_this_call)
5567 {
5568 sorry ("aggregate value in COND_EXPR");
5569 cleanups_this_call = 0;
5570 }
5571
5572 emit_queue ();
5573 emit_jump_insn (gen_jump (op1));
5574 emit_barrier ();
5575 emit_label (op0);
5576 if (temp != 0)
5577 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5578 else
5579 expand_expr (TREE_OPERAND (exp, 2),
5580 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5581 }
5582
5583 if (cleanups_this_call)
5584 {
5585 sorry ("aggregate value in COND_EXPR");
5586 cleanups_this_call = 0;
5587 }
5588
5589 emit_queue ();
5590 emit_label (op1);
5591 OK_DEFER_POP;
5592 cleanups_this_call = old_cleanups;
5593 return temp;
5594 }
5595
5596 case TARGET_EXPR:
5597 {
5598 /* Something needs to be initialized, but we didn't know
5599 where that thing was when building the tree. For example,
5600 it could be the return value of a function, or a parameter
5601 to a function which is laid down on the stack, or a temporary
5602 variable which must be passed by reference.
5603
5604 We guarantee that the expression will either be constructed
5605 or copied into our original target. */
5606
5607 tree slot = TREE_OPERAND (exp, 0);
5608 tree exp1;
5609
5610 if (TREE_CODE (slot) != VAR_DECL)
5611 abort ();
5612
5613 if (target == 0)
5614 {
5615 if (DECL_RTL (slot) != 0)
5616 {
5617 target = DECL_RTL (slot);
5618 /* If we have already expanded the slot, don't do
5619 it again. (mrs) */
5620 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5621 return target;
5622 }
5623 else
5624 {
5625 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5626 /* All temp slots at this level must not conflict. */
5627 preserve_temp_slots (target);
5628 DECL_RTL (slot) = target;
5629 }
5630
5631 #if 0
5632 /* I bet this needs to be done, and I bet that it needs to
5633 be above, inside the else clause. The reason is
5634 simple: how else is it going to get cleaned up? (mrs)
5635
5636 The reason this probably did not work before, and was
5637 commented out, is that it was re-expanding already
5638 expanded target_exprs (target == 0 and DECL_RTL (slot)
5639 != 0) and also cleaning them up many times as well. :-( */
5640
5641 /* Since SLOT is not known to the called function
5642 to belong to its stack frame, we must build an explicit
5643 cleanup. This case occurs when we must build up a reference
5644 to pass the reference as an argument. In this case,
5645 it is very likely that such a reference need not be
5646 built here. */
5647
5648 if (TREE_OPERAND (exp, 2) == 0)
5649 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5650 if (TREE_OPERAND (exp, 2))
5651 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5652 cleanups_this_call);
5653 #endif
5654 }
5655 else
5656 {
5657 /* This case does occur, when expanding a parameter which
5658 needs to be constructed on the stack. The target
5659 is the actual stack address that we want to initialize.
5660 The function we call will perform the cleanup in this case. */
5661
5662 DECL_RTL (slot) = target;
5663 }
5664
5665 exp1 = TREE_OPERAND (exp, 1);
5666 /* Mark it as expanded. */
5667 TREE_OPERAND (exp, 1) = NULL_TREE;
5668
5669 return expand_expr (exp1, target, tmode, modifier);
5670 }
5671
5672 case INIT_EXPR:
5673 {
5674 tree lhs = TREE_OPERAND (exp, 0);
5675 tree rhs = TREE_OPERAND (exp, 1);
5676 tree noncopied_parts = 0;
5677 tree lhs_type = TREE_TYPE (lhs);
5678
5679 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5680 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5681 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5682 TYPE_NONCOPIED_PARTS (lhs_type));
5683 while (noncopied_parts != 0)
5684 {
5685 expand_assignment (TREE_VALUE (noncopied_parts),
5686 TREE_PURPOSE (noncopied_parts), 0, 0);
5687 noncopied_parts = TREE_CHAIN (noncopied_parts);
5688 }
5689 return temp;
5690 }
5691
5692 case MODIFY_EXPR:
5693 {
5694 /* If lhs is complex, expand calls in rhs before computing it.
5695 That's so we don't compute a pointer and save it over a call.
5696 If lhs is simple, compute it first so we can give it as a
5697 target if the rhs is just a call. This avoids an extra temp and copy
5698 and that prevents a partial-subsumption which makes bad code.
5699 Actually we could treat component_ref's of vars like vars. */
5700
5701 tree lhs = TREE_OPERAND (exp, 0);
5702 tree rhs = TREE_OPERAND (exp, 1);
5703 tree noncopied_parts = 0;
5704 tree lhs_type = TREE_TYPE (lhs);
5705
5706 temp = 0;
5707
5708 if (TREE_CODE (lhs) != VAR_DECL
5709 && TREE_CODE (lhs) != RESULT_DECL
5710 && TREE_CODE (lhs) != PARM_DECL)
5711 preexpand_calls (exp);
5712
5713 /* Check for |= or &= of a bitfield of size one into another bitfield
5714 of size 1. In this case, (unless we need the result of the
5715 assignment) we can do this more efficiently with a
5716 test followed by an assignment, if necessary.
5717
5718 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5719 things change so we do, this code should be enhanced to
5720 support it. */
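/* A sketch of the effect at the source level, where A and B are 1-bit
   bitfields and the result of the assignment is not used:

       x.a |= y.b;      becomes      if (y.b) x.a = 1;
       x.a &= y.b;      becomes      if (! y.b) x.a = 0;  */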
5721 if (ignore
5722 && TREE_CODE (lhs) == COMPONENT_REF
5723 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5724 || TREE_CODE (rhs) == BIT_AND_EXPR)
5725 && TREE_OPERAND (rhs, 0) == lhs
5726 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5727 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5728 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5729 {
5730 rtx label = gen_label_rtx ();
5731
5732 do_jump (TREE_OPERAND (rhs, 1),
5733 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5734 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5735 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5736 (TREE_CODE (rhs) == BIT_IOR_EXPR
5737 ? integer_one_node
5738 : integer_zero_node)),
5739 0, 0);
5740 do_pending_stack_adjust ();
5741 emit_label (label);
5742 return const0_rtx;
5743 }
5744
5745 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5746 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5747 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5748 TYPE_NONCOPIED_PARTS (lhs_type));
5749
5750 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5751 while (noncopied_parts != 0)
5752 {
5753 expand_assignment (TREE_PURPOSE (noncopied_parts),
5754 TREE_VALUE (noncopied_parts), 0, 0);
5755 noncopied_parts = TREE_CHAIN (noncopied_parts);
5756 }
5757 return temp;
5758 }
5759
5760 case PREINCREMENT_EXPR:
5761 case PREDECREMENT_EXPR:
5762 return expand_increment (exp, 0);
5763
5764 case POSTINCREMENT_EXPR:
5765 case POSTDECREMENT_EXPR:
5766 /* Faster to treat as pre-increment if result is not used. */
5767 return expand_increment (exp, ! ignore);
5768
5769 case ADDR_EXPR:
5770 /* Are we taking the address of a nested function? */
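/* For instance, in GNU C (a sketch, not code in this file):

       int outer (int x)
       {
         int inner (int y) { return x + y; }
         int (*fp) (int) = &inner;
         return (*fp) (1);
       }

   Taking &inner forces a trampoline, which supplies INNER's static chain
   when it is later called through the plain function pointer FP.  */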
5771 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5772 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5773 {
5774 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5775 op0 = force_operand (op0, target);
5776 }
5777 else
5778 {
5779 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5780 (modifier == EXPAND_INITIALIZER
5781 ? modifier : EXPAND_CONST_ADDRESS));
5782 if (GET_CODE (op0) != MEM)
5783 abort ();
5784
5785 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5786 return XEXP (op0, 0);
5787 op0 = force_operand (XEXP (op0, 0), target);
5788 }
5789 if (flag_force_addr && GET_CODE (op0) != REG)
5790 return force_reg (Pmode, op0);
5791 return op0;
5792
5793 case ENTRY_VALUE_EXPR:
5794 abort ();
5795
5796 /* COMPLEX type for Extended Pascal & Fortran */
5797 case COMPLEX_EXPR:
5798 {
5799 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5800
5801 rtx prev;
5802
5803 /* Get the rtx code of the operands. */
5804 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5805 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5806
5807 if (! target)
5808 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5809
5810 prev = get_last_insn ();
5811
5812 /* Tell flow that the whole of the destination is being set. */
5813 if (GET_CODE (target) == REG)
5814 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5815
5816 /* Move the real (op0) and imaginary (op1) parts to their location. */
5817 emit_move_insn (gen_realpart (mode, target), op0);
5818 emit_move_insn (gen_imagpart (mode, target), op1);
5819
5820 /* Complex construction should appear as a single unit. */
5821 group_insns (prev);
5822
5823 return target;
5824 }
5825
5826 case REALPART_EXPR:
5827 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5828 return gen_realpart (mode, op0);
5829
5830 case IMAGPART_EXPR:
5831 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5832 return gen_imagpart (mode, op0);
5833
5834 case CONJ_EXPR:
5835 {
5836 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5837 rtx imag_t;
5838 rtx prev;
5839
5840 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5841
5842 if (! target)
5843 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5844
5845 prev = get_last_insn ();
5846
5847 /* Tell flow that the whole of the destination is being set. */
5848 if (GET_CODE (target) == REG)
5849 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5850
5851 /* Store the realpart and the negated imagpart to target. */
5852 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5853
5854 imag_t = gen_imagpart (mode, target);
5855 temp = expand_unop (mode, neg_optab,
5856 gen_imagpart (mode, op0), imag_t, 0);
5857 if (temp != imag_t)
5858 emit_move_insn (imag_t, temp);
5859
5860 /* Conjugate should appear as a single unit */
5861 group_insns (prev);
5862
5863 return target;
5864 }
5865
5866 case ERROR_MARK:
5867 op0 = CONST0_RTX (tmode);
5868 if (op0 != 0)
5869 return op0;
5870 return const0_rtx;
5871
5872 default:
5873 return (*lang_expand_expr) (exp, target, tmode, modifier);
5874 }
5875
5876 /* Here to do an ordinary binary operator, generating an instruction
5877 from the optab already placed in `this_optab'. */
5878 binop:
5879 preexpand_calls (exp);
5880 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5881 subtarget = 0;
5882 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5883 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5884 binop2:
5885 temp = expand_binop (mode, this_optab, op0, op1, target,
5886 unsignedp, OPTAB_LIB_WIDEN);
5887 if (temp == 0)
5888 abort ();
5889 return temp;
5890 }
5891 \f
5892 /* Return the alignment in bits of EXP, a pointer valued expression.
5893 But don't return more than MAX_ALIGN no matter what.
5894 The alignment returned is, by default, the alignment of the thing that
5895 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5896
5897 Otherwise, look at the expression to see if we can do better, i.e., if the
5898 expression is actually pointing at an object whose alignment is tighter. */
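/* For example (a sketch; exact figures are target-dependent), for an
   argument such as "&d" where "double d;" is a variable, the ADDR_EXPR
   case below can report DECL_ALIGN (d), typically 64 bits, whereas a
   plain "char *p" argument yields nothing better than the alignment of
   its pointed-to type, usually BITS_PER_UNIT.  */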
5899
5900 static int
5901 get_pointer_alignment (exp, max_align)
5902 tree exp;
5903 unsigned max_align;
5904 {
5905 unsigned align, inner;
5906
5907 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5908 return 0;
5909
5910 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5911 align = MIN (align, max_align);
5912
5913 while (1)
5914 {
5915 switch (TREE_CODE (exp))
5916 {
5917 case NOP_EXPR:
5918 case CONVERT_EXPR:
5919 case NON_LVALUE_EXPR:
5920 exp = TREE_OPERAND (exp, 0);
5921 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5922 return align;
5923 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5924 inner = MIN (inner, max_align);
5925 align = MAX (align, inner);
5926 break;
5927
5928 case PLUS_EXPR:
5929 /* If sum of pointer + int, restrict our maximum alignment to that
5930 imposed by the integer. If not, we can't do any better than
5931 ALIGN. */
5932 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5933 return align;
5934
5935 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5936 & (max_align - 1))
5937 != 0)
5938 max_align >>= 1;
5939
5940 exp = TREE_OPERAND (exp, 0);
5941 break;
5942
5943 case ADDR_EXPR:
5944 /* See what we are pointing at and look at its alignment. */
5945 exp = TREE_OPERAND (exp, 0);
5946 if (TREE_CODE (exp) == FUNCTION_DECL)
5947 align = MAX (align, FUNCTION_BOUNDARY);
5948 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5949 align = MAX (align, DECL_ALIGN (exp));
5950 #ifdef CONSTANT_ALIGNMENT
5951 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5952 align = CONSTANT_ALIGNMENT (exp, align);
5953 #endif
5954 return MIN (align, max_align);
5955
5956 default:
5957 return align;
5958 }
5959 }
5960 }
5961 \f
5962 /* Return the tree node and offset if a given argument corresponds to
5963 a string constant. */
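/* For example, for the argument tree of "bar" + 2 (a PLUS_EXPR of an
   ADDR_EXPR of a STRING_CST and an INTEGER_CST), the STRING_CST "bar" is
   returned and *PTR_OFFSET is set to 2; for a plain "bar", *PTR_OFFSET is
   set to zero.  */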
5964
5965 static tree
5966 string_constant (arg, ptr_offset)
5967 tree arg;
5968 tree *ptr_offset;
5969 {
5970 STRIP_NOPS (arg);
5971
5972 if (TREE_CODE (arg) == ADDR_EXPR
5973 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5974 {
5975 *ptr_offset = integer_zero_node;
5976 return TREE_OPERAND (arg, 0);
5977 }
5978 else if (TREE_CODE (arg) == PLUS_EXPR)
5979 {
5980 tree arg0 = TREE_OPERAND (arg, 0);
5981 tree arg1 = TREE_OPERAND (arg, 1);
5982
5983 STRIP_NOPS (arg0);
5984 STRIP_NOPS (arg1);
5985
5986 if (TREE_CODE (arg0) == ADDR_EXPR
5987 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5988 {
5989 *ptr_offset = arg1;
5990 return TREE_OPERAND (arg0, 0);
5991 }
5992 else if (TREE_CODE (arg1) == ADDR_EXPR
5993 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5994 {
5995 *ptr_offset = arg0;
5996 return TREE_OPERAND (arg1, 0);
5997 }
5998 }
5999
6000 return 0;
6001 }
6002
6003 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6004 way, because it could contain a zero byte in the middle.
6005 TREE_STRING_LENGTH is the size of the character array, not the string.
6006
6007 Unfortunately, string_constant can't access the values of const char
6008 arrays with initializers, so neither can we do so here. */
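/* For example, c_strlen of the tree for "hello" + 1 folds to 4, while
   c_strlen of "foo\0bar" + i, with I not a constant, returns 0 (meaning
   "unknown"), since the embedded zero byte makes the length depend on
   where I points.  */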
6009
6010 static tree
6011 c_strlen (src)
6012 tree src;
6013 {
6014 tree offset_node;
6015 int offset, max;
6016 char *ptr;
6017
6018 src = string_constant (src, &offset_node);
6019 if (src == 0)
6020 return 0;
6021 max = TREE_STRING_LENGTH (src);
6022 ptr = TREE_STRING_POINTER (src);
6023 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6024 {
6025 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6026 compute the offset to the following null if we don't know where to
6027 start searching for it. */
6028 int i;
6029 for (i = 0; i < max; i++)
6030 if (ptr[i] == 0)
6031 return 0;
6032 /* We don't know the starting offset, but we do know that the string
6033 has no internal zero bytes. We can assume that the offset falls
6034 within the bounds of the string; otherwise, the programmer deserves
6035 what he gets. Subtract the offset from the length of the string,
6036 and return that. */
6037 /* This would perhaps not be valid if we were dealing with named
6038 arrays in addition to literal string constants. */
6039 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6040 }
6041
6042 /* We have a known offset into the string. Start searching there for
6043 a null character. */
6044 if (offset_node == 0)
6045 offset = 0;
6046 else
6047 {
6048 /* Did we get a long long offset? If so, punt. */
6049 if (TREE_INT_CST_HIGH (offset_node) != 0)
6050 return 0;
6051 offset = TREE_INT_CST_LOW (offset_node);
6052 }
6053 /* If the offset is known to be out of bounds, warn, and call strlen at
6054 runtime. */
6055 if (offset < 0 || offset > max)
6056 {
6057 warning ("offset outside bounds of constant string");
6058 return 0;
6059 }
6060 /* Use strlen to search for the first zero byte. Since any strings
6061 constructed with build_string will have nulls appended, we win even
6062 if we get handed something like (char[4])"abcd".
6063
6064 Since OFFSET is our starting index into the string, no further
6065 calculation is needed. */
6066 return size_int (strlen (ptr + offset));
6067 }
6068 \f
6069 /* Expand an expression EXP that calls a built-in function,
6070 with result going to TARGET if that's convenient
6071 (and in mode MODE if that's convenient).
6072 SUBTARGET may be used as the target for computing one of EXP's operands.
6073 IGNORE is nonzero if the value is to be ignored. */
6074
6075 static rtx
6076 expand_builtin (exp, target, subtarget, mode, ignore)
6077 tree exp;
6078 rtx target;
6079 rtx subtarget;
6080 enum machine_mode mode;
6081 int ignore;
6082 {
6083 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6084 tree arglist = TREE_OPERAND (exp, 1);
6085 rtx op0;
6086 rtx lab1, insns;
6087 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6088 optab builtin_optab;
6089
6090 switch (DECL_FUNCTION_CODE (fndecl))
6091 {
6092 case BUILT_IN_ABS:
6093 case BUILT_IN_LABS:
6094 case BUILT_IN_FABS:
6095 /* build_function_call changes these into ABS_EXPR. */
6096 abort ();
6097
6098 case BUILT_IN_SIN:
6099 case BUILT_IN_COS:
6100 case BUILT_IN_FSQRT:
6101 /* If not optimizing, call the library function. */
6102 if (! optimize)
6103 break;
6104
6105 if (arglist == 0
6106 /* Arg could be wrong type if user redeclared this fcn wrong. */
6107 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6108 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
6109
6110 /* Stabilize and compute the argument. */
6111 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6112 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6113 {
6114 exp = copy_node (exp);
6115 arglist = copy_node (arglist);
6116 TREE_OPERAND (exp, 1) = arglist;
6117 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6118 }
6119 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6120
6121 /* Make a suitable register to place result in. */
6122 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6123
6124 emit_queue ();
6125 start_sequence ();
6126
6127 switch (DECL_FUNCTION_CODE (fndecl))
6128 {
6129 case BUILT_IN_SIN:
6130 builtin_optab = sin_optab; break;
6131 case BUILT_IN_COS:
6132 builtin_optab = cos_optab; break;
6133 case BUILT_IN_FSQRT:
6134 builtin_optab = sqrt_optab; break;
6135 default:
6136 abort ();
6137 }
6138
6139 /* Compute into TARGET.
6140 Set TARGET to wherever the result comes back. */
6141 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6142 builtin_optab, op0, target, 0);
6143
6144 /* If we were unable to expand via the builtin, stop the
6145 sequence (without outputting the insns) and break, causing
6146 a call to the library function. */
6147 if (target == 0)
6148 {
6149 end_sequence ();
6150 break;
6151 }
6152
6153 /* Check the results by default. But if flag_fast_math is turned on,
6154 then assume sqrt will always be called with valid arguments. */
6155
6156 if (! flag_fast_math)
6157 {
6158 /* Don't define the builtin FP instructions
6159 if your machine is not IEEE. */
6160 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6161 abort ();
6162
6163 lab1 = gen_label_rtx ();
6164
6165 /* Test the result; if it is NaN, set errno=EDOM because
6166 the argument was not in the domain. */
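/* The sequence emitted here behaves roughly like
       if (! (y == y))
         errno = EDOM;   (or re-invoke the library function)
   relying on the rule that an IEEE NaN compares unequal even to itself.  */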
6167 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6168 emit_jump_insn (gen_beq (lab1));
6169
6170 #if TARGET_EDOM
6171 {
6172 #ifdef GEN_ERRNO_RTX
6173 rtx errno_rtx = GEN_ERRNO_RTX;
6174 #else
6175 rtx errno_rtx
6176 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6177 #endif
6178
6179 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6180 }
6181 #else
6182 /* We can't set errno=EDOM directly; let the library call do it.
6183 Pop the arguments right away in case the call gets deleted. */
6184 NO_DEFER_POP;
6185 expand_call (exp, target, 0);
6186 OK_DEFER_POP;
6187 #endif
6188
6189 emit_label (lab1);
6190 }
6191
6192 /* Output the entire sequence. */
6193 insns = get_insns ();
6194 end_sequence ();
6195 emit_insns (insns);
6196
6197 return target;
6198
6199 /* __builtin_apply_args returns a block of memory allocated on
6200 the stack into which is stored the arg pointer, structure
6201 value address, static chain, and all the registers that might
6202 possibly be used in performing a function call. The code is
6203 moved to the start of the function so the incoming values are
6204 saved. */
6205 case BUILT_IN_APPLY_ARGS:
6206 /* Don't do __builtin_apply_args more than once in a function.
6207 Save the result of the first call and reuse it. */
6208 if (apply_args_value != 0)
6209 return apply_args_value;
6210 {
6211 /* When this function is called, it means that registers must be
6212 saved on entry to this function. So we migrate the
6213 call to the first insn of this function. */
6214 rtx temp;
6215 rtx seq;
6216
6217 start_sequence ();
6218 temp = expand_builtin_apply_args ();
6219 seq = get_insns ();
6220 end_sequence ();
6221
6222 apply_args_value = temp;
6223
6224 /* Put the sequence after the NOTE that starts the function.
6225 If this is inside a SEQUENCE, make the outer-level insn
6226 chain current, so the code is placed at the start of the
6227 function. */
6228 push_topmost_sequence ();
6229 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6230 pop_topmost_sequence ();
6231 return temp;
6232 }
6233
6234 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6235 FUNCTION with a copy of the parameters described by
6236 ARGUMENTS, and ARGSIZE. It returns a block of memory
6237 allocated on the stack into which are stored all the registers
6238 that might possibly be used for returning the result of a
6239 function. ARGUMENTS is the value returned by
6240 __builtin_apply_args. ARGSIZE is the number of bytes of
6241 arguments that must be copied. ??? How should this value be
6242 computed? We'll also need a safe worst case value for varargs
6243 functions. */
6244 case BUILT_IN_APPLY:
6245 if (arglist == 0
6246 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6247 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6248 || TREE_CHAIN (arglist) == 0
6249 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6250 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6251 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6252 return const0_rtx;
6253 else
6254 {
6255 int i;
6256 tree t;
6257 rtx ops[3];
6258
6259 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6260 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6261
6262 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6263 }
6264
6265 /* __builtin_return (RESULT) causes the function to return the
6266 value described by RESULT. RESULT is address of the block of
6267 memory returned by __builtin_apply. */
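/* The typical use of __builtin_apply_args, __builtin_apply and
   __builtin_return together (a sketch, not code in this file) is a
   forwarding wrapper such as

       void *wrapper ()
       {
         void *args = __builtin_apply_args ();
         void *result = __builtin_apply ((void (*)()) real_fn, args, 64);
         __builtin_return (result);
       }

   where REAL_FN is whatever function is being forwarded to and the 64-byte
   argument size is only a guess supplied by the wrapper's author (see the
   ??? comment above).  */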
6268 case BUILT_IN_RETURN:
6269 if (arglist
6270 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6271 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
6272 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
6273 NULL_RTX, VOIDmode, 0));
6274 return const0_rtx;
6275
6276 case BUILT_IN_SAVEREGS:
6277 /* Don't do __builtin_saveregs more than once in a function.
6278 Save the result of the first call and reuse it. */
6279 if (saveregs_value != 0)
6280 return saveregs_value;
6281 {
6282 /* When this function is called, it means that registers must be
6283 saved on entry to this function. So we migrate the
6284 call to the first insn of this function. */
6285 rtx temp;
6286 rtx seq;
6287 rtx valreg, saved_valreg;
6288
6289 /* Now really call the function. `expand_call' does not call
6290 expand_builtin, so there is no danger of infinite recursion here. */
6291 start_sequence ();
6292
6293 #ifdef EXPAND_BUILTIN_SAVEREGS
6294 /* Do whatever the machine needs done in this case. */
6295 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6296 #else
6297 /* The register where the function returns its value
6298 is likely to have something else in it, such as an argument.
6299 So preserve that register around the call. */
6300 if (value_mode != VOIDmode)
6301 {
6302 valreg = hard_libcall_value (value_mode);
6303 saved_valreg = gen_reg_rtx (value_mode);
6304 emit_move_insn (saved_valreg, valreg);
6305 }
6306
6307 /* Generate the call, putting the value in a pseudo. */
6308 temp = expand_call (exp, target, ignore);
6309
6310 if (value_mode != VOIDmode)
6311 emit_move_insn (valreg, saved_valreg);
6312 #endif
6313
6314 seq = get_insns ();
6315 end_sequence ();
6316
6317 saveregs_value = temp;
6318
6319 /* Put the sequence after the NOTE that starts the function.
6320 If this is inside a SEQUENCE, make the outer-level insn
6321 chain current, so the code is placed at the start of the
6322 function. */
6323 push_topmost_sequence ();
6324 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6325 pop_topmost_sequence ();
6326 return temp;
6327 }
6328
6329 /* __builtin_args_info (N) returns word N of the arg space info
6330 for the current function. The number and meanings of the words
6331 are controlled by the definition of CUMULATIVE_ARGS. */
6332 case BUILT_IN_ARGS_INFO:
6333 {
6334 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6335 int i;
6336 int *word_ptr = (int *) &current_function_args_info;
6337 tree type, elts, result;
6338
6339 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6340 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6341 __FILE__, __LINE__);
6342
6343 if (arglist != 0)
6344 {
6345 tree arg = TREE_VALUE (arglist);
6346 if (TREE_CODE (arg) != INTEGER_CST)
6347 error ("argument of `__builtin_args_info' must be constant");
6348 else
6349 {
6350 int wordnum = TREE_INT_CST_LOW (arg);
6351
6352 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6353 error ("argument of `__builtin_args_info' out of range");
6354 else
6355 return GEN_INT (word_ptr[wordnum]);
6356 }
6357 }
6358 else
6359 error ("missing argument in `__builtin_args_info'");
6360
6361 return const0_rtx;
6362
6363 #if 0
6364 for (i = 0; i < nwords; i++)
6365 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6366
6367 type = build_array_type (integer_type_node,
6368 build_index_type (build_int_2 (nwords, 0)));
6369 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6370 TREE_CONSTANT (result) = 1;
6371 TREE_STATIC (result) = 1;
6372 result = build (INDIRECT_REF, build_pointer_type (type), result);
6373 TREE_CONSTANT (result) = 1;
6374 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6375 #endif
6376 }
6377
6378 /* Return the address of the first anonymous stack arg. */
6379 case BUILT_IN_NEXT_ARG:
6380 {
6381 tree fntype = TREE_TYPE (current_function_decl);
6382 if (!(TYPE_ARG_TYPES (fntype) != 0
6383 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6384 != void_type_node)))
6385 {
6386 error ("`va_start' used in function with fixed args");
6387 return const0_rtx;
6388 }
6389 }
6390
6391 return expand_binop (Pmode, add_optab,
6392 current_function_internal_arg_pointer,
6393 current_function_arg_offset_rtx,
6394 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6395
6396 case BUILT_IN_CLASSIFY_TYPE:
6397 if (arglist != 0)
6398 {
6399 tree type = TREE_TYPE (TREE_VALUE (arglist));
6400 enum tree_code code = TREE_CODE (type);
6401 if (code == VOID_TYPE)
6402 return GEN_INT (void_type_class);
6403 if (code == INTEGER_TYPE)
6404 return GEN_INT (integer_type_class);
6405 if (code == CHAR_TYPE)
6406 return GEN_INT (char_type_class);
6407 if (code == ENUMERAL_TYPE)
6408 return GEN_INT (enumeral_type_class);
6409 if (code == BOOLEAN_TYPE)
6410 return GEN_INT (boolean_type_class);
6411 if (code == POINTER_TYPE)
6412 return GEN_INT (pointer_type_class);
6413 if (code == REFERENCE_TYPE)
6414 return GEN_INT (reference_type_class);
6415 if (code == OFFSET_TYPE)
6416 return GEN_INT (offset_type_class);
6417 if (code == REAL_TYPE)
6418 return GEN_INT (real_type_class);
6419 if (code == COMPLEX_TYPE)
6420 return GEN_INT (complex_type_class);
6421 if (code == FUNCTION_TYPE)
6422 return GEN_INT (function_type_class);
6423 if (code == METHOD_TYPE)
6424 return GEN_INT (method_type_class);
6425 if (code == RECORD_TYPE)
6426 return GEN_INT (record_type_class);
6427 if (code == UNION_TYPE)
6428 return GEN_INT (union_type_class);
6429 if (code == ARRAY_TYPE)
6430 return GEN_INT (array_type_class);
6431 if (code == STRING_TYPE)
6432 return GEN_INT (string_type_class);
6433 if (code == SET_TYPE)
6434 return GEN_INT (set_type_class);
6435 if (code == FILE_TYPE)
6436 return GEN_INT (file_type_class);
6437 if (code == LANG_TYPE)
6438 return GEN_INT (lang_type_class);
6439 }
6440 return GEN_INT (no_type_class);
6441
6442 case BUILT_IN_CONSTANT_P:
6443 if (arglist == 0)
6444 return const0_rtx;
6445 else
6446 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6447 ? const1_rtx : const0_rtx);
6448
6449 case BUILT_IN_FRAME_ADDRESS:
6450 /* The argument must be a nonnegative integer constant.
6451 It counts the number of frames to scan up the stack.
6452 The value is the address of that frame. */
6453 case BUILT_IN_RETURN_ADDRESS:
6454 /* The argument must be a nonnegative integer constant.
6455 It counts the number of frames to scan up the stack.
6456 The value is the return address saved in that frame. */
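/* For example, "__builtin_return_address (0)" yields the address the
   current function will return to, and "__builtin_frame_address (1)" the
   frame address of its caller (a sketch; values for nonzero counts are
   not reliable on every machine).  */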
6457 if (arglist == 0)
6458 /* Warning about missing arg was already issued. */
6459 return const0_rtx;
6460 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6461 {
6462 error ("invalid arg to `__builtin_return_address'");
6463 return const0_rtx;
6464 }
6465 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6466 {
6467 error ("invalid arg to `__builtin_return_address'");
6468 return const0_rtx;
6469 }
6470 else
6471 {
6472 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6473 rtx tem = frame_pointer_rtx;
6474 int i;
6475
6476 /* Some machines need special handling before we can access arbitrary
6477 frames. For example, on the sparc, we must first flush all
6478 register windows to the stack. */
6479 #ifdef SETUP_FRAME_ADDRESSES
6480 SETUP_FRAME_ADDRESSES ();
6481 #endif
6482
6483 /* On the sparc, the return address is not in the frame; it is
6484 in a register. There is no way to access it off of the current
6485 frame pointer, but it can be accessed off the previous frame
6486 pointer by reading the value from the register window save
6487 area. */
6488 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6489 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6490 count--;
6491 #endif
6492
6493 /* Scan back COUNT frames to the specified frame. */
6494 for (i = 0; i < count; i++)
6495 {
6496 /* Assume the dynamic chain pointer is in the word that
6497 the frame address points to, unless otherwise specified. */
6498 #ifdef DYNAMIC_CHAIN_ADDRESS
6499 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6500 #endif
6501 tem = memory_address (Pmode, tem);
6502 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6503 }
6504
6505 /* For __builtin_frame_address, return what we've got. */
6506 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6507 return tem;
6508
6509 /* For __builtin_return_address,
6510 get the return address from that frame. */
6511 #ifdef RETURN_ADDR_RTX
6512 return RETURN_ADDR_RTX (count, tem);
6513 #else
6514 tem = memory_address (Pmode,
6515 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6516 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6517 #endif
6518 }
6519
6520 case BUILT_IN_ALLOCA:
6521 if (arglist == 0
6522 /* Arg could be non-integer if user redeclared this fcn wrong. */
6523 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6524 return const0_rtx;
6525 current_function_calls_alloca = 1;
6526 /* Compute the argument. */
6527 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6528
6529 /* Allocate the desired space. */
6530 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6531
6532 /* Record the new stack level for nonlocal gotos. */
6533 if (nonlocal_goto_handler_slot != 0)
6534 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6535 return target;
6536
6537 case BUILT_IN_FFS:
6538 /* If not optimizing, call the library function. */
6539 if (!optimize)
6540 break;
6541
6542 if (arglist == 0
6543 /* Arg could be non-integer if user redeclared this fcn wrong. */
6544 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6545 return const0_rtx;
6546
6547 /* Compute the argument. */
6548 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6549 /* Compute ffs, into TARGET if possible.
6550 Set TARGET to wherever the result comes back. */
6551 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6552 ffs_optab, op0, target, 1);
6553 if (target == 0)
6554 abort ();
6555 return target;
6556
6557 case BUILT_IN_STRLEN:
6558 /* If not optimizing, call the library function. */
6559 if (!optimize)
6560 break;
6561
6562 if (arglist == 0
6563 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6564 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6565 return const0_rtx;
6566 else
6567 {
6568 tree src = TREE_VALUE (arglist);
6569 tree len = c_strlen (src);
6570
6571 int align
6572 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6573
6574 rtx result, src_rtx, char_rtx;
6575 enum machine_mode insn_mode = value_mode, char_mode;
6576 enum insn_code icode;
6577
6578 /* If the length is known, just return it. */
6579 if (len != 0)
6580 return expand_expr (len, target, mode, 0);
6581
6582 /* If SRC is not a pointer type, don't do this operation inline. */
6583 if (align == 0)
6584 break;
6585
6586 /* Call a function if we can't compute strlen in the right mode. */
6587
6588 while (insn_mode != VOIDmode)
6589 {
6590 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6591 if (icode != CODE_FOR_nothing)
6592 break;
6593
6594 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6595 }
6596 if (insn_mode == VOIDmode)
6597 break;
6598
6599 /* Make a place to write the result of the instruction. */
6600 result = target;
6601 if (! (result != 0
6602 && GET_CODE (result) == REG
6603 && GET_MODE (result) == insn_mode
6604 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6605 result = gen_reg_rtx (insn_mode);
6606
6607 /* Make sure the operands are acceptable to the predicates. */
6608
6609 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6610 result = gen_reg_rtx (insn_mode);
6611
6612 src_rtx = memory_address (BLKmode,
6613 expand_expr (src, NULL_RTX, Pmode,
6614 EXPAND_NORMAL));
6615 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6616 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6617
6618 char_rtx = const0_rtx;
6619 char_mode = insn_operand_mode[(int)icode][2];
6620 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6621 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6622
6623 emit_insn (GEN_FCN (icode) (result,
6624 gen_rtx (MEM, BLKmode, src_rtx),
6625 char_rtx, GEN_INT (align)));
6626
6627 /* Return the value in the proper mode for this function. */
6628 if (GET_MODE (result) == value_mode)
6629 return result;
6630 else if (target != 0)
6631 {
6632 convert_move (target, result, 0);
6633 return target;
6634 }
6635 else
6636 return convert_to_mode (value_mode, result, 0);
6637 }
6638
6639 case BUILT_IN_STRCPY:
6640 /* If not optimizing, call the library function. */
6641 if (!optimize)
6642 break;
6643
6644 if (arglist == 0
6645 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6646 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6647 || TREE_CHAIN (arglist) == 0
6648 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6649 return const0_rtx;
6650 else
6651 {
6652 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6653
6654 if (len == 0)
6655 break;
6656
6657 len = size_binop (PLUS_EXPR, len, integer_one_node);
6658
6659 chainon (arglist, build_tree_list (NULL_TREE, len));
6660 }
6661
6662 /* Drops in. */
6663 case BUILT_IN_MEMCPY:
6664 /* If not optimizing, call the library function. */
6665 if (!optimize)
6666 break;
6667
6668 if (arglist == 0
6669 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6670 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6671 || TREE_CHAIN (arglist) == 0
6672 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6673 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6674 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6675 return const0_rtx;
6676 else
6677 {
6678 tree dest = TREE_VALUE (arglist);
6679 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6680 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6681
6682 int src_align
6683 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6684 int dest_align
6685 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6686 rtx dest_rtx, dest_mem, src_mem;
6687
6688 /* If either SRC or DEST is not a pointer type, don't do
6689 this operation in-line. */
6690 if (src_align == 0 || dest_align == 0)
6691 {
6692 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6693 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6694 break;
6695 }
6696
6697 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6698 dest_mem = gen_rtx (MEM, BLKmode,
6699 memory_address (BLKmode, dest_rtx));
6700 src_mem = gen_rtx (MEM, BLKmode,
6701 memory_address (BLKmode,
6702 expand_expr (src, NULL_RTX,
6703 Pmode,
6704 EXPAND_NORMAL)));
6705
6706 /* Copy word part most expediently. */
6707 emit_block_move (dest_mem, src_mem,
6708 expand_expr (len, NULL_RTX, VOIDmode, 0),
6709 MIN (src_align, dest_align));
6710 return dest_rtx;
6711 }
6712
6713 /* These comparison functions need an instruction that returns an actual
6714 index. An ordinary compare that just sets the condition codes
6715 is not enough. */
6716 #ifdef HAVE_cmpstrsi
6717 case BUILT_IN_STRCMP:
6718 /* If not optimizing, call the library function. */
6719 if (!optimize)
6720 break;
6721
6722 if (arglist == 0
6723 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6724 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6725 || TREE_CHAIN (arglist) == 0
6726 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6727 return const0_rtx;
6728 else if (!HAVE_cmpstrsi)
6729 break;
6730 {
6731 tree arg1 = TREE_VALUE (arglist);
6732 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6733 tree offset;
6734 tree len, len2;
6735
6736 len = c_strlen (arg1);
6737 if (len)
6738 len = size_binop (PLUS_EXPR, integer_one_node, len);
6739 len2 = c_strlen (arg2);
6740 if (len2)
6741 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6742
6743 /* If we don't have a constant length for the first, use the length
6744 of the second, if we know it. We don't require a constant for
6745 this case; some cost analysis could be done if both are available
6746 but neither is constant. For now, assume they're equally cheap.
6747
6748 If both strings have constant lengths, use the smaller. This
6749 could arise if optimization results in strcmp being called with
6750 two fixed strings, or if the code was machine-generated. We should
6751 add some code to the `memcmp' handler below to deal with such
6752 situations, someday. */
6753 if (!len || TREE_CODE (len) != INTEGER_CST)
6754 {
6755 if (len2)
6756 len = len2;
6757 else if (len == 0)
6758 break;
6759 }
6760 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6761 {
6762 if (tree_int_cst_lt (len2, len))
6763 len = len2;
6764 }
6765
6766 chainon (arglist, build_tree_list (NULL_TREE, len));
6767 }
6768
6769 /* Drops in. */
6770 case BUILT_IN_MEMCMP:
6771 /* If not optimizing, call the library function. */
6772 if (!optimize)
6773 break;
6774
6775 if (arglist == 0
6776 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6777 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6778 || TREE_CHAIN (arglist) == 0
6779 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6780 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6781 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6782 return const0_rtx;
6783 else if (!HAVE_cmpstrsi)
6784 break;
6785 {
6786 tree arg1 = TREE_VALUE (arglist);
6787 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6788 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6789 rtx result;
6790
6791 int arg1_align
6792 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6793 int arg2_align
6794 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6795 enum machine_mode insn_mode
6796 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6797
6798 /* If we don't have POINTER_TYPE, call the function. */
6799 if (arg1_align == 0 || arg2_align == 0)
6800 {
6801 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6802 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6803 break;
6804 }
6805
6806 /* Make a place to write the result of the instruction. */
6807 result = target;
6808 if (! (result != 0
6809 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6810 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6811 result = gen_reg_rtx (insn_mode);
6812
6813 emit_insn (gen_cmpstrsi (result,
6814 gen_rtx (MEM, BLKmode,
6815 expand_expr (arg1, NULL_RTX, Pmode,
6816 EXPAND_NORMAL)),
6817 gen_rtx (MEM, BLKmode,
6818 expand_expr (arg2, NULL_RTX, Pmode,
6819 EXPAND_NORMAL)),
6820 expand_expr (len, NULL_RTX, VOIDmode, 0),
6821 GEN_INT (MIN (arg1_align, arg2_align))));
6822
6823 /* Return the value in the proper mode for this function. */
6824 mode = TYPE_MODE (TREE_TYPE (exp));
6825 if (GET_MODE (result) == mode)
6826 return result;
6827 else if (target != 0)
6828 {
6829 convert_move (target, result, 0);
6830 return target;
6831 }
6832 else
6833 return convert_to_mode (mode, result, 0);
6834 }
6835 #else
6836 case BUILT_IN_STRCMP:
6837 case BUILT_IN_MEMCMP:
6838 break;
6839 #endif
6840
6841 default: /* just do library call, if unknown builtin */
6842 error ("built-in function `%s' not currently supported",
6843 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6844 }
6845
6846 /* The switch statement above can drop through to cause the function
6847 to be called normally. */
6848
6849 return expand_call (exp, target, ignore);
6850 }
6851 \f
6852 /* Built-in functions to perform an untyped call and return. */
6853
6854 /* For each register that may be used for calling a function, this
6855 gives a mode used to copy the register's value. VOIDmode indicates
6856 the register is not used for calling a function. If the machine
6857 has register windows, this gives only the outbound registers.
6858 INCOMING_REGNO gives the corresponding inbound register. */
6859 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6860
6861 /* For each register that may be used for returning values, this gives
6862 a mode used to copy the register's value. VOIDmode indicates the
6863 register is not used for returning values. If the machine has
6864 register windows, this gives only the outbound registers.
6865 INCOMING_REGNO gives the corresponding inbound register. */
6866 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6867
6868 /* Return the size required for the block returned by __builtin_apply_args,
6869 and initialize apply_args_mode. */
6870 static int
6871 apply_args_size ()
6872 {
6873 static int size = -1;
6874 int align, regno;
6875 enum machine_mode mode;
6876
6877 /* The values computed by this function never change. */
6878 if (size < 0)
6879 {
6880 /* The first value is the incoming arg-pointer. */
6881 size = GET_MODE_SIZE (Pmode);
6882
6883 /* The second value is the structure value address unless this is
6884 passed as an "invisible" first argument. */
6885 if (struct_value_rtx)
6886 size += GET_MODE_SIZE (Pmode);
6887
6888 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6889 if (FUNCTION_ARG_REGNO_P (regno))
6890 {
6891 /* Search for the proper mode for copying this register's
6892 value. I'm not sure this is right, but it works so far. */
6893 enum machine_mode best_mode = VOIDmode;
6894
6895 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6896 mode != VOIDmode;
6897 mode = GET_MODE_WIDER_MODE (mode))
6898 if (HARD_REGNO_MODE_OK (regno, mode)
6899 && HARD_REGNO_NREGS (regno, mode) == 1)
6900 best_mode = mode;
6901
6902 if (best_mode == VOIDmode)
6903 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6904 mode != VOIDmode;
6905 mode = GET_MODE_WIDER_MODE (mode))
6906 if (HARD_REGNO_MODE_OK (regno, mode)
6907 && (mov_optab->handlers[(int) mode].insn_code
6908 != CODE_FOR_nothing))
6909 best_mode = mode;
6910
6911 mode = best_mode;
6912 if (mode == VOIDmode)
6913 abort ();
6914
6915 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6916 if (size % align != 0)
6917 size = CEIL (size, align) * align;
6918 size += GET_MODE_SIZE (mode);
6919 apply_args_mode[regno] = mode;
6920 }
6921 else
6922 apply_args_mode[regno] = VOIDmode;
6923 }
6924 return size;
6925 }
6926
6927 /* Return the size required for the block returned by __builtin_apply,
6928 and initialize apply_result_mode. */
6929 static int
6930 apply_result_size ()
6931 {
6932 static int size = -1;
6933 int align, regno;
6934 enum machine_mode mode;
6935
6936 /* The values computed by this function never change. */
6937 if (size < 0)
6938 {
6939 size = 0;
6940
6941 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6942 if (FUNCTION_VALUE_REGNO_P (regno))
6943 {
6944 /* Search for the proper mode for copying this register's
6945 value. I'm not sure this is right, but it works so far. */
6946 enum machine_mode best_mode = VOIDmode;
6947
6948 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6949 mode != TImode;
6950 mode = GET_MODE_WIDER_MODE (mode))
6951 if (HARD_REGNO_MODE_OK (regno, mode))
6952 best_mode = mode;
6953
6954 if (best_mode == VOIDmode)
6955 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6956 mode != VOIDmode;
6957 mode = GET_MODE_WIDER_MODE (mode))
6958 if (HARD_REGNO_MODE_OK (regno, mode)
6959 && (mov_optab->handlers[(int) mode].insn_code
6960 != CODE_FOR_nothing))
6961 best_mode = mode;
6962
6963 mode = best_mode;
6964 if (mode == VOIDmode)
6965 abort ();
6966
6967 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6968 if (size % align != 0)
6969 size = CEIL (size, align) * align;
6970 size += GET_MODE_SIZE (mode);
6971 apply_result_mode[regno] = mode;
6972 }
6973 else
6974 apply_result_mode[regno] = VOIDmode;
6975
6976 /* Allow targets that use untyped_call and untyped_return to override
6977 the size so that machine-specific information can be stored here. */
6978 #ifdef APPLY_RESULT_SIZE
6979 size = APPLY_RESULT_SIZE;
6980 #endif
6981 }
6982 return size;
6983 }
6984
6985 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6986 /* Create a vector describing the result block RESULT. If SAVEP is true,
6987 the result block is used to save the values; otherwise it is used to
6988 restore the values. */
6989 static rtx
6990 result_vector (savep, result)
6991 int savep;
6992 rtx result;
6993 {
6994 int regno, size, align, nelts;
6995 enum machine_mode mode;
6996 rtx reg, mem;
6997 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6998
6999 size = nelts = 0;
7000 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7001 if ((mode = apply_result_mode[regno]) != VOIDmode)
7002 {
7003 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7004 if (size % align != 0)
7005 size = CEIL (size, align) * align;
7006 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7007 mem = change_address (result, mode,
7008 plus_constant (XEXP (result, 0), size));
7009 savevec[nelts++] = (savep
7010 ? gen_rtx (SET, VOIDmode, mem, reg)
7011 : gen_rtx (SET, VOIDmode, reg, mem));
7012 size += GET_MODE_SIZE (mode);
7013 }
7014 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7015 }
7016 #endif /* HAVE_untyped_call or HAVE_untyped_return */
7017
7018
7019 /* Save the state required to perform an untyped call with the same
7020 arguments as were passed to the current function. */
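/* The block built here follows the layout computed by apply_args_size:
   the incoming arg pointer at offset 0, then (if the target passes a
   structure value address in a register) that address, then each argument
   register in turn, each aligned to the natural alignment of the mode
   chosen for it in apply_args_size.  */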
7021 static rtx
7022 expand_builtin_apply_args ()
7023 {
7024 rtx registers;
7025 int size, align, regno;
7026 enum machine_mode mode;
7027
7028 /* Create a block where the arg-pointer, structure value address,
7029 and argument registers can be saved. */
7030 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7031
7032 /* Walk past the arg-pointer and structure value address. */
7033 size = GET_MODE_SIZE (Pmode);
7034 if (struct_value_rtx)
7035 size += GET_MODE_SIZE (Pmode);
7036
7037 /* Save each register used in calling a function to the block. */
7038 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7039 if ((mode = apply_args_mode[regno]) != VOIDmode)
7040 {
7041 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7042 if (size % align != 0)
7043 size = CEIL (size, align) * align;
7044 emit_move_insn (change_address (registers, mode,
7045 plus_constant (XEXP (registers, 0),
7046 size)),
7047 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7048 size += GET_MODE_SIZE (mode);
7049 }
7050
7051 /* Save the arg pointer to the block. */
7052 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7053 copy_to_reg (virtual_incoming_args_rtx));
7054 size = GET_MODE_SIZE (Pmode);
7055
7056 /* Save the structure value address unless this is passed as an
7057 "invisible" first argument. */
7058 if (struct_value_incoming_rtx)
7059 {
7060 emit_move_insn (change_address (registers, Pmode,
7061 plus_constant (XEXP (registers, 0),
7062 size)),
7063 copy_to_reg (struct_value_incoming_rtx));
7064 size += GET_MODE_SIZE (Pmode);
7065 }
7066
7067 /* Return the address of the block. */
7068 return copy_addr_to_reg (XEXP (registers, 0));
7069 }
7070
7071 /* Perform an untyped call and save the state required to perform an
7072 untyped return of whatever value was returned by the given function. */
7073 static rtx
7074 expand_builtin_apply (function, arguments, argsize)
7075 rtx function, arguments, argsize;
7076 {
7077 int size, align, regno;
7078 enum machine_mode mode;
7079 rtx incoming_args, result, reg, dest, call_insn;
7080 rtx old_stack_level = 0;
7081 rtx use_insns = 0;
7082
7083 /* Create a block where the return registers can be saved. */
7084 result = assign_stack_local (BLKmode, apply_result_size (), -1);
7085
7086 /* ??? The argsize value should be adjusted here. */
7087
7088 /* Fetch the arg pointer from the ARGUMENTS block. */
7089 incoming_args = gen_reg_rtx (Pmode);
7090 emit_move_insn (incoming_args,
7091 gen_rtx (MEM, Pmode, arguments));
7092 #ifndef STACK_GROWS_DOWNWARD
7093 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
7094 incoming_args, 0, OPTAB_LIB_WIDEN);
7095 #endif
7096
7097 /* Perform postincrements before actually calling the function. */
7098 emit_queue ();
7099
7100 /* Push a new argument block and copy the arguments. */
7101 do_pending_stack_adjust ();
7102 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
7103
7104 /* Push a block of memory onto the stack to store the memory arguments.
7105 Save the address in a register, and copy the memory arguments. ??? I
7106 haven't figured out how the calling convention macros affect this,

7107 but it's likely that the source and/or destination addresses in
7108 the block copy will need updating in machine specific ways. */
7109 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7110 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7111 gen_rtx (MEM, BLKmode, incoming_args),
7112 argsize,
7113 PARM_BOUNDARY / BITS_PER_UNIT);
7114
7115 /* Refer to the argument block. */
7116 apply_args_size ();
7117 arguments = gen_rtx (MEM, BLKmode, arguments);
7118
7119 /* Walk past the arg-pointer and structure value address. */
7120 size = GET_MODE_SIZE (Pmode);
7121 if (struct_value_rtx)
7122 size += GET_MODE_SIZE (Pmode);
7123
7124 /* Restore each of the registers previously saved. Make USE insns
7125 for each of these registers for use in making the call. */
7126 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7127 if ((mode = apply_args_mode[regno]) != VOIDmode)
7128 {
7129 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7130 if (size % align != 0)
7131 size = CEIL (size, align) * align;
7132 reg = gen_rtx (REG, mode, regno);
7133 emit_move_insn (reg,
7134 change_address (arguments, mode,
7135 plus_constant (XEXP (arguments, 0),
7136 size)));
7137
7138 push_to_sequence (use_insns);
7139 emit_insn (gen_rtx (USE, VOIDmode, reg));
7140 use_insns = get_insns ();
7141 end_sequence ();
7142 size += GET_MODE_SIZE (mode);
7143 }
7144
7145 /* Restore the structure value address unless this is passed as an
7146 "invisible" first argument. */
7147 size = GET_MODE_SIZE (Pmode);
7148 if (struct_value_rtx)
7149 {
7150 rtx value = gen_reg_rtx (Pmode);
7151 emit_move_insn (value,
7152 change_address (arguments, Pmode,
7153 plus_constant (XEXP (arguments, 0),
7154 size)));
7155 emit_move_insn (struct_value_rtx, value);
7156 if (GET_CODE (struct_value_rtx) == REG)
7157 {
7158 push_to_sequence (use_insns);
7159 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
7160 use_insns = get_insns ();
7161 end_sequence ();
7162 }
7163 size += GET_MODE_SIZE (Pmode);
7164 }
7165
7166 /* All arguments and registers used for the call are set up by now! */
7167 function = prepare_call_address (function, NULL_TREE, &use_insns);
7168
7169 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
7170 need be done; also, we don't want to load it into a register as an
7171 optimization, because prepare_call_address already did so if appropriate. */
7172 if (GET_CODE (function) != SYMBOL_REF)
7173 function = memory_address (FUNCTION_MODE, function);
7174
7175 /* Generate the actual call instruction and save the return value. */
7176 #ifdef HAVE_untyped_call
7177 if (HAVE_untyped_call)
7178 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7179 result, result_vector (1, result)));
7180 else
7181 #endif
7182 #ifdef HAVE_call_value
7183 if (HAVE_call_value)
7184 {
7185 rtx valreg = 0;
7186
7187 /* Locate the unique return register. It is not possible to
7188 express a call that sets more than one return register using
7189 call_value; use untyped_call for that. In fact, untyped_call
7190 only needs to save the return registers in the given block. */
7191 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7192 if ((mode = apply_result_mode[regno]) != VOIDmode)
7193 {
7194 if (valreg)
7195 abort (); /* HAVE_untyped_call required. */
7196 valreg = gen_rtx (REG, mode, regno);
7197 }
7198
7199 emit_call_insn (gen_call_value (valreg,
7200 gen_rtx (MEM, FUNCTION_MODE, function),
7201 const0_rtx, NULL_RTX, const0_rtx));
7202
7203 emit_move_insn (change_address (result, GET_MODE (valreg),
7204 XEXP (result, 0)),
7205 valreg);
7206 }
7207 else
7208 #endif
7209 abort ();
7210
7211 /* Find the CALL insn we just emitted and write the USE insns before it. */
7212 for (call_insn = get_last_insn ();
7213 call_insn && GET_CODE (call_insn) != CALL_INSN;
7214 call_insn = PREV_INSN (call_insn))
7215 ;
7216
7217 if (! call_insn)
7218 abort ();
7219
7220 /* Put the USE insns before the CALL. */
7221 emit_insns_before (use_insns, call_insn);
7222
7223 /* Restore the stack. */
7224 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
7225
7226 /* Return the address of the result block. */
7227 return copy_addr_to_reg (XEXP (result, 0));
7228 }
7229
7230 /* Perform an untyped return. */
7231 static void
7232 expand_builtin_return (result)
7233 rtx result;
7234 {
7235 int size, align, regno;
7236 enum machine_mode mode;
7237 rtx reg;
7238 rtx use_insns = 0;
7239
7240 apply_result_size ();
7241 result = gen_rtx (MEM, BLKmode, result);
7242
7243 #ifdef HAVE_untyped_return
7244 if (HAVE_untyped_return)
7245 {
7246 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
7247 emit_barrier ();
7248 return;
7249 }
7250 #endif
7251
7252 /* Restore the return value and note that each value is used. */
7253 size = 0;
7254 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7255 if ((mode = apply_result_mode[regno]) != VOIDmode)
7256 {
7257 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7258 if (size % align != 0)
7259 size = CEIL (size, align) * align;
7260 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
7261 emit_move_insn (reg,
7262 change_address (result, mode,
7263 plus_constant (XEXP (result, 0),
7264 size)));
7265
7266 push_to_sequence (use_insns);
7267 emit_insn (gen_rtx (USE, VOIDmode, reg));
7268 use_insns = get_insns ();
7269 end_sequence ();
7270 size += GET_MODE_SIZE (mode);
7271 }
7272
7273 /* Put the USE insns before the return. */
7274 emit_insns (use_insns);
7275
7276 /* Return whatever values were restored by jumping directly to the end
7277 of the function. */
7278 expand_null_return ();
7279 }
7280 \f
7281 /* Expand code for a post- or pre- increment or decrement
7282 and return the RTX for the result.
7283 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
7284
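/* For a postincrement such as `i++', when OP0 really is the lvalue and the
   target has a suitable add insn, the add is queued (enqueue_insn) so the
   old value can be used before the increment is emitted; otherwise, and for
   all preincrements, the incremented value is computed and stored back into
   the lvalue with an ordinary assignment or move.  */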
7285 static rtx
7286 expand_increment (exp, post)
7287 register tree exp;
7288 int post;
7289 {
7290 register rtx op0, op1;
7291 register rtx temp, value;
7292 register tree incremented = TREE_OPERAND (exp, 0);
7293 optab this_optab = add_optab;
7294 int icode;
7295 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7296 int op0_is_copy = 0;
7297
7298 /* Stabilize any component ref that might need to be
7299 evaluated more than once below. */
7300 if (!post
7301 || TREE_CODE (incremented) == BIT_FIELD_REF
7302 || (TREE_CODE (incremented) == COMPONENT_REF
7303 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
7304 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
7305 incremented = stabilize_reference (incremented);
7306
7307 /* Compute the operands as RTX.
7308 Note whether OP0 is the actual lvalue or a copy of it:
7309 I believe it is a copy iff it is a register or subreg
7310 and insns were generated in computing it. */
7311
7312 temp = get_last_insn ();
7313 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
7314
7315 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
7316 in place but instead must do sign- or zero-extension during assignment,
7317 so we copy it into a new register and let the code below use it as
7318 a copy.
7319
7320 Note that we can safely modify this SUBREG since it is known not to be
7321 shared (it was made by the expand_expr call above). */
7322
7323 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
7324 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
7325
7326 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
7327 && temp != get_last_insn ());
7328 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7329
7330 /* Decide whether incrementing or decrementing. */
7331 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
7332 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7333 this_optab = sub_optab;
7334
7335 /* If OP0 is not the actual lvalue, but rather a copy in a register,
7336 then we cannot just increment OP0. We must therefore contrive to
7337 increment the original value. Then, for postincrement, we can return
7338 OP0 since it is a copy of the old value. For preincrement, we want
7339 to always expand here, since this generates better or equivalent code. */
7340 if (!post || op0_is_copy)
7341 {
7342 /* This is the easiest way to increment the value wherever it is.
7343 Problems with multiple evaluation of INCREMENTED are prevented
7344 because either (1) it is a component_ref or preincrement,
7345 in which case it was stabilized above, or (2) it is an array_ref
7346 with constant index in an array in a register, which is
7347 safe to reevaluate. */
7348 tree newexp = build ((this_optab == add_optab
7349 ? PLUS_EXPR : MINUS_EXPR),
7350 TREE_TYPE (exp),
7351 incremented,
7352 TREE_OPERAND (exp, 1));
7353 temp = expand_assignment (incremented, newexp, ! post, 0);
7354 return post ? op0 : temp;
7355 }
7356
7357 /* Convert decrement by a constant into a negative increment. */
7358 if (this_optab == sub_optab
7359 && GET_CODE (op1) == CONST_INT)
7360 {
7361 op1 = GEN_INT (- INTVAL (op1));
7362 this_optab = add_optab;
7363 }
7364
7365 if (post)
7366 {
7367 /* We have a true reference to the value in OP0.
7368 If there is an insn to add or subtract in this mode, queue it. */
7369
7370 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
7371 op0 = stabilize (op0);
7372 #endif
7373
7374 icode = (int) this_optab->handlers[(int) mode].insn_code;
7375 if (icode != (int) CODE_FOR_nothing
7376 /* Make sure that OP0 is valid for operands 0 and 1
7377 of the insn we want to queue. */
7378 && (*insn_operand_predicate[icode][0]) (op0, mode)
7379 && (*insn_operand_predicate[icode][1]) (op0, mode))
7380 {
7381 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
7382 op1 = force_reg (mode, op1);
7383
7384 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
7385 }
7386 }
7387
7388 /* Preincrement, or we can't increment with one simple insn. */
7389 if (post)
7390 /* Save a copy of the value before inc or dec, to return it later. */
7391 temp = value = copy_to_reg (op0);
7392 else
7393 /* Arrange to return the incremented value. */
7394 /* Copy the rtx because expand_binop will protect from the queue,
7395 and the results of that would be invalid for us to return
7396 if our caller does emit_queue before using our result. */
7397 temp = copy_rtx (value = op0);
7398
7399 /* Increment however we can. */
7400 op1 = expand_binop (mode, this_optab, value, op1, op0,
7401 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
7402 /* Make sure the value is stored into OP0. */
7403 if (op1 != op0)
7404 emit_move_insn (op0, op1);
7405
7406 return temp;
7407 }
7408 \f
7409 /* Expand all function calls contained within EXP, innermost ones first.
7410 But don't look within expressions that have sequence points.
7411 For each CALL_EXPR, record the rtx for its value
7412 in the CALL_EXPR_RTL field. */
7413
7414 static void
7415 preexpand_calls (exp)
7416 tree exp;
7417 {
7418 register int nops, i;
7419 int type = TREE_CODE_CLASS (TREE_CODE (exp));
7420
7421 if (! do_preexpand_calls)
7422 return;
7423
7424 /* Only expressions and references can contain calls. */
7425
7426 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
7427 return;
7428
7429 switch (TREE_CODE (exp))
7430 {
7431 case CALL_EXPR:
7432 /* Do nothing if already expanded. */
7433 if (CALL_EXPR_RTL (exp) != 0)
7434 return;
7435
7436 /* Do nothing to built-in functions. */
7437 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
7438 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
7439 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7440 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
7441 return;
7442
7443 case COMPOUND_EXPR:
7444 case COND_EXPR:
7445 case TRUTH_ANDIF_EXPR:
7446 case TRUTH_ORIF_EXPR:
7447 /* If we find one of these, then we can be sure
7448 the adjust will be done for it (since it makes jumps).
7449 Do it now, so that if this is inside an argument
7450 of a function, we don't get the stack adjustment
7451 after some other args have already been pushed. */
7452 do_pending_stack_adjust ();
7453 return;
7454
7455 case BLOCK:
7456 case RTL_EXPR:
7457 case WITH_CLEANUP_EXPR:
7458 return;
7459
7460 case SAVE_EXPR:
7461 if (SAVE_EXPR_RTL (exp) != 0)
7462 return;
7463 }
7464
7465 nops = tree_code_length[(int) TREE_CODE (exp)];
7466 for (i = 0; i < nops; i++)
7467 if (TREE_OPERAND (exp, i) != 0)
7468 {
7469 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
7470 if (type == 'e' || type == '<' || type == '1' || type == '2'
7471 || type == 'r')
7472 preexpand_calls (TREE_OPERAND (exp, i));
7473 }
7474 }
7475 \f
7476 /* At the start of a function, record that we have no previously-pushed
7477 arguments waiting to be popped. */
7478
7479 void
7480 init_pending_stack_adjust ()
7481 {
7482 pending_stack_adjust = 0;
7483 }
7484
7485 /* When exiting from function, if safe, clear out any pending stack adjust
7486 so the adjustment won't get done. */
7487
7488 void
7489 clear_pending_stack_adjust ()
7490 {
7491 #ifdef EXIT_IGNORE_STACK
7492 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
7493 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
7494 && ! flag_inline_functions)
7495 pending_stack_adjust = 0;
7496 #endif
7497 }
7498
7499 /* Pop any previously-pushed arguments that have not been popped yet. */
7500
7501 void
7502 do_pending_stack_adjust ()
7503 {
7504 if (inhibit_defer_pop == 0)
7505 {
7506 if (pending_stack_adjust != 0)
7507 adjust_stack (GEN_INT (pending_stack_adjust));
7508 pending_stack_adjust = 0;
7509 }
7510 }
7511
7512 /* Expand all cleanups up to OLD_CLEANUPS.
7513 Needed here, and also for language-dependent calls. */
7514
7515 void
7516 expand_cleanups_to (old_cleanups)
7517 tree old_cleanups;
7518 {
7519 while (cleanups_this_call != old_cleanups)
7520 {
7521 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
7522 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
7523 }
7524 }
7525 \f
7526 /* Expand conditional expressions. */
7527
7528 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
7529 LABEL is an rtx of code CODE_LABEL, in this function and all the
7530 functions here. */
7531
7532 void
7533 jumpifnot (exp, label)
7534 tree exp;
7535 rtx label;
7536 {
7537 do_jump (exp, label, NULL_RTX);
7538 }
7539
7540 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
7541
7542 void
7543 jumpif (exp, label)
7544 tree exp;
7545 rtx label;
7546 {
7547 do_jump (exp, NULL_RTX, label);
7548 }
7549
7550 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7551 the result is zero, or IF_TRUE_LABEL if the result is one.
7552 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7553 meaning fall through in that case.
7554
7555 do_jump always does any pending stack adjust except when it does not
7556 actually perform a jump. An example where there is no jump
7557 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7558
7559 This function is responsible for optimizing cases such as
7560 &&, || and comparison operators in EXP. */
7561
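/* For example, `if (a && b)' reaches the TRUTH_ANDIF_EXPR case below and
   becomes roughly
       do_jump (a, false_label, 0);     jump past the whole test if A is 0
       do_jump (b, false_label, true_label);
   while `a || b' is the mirror image using the true label, so the condition
   is never materialized as a 0/1 value.  */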
7562 void
7563 do_jump (exp, if_false_label, if_true_label)
7564 tree exp;
7565 rtx if_false_label, if_true_label;
7566 {
7567 register enum tree_code code = TREE_CODE (exp);
7568 /* Some cases need to create a label to jump to
7569 in order to properly fall through.
7570 These cases set DROP_THROUGH_LABEL nonzero. */
7571 rtx drop_through_label = 0;
7572 rtx temp;
7573 rtx comparison = 0;
7574 int i;
7575 tree type;
7576
7577 emit_queue ();
7578
7579 switch (code)
7580 {
7581 case ERROR_MARK:
7582 break;
7583
7584 case INTEGER_CST:
7585 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7586 if (temp)
7587 emit_jump (temp);
7588 break;
7589
7590 #if 0
7591 /* This is not true with #pragma weak */
7592 case ADDR_EXPR:
7593 /* The address of something can never be zero. */
7594 if (if_true_label)
7595 emit_jump (if_true_label);
7596 break;
7597 #endif
7598
7599 case NOP_EXPR:
7600 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7601 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7602 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7603 goto normal;
7604 case CONVERT_EXPR:
7605 /* If we are narrowing the operand, we have to do the compare in the
7606 narrower mode. */
7607 if ((TYPE_PRECISION (TREE_TYPE (exp))
7608 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7609 goto normal;
7610 case NON_LVALUE_EXPR:
7611 case REFERENCE_EXPR:
7612 case ABS_EXPR:
7613 case NEGATE_EXPR:
7614 case LROTATE_EXPR:
7615 case RROTATE_EXPR:
7616 /* These cannot change zero->non-zero or vice versa. */
7617 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7618 break;
7619
7620 #if 0
7621 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
7622 a test and can be longer if the test is eliminated. */
7623 case PLUS_EXPR:
7624 /* Reduce to minus. */
7625 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7626 TREE_OPERAND (exp, 0),
7627 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7628 TREE_OPERAND (exp, 1))));
7629 /* Process as MINUS. */
7630 #endif
7631
7632 case MINUS_EXPR:
7633 /* Non-zero iff operands of minus differ. */
7634 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7635 TREE_OPERAND (exp, 0),
7636 TREE_OPERAND (exp, 1)),
7637 NE, NE);
7638 break;
7639
7640 case BIT_AND_EXPR:
7641 /* If we are AND'ing with a small constant, do this comparison in the
7642 smallest type that fits. If the machine doesn't have comparisons
7643 that small, it will be converted back to the wider comparison.
7644 This helps if we are testing the sign bit of a narrower object.
7645 combine can't do this for us because it can't know whether a
7646 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7647
7648 if (! SLOW_BYTE_ACCESS
7649 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7650 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7651 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7652 && (type = type_for_size (i + 1, 1)) != 0
7653 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7654 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7655 != CODE_FOR_nothing))
7656 {
7657 do_jump (convert (type, exp), if_false_label, if_true_label);
7658 break;
7659 }
7660 goto normal;
7661
7662 case TRUTH_NOT_EXPR:
7663 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7664 break;
7665
7666 case TRUTH_ANDIF_EXPR:
7667 if (if_false_label == 0)
7668 if_false_label = drop_through_label = gen_label_rtx ();
7669 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7670 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7671 break;
7672
7673 case TRUTH_ORIF_EXPR:
7674 if (if_true_label == 0)
7675 if_true_label = drop_through_label = gen_label_rtx ();
7676 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7677 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7678 break;
7679
7680 case COMPOUND_EXPR:
7681 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7682 free_temp_slots ();
7683 emit_queue ();
7684 do_pending_stack_adjust ();
7685 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7686 break;
7687
7688 case COMPONENT_REF:
7689 case BIT_FIELD_REF:
7690 case ARRAY_REF:
7691 {
7692 int bitsize, bitpos, unsignedp;
7693 enum machine_mode mode;
7694 tree type;
7695 tree offset;
7696 int volatilep = 0;
7697
7698 /* Get description of this reference. We don't actually care
7699 about the underlying object here. */
7700 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7701 &mode, &unsignedp, &volatilep);
7702
7703 type = type_for_size (bitsize, unsignedp);
7704 if (! SLOW_BYTE_ACCESS
7705 && type != 0 && bitsize >= 0
7706 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7707 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7708 != CODE_FOR_nothing))
7709 {
7710 do_jump (convert (type, exp), if_false_label, if_true_label);
7711 break;
7712 }
7713 goto normal;
7714 }
7715
7716 case COND_EXPR:
7717 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7718 if (integer_onep (TREE_OPERAND (exp, 1))
7719 && integer_zerop (TREE_OPERAND (exp, 2)))
7720 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7721
7722 else if (integer_zerop (TREE_OPERAND (exp, 1))
7723 && integer_onep (TREE_OPERAND (exp, 2)))
7724 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7725
7726 else
7727 {
7728 register rtx label1 = gen_label_rtx ();
7729 drop_through_label = gen_label_rtx ();
7730 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7731 /* Now the THEN-expression. */
7732 do_jump (TREE_OPERAND (exp, 1),
7733 if_false_label ? if_false_label : drop_through_label,
7734 if_true_label ? if_true_label : drop_through_label);
7735 /* In case the do_jump just above never jumps. */
7736 do_pending_stack_adjust ();
7737 emit_label (label1);
7738 /* Now the ELSE-expression. */
7739 do_jump (TREE_OPERAND (exp, 2),
7740 if_false_label ? if_false_label : drop_through_label,
7741 if_true_label ? if_true_label : drop_through_label);
7742 }
7743 break;
7744
7745 case EQ_EXPR:
7746 if (integer_zerop (TREE_OPERAND (exp, 1)))
7747 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7748 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7749 == MODE_INT)
7750 &&
7751 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7752 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7753 else
7754 comparison = compare (exp, EQ, EQ);
7755 break;
7756
7757 case NE_EXPR:
7758 if (integer_zerop (TREE_OPERAND (exp, 1)))
7759 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7760 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7761 == MODE_INT)
7762 &&
7763 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7764 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7765 else
7766 comparison = compare (exp, NE, NE);
7767 break;
7768
7769 case LT_EXPR:
7770 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7771 == MODE_INT)
7772 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7773 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7774 else
7775 comparison = compare (exp, LT, LTU);
7776 break;
7777
7778 case LE_EXPR:
7779 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7780 == MODE_INT)
7781 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7782 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7783 else
7784 comparison = compare (exp, LE, LEU);
7785 break;
7786
7787 case GT_EXPR:
7788 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7789 == MODE_INT)
7790 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7791 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7792 else
7793 comparison = compare (exp, GT, GTU);
7794 break;
7795
7796 case GE_EXPR:
7797 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7798 == MODE_INT)
7799 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7800 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7801 else
7802 comparison = compare (exp, GE, GEU);
7803 break;
7804
7805 default:
7806 normal:
7807 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7808 #if 0
7809 /* This is not needed any more and causes poor code since it causes
7810 comparisons and tests from non-SI objects to have different code
7811 sequences. */
7812 /* Copy to register to avoid generating bad insns by cse
7813 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7814 if (!cse_not_expected && GET_CODE (temp) == MEM)
7815 temp = copy_to_reg (temp);
7816 #endif
7817 do_pending_stack_adjust ();
7818 if (GET_CODE (temp) == CONST_INT)
7819 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7820 else if (GET_CODE (temp) == LABEL_REF)
7821 comparison = const_true_rtx;
7822 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7823 && !can_compare_p (GET_MODE (temp)))
7824 /* Note swapping the labels gives us not-equal. */
7825 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7826 else if (GET_MODE (temp) != VOIDmode)
7827 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7828 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7829 GET_MODE (temp), NULL_RTX, 0);
7830 else
7831 abort ();
7832 }
7833
7834 /* Do any postincrements in the expression that was tested. */
7835 emit_queue ();
7836
7837 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7838 straight into a conditional jump instruction as the jump condition.
7839 Otherwise, all the work has been done already. */
7840
7841 if (comparison == const_true_rtx)
7842 {
7843 if (if_true_label)
7844 emit_jump (if_true_label);
7845 }
7846 else if (comparison == const0_rtx)
7847 {
7848 if (if_false_label)
7849 emit_jump (if_false_label);
7850 }
7851 else if (comparison)
7852 do_jump_for_compare (comparison, if_false_label, if_true_label);
7853
7854 free_temp_slots ();
7855
7856 if (drop_through_label)
7857 {
7858 /* If do_jump produces code that might be jumped around,
7859 do any stack adjusts from that code, before the place
7860 where control merges in. */
7861 do_pending_stack_adjust ();
7862 emit_label (drop_through_label);
7863 }
7864 }
7865 \f
7866 /* Given a comparison expression EXP for values too wide to be compared
7867 with one insn, test the comparison and jump to the appropriate label.
7868 The code of EXP is ignored; we always test GT if SWAP is 0,
7869 and LT if SWAP is 1. */
7870
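/* For example, comparing two DImode values on a 32-bit target is done as two
   word_mode comparisons, most significant word first: jump to the "greater"
   label if the current words compare greater, move on to the next (lower)
   words only if they are equal, and compare every word but the high-order
   one unsigned.  */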
7871 static void
7872 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7873 tree exp;
7874 int swap;
7875 rtx if_false_label, if_true_label;
7876 {
7877 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7878 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7879 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7880 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7881 rtx drop_through_label = 0;
7882 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7883 int i;
7884
7885 if (! if_true_label || ! if_false_label)
7886 drop_through_label = gen_label_rtx ();
7887 if (! if_true_label)
7888 if_true_label = drop_through_label;
7889 if (! if_false_label)
7890 if_false_label = drop_through_label;
7891
7892 /* Compare a word at a time, high order first. */
7893 for (i = 0; i < nwords; i++)
7894 {
7895 rtx comp;
7896 rtx op0_word, op1_word;
7897
7898 if (WORDS_BIG_ENDIAN)
7899 {
7900 op0_word = operand_subword_force (op0, i, mode);
7901 op1_word = operand_subword_force (op1, i, mode);
7902 }
7903 else
7904 {
7905 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7906 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7907 }
7908
7909 /* All but the high-order word must be compared as unsigned. */
7910 comp = compare_from_rtx (op0_word, op1_word,
7911 (unsignedp || i > 0) ? GTU : GT,
7912 unsignedp, word_mode, NULL_RTX, 0);
7913 if (comp == const_true_rtx)
7914 emit_jump (if_true_label);
7915 else if (comp != const0_rtx)
7916 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7917
7918 /* Consider lower words only if these are equal. */
7919 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7920 NULL_RTX, 0);
7921 if (comp == const_true_rtx)
7922 emit_jump (if_false_label);
7923 else if (comp != const0_rtx)
7924 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7925 }
7926
7927 if (if_false_label)
7928 emit_jump (if_false_label);
7929 if (drop_through_label)
7930 emit_label (drop_through_label);
7931 }
7932
7933 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7934 with one insn, test the comparison and jump to the appropriate label. */
7935
7936 static void
7937 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7938 tree exp;
7939 rtx if_false_label, if_true_label;
7940 {
7941 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7942 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7943 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7944 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7945 int i;
7946 rtx drop_through_label = 0;
7947
7948 if (! if_false_label)
7949 drop_through_label = if_false_label = gen_label_rtx ();
7950
7951 for (i = 0; i < nwords; i++)
7952 {
7953 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7954 operand_subword_force (op1, i, mode),
7955 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7956 word_mode, NULL_RTX, 0);
7957 if (comp == const_true_rtx)
7958 emit_jump (if_false_label);
7959 else if (comp != const0_rtx)
7960 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7961 }
7962
7963 if (if_true_label)
7964 emit_jump (if_true_label);
7965 if (drop_through_label)
7966 emit_label (drop_through_label);
7967 }
7968 \f
7969 /* Jump according to whether OP0 is 0.
7970 We assume that OP0 has an integer mode that is too wide
7971 for the available compare insns. */
7972
7973 static void
7974 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7975 rtx op0;
7976 rtx if_false_label, if_true_label;
7977 {
7978 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7979 int i;
7980 rtx drop_through_label = 0;
7981
7982 if (! if_false_label)
7983 drop_through_label = if_false_label = gen_label_rtx ();
7984
7985 for (i = 0; i < nwords; i++)
7986 {
7987 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7988 GET_MODE (op0)),
7989 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7990 if (comp == const_true_rtx)
7991 emit_jump (if_false_label);
7992 else if (comp != const0_rtx)
7993 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7994 }
7995
7996 if (if_true_label)
7997 emit_jump (if_true_label);
7998 if (drop_through_label)
7999 emit_label (drop_through_label);
8000 }
8001
8002 /* Given a comparison expression in rtl form, output conditional branches to
8003 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
8004
8005 static void
8006 do_jump_for_compare (comparison, if_false_label, if_true_label)
8007 rtx comparison, if_false_label, if_true_label;
8008 {
8009 if (if_true_label)
8010 {
8011 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8012 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
8013 else
8014 abort ();
8015
8016 if (if_false_label)
8017 emit_jump (if_false_label);
8018 }
8019 else if (if_false_label)
8020 {
8021 rtx insn;
8022 rtx prev = PREV_INSN (get_last_insn ());
8023 rtx branch = 0;
8024
8025 /* Output the branch with the opposite condition. Then try to invert
8026 what is generated. If more than one insn is a branch, or if the
8027 branch is not the last insn written, abort. If we can't invert
8028 the branch, emit make a true label, redirect this jump to that,
8029 emit a jump to the false label and define the true label. */
8030
8031 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8032 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
8033 else
8034 abort ();
8035
8036 /* Here we get the insn before what was just emitted.
8037 On some machines, emitting the branch can discard
8038 the previous compare insn and emit a replacement. */
8039 if (prev == 0)
8040 /* If there's only one preceding insn... */
8041 insn = get_insns ();
8042 else
8043 insn = NEXT_INSN (prev);
8044
8045 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
8046 if (GET_CODE (insn) == JUMP_INSN)
8047 {
8048 if (branch)
8049 abort ();
8050 branch = insn;
8051 }
8052
8053 if (branch != get_last_insn ())
8054 abort ();
8055
8056 if (! invert_jump (branch, if_false_label))
8057 {
8058 if_true_label = gen_label_rtx ();
8059 redirect_jump (branch, if_true_label);
8060 emit_jump (if_false_label);
8061 emit_label (if_true_label);
8062 }
8063 }
8064 }
8065 \f
8066 /* Generate code for a comparison expression EXP
8067 (including code to compute the values to be compared)
8068 and set (CC0) according to the result.
8069 SIGNED_CODE should be the rtx operation for this comparison for
8070 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
8071
8072 We force a stack adjustment unless there are currently
8073 things pushed on the stack that aren't yet used. */
8074
8075 static rtx
8076 compare (exp, signed_code, unsigned_code)
8077 register tree exp;
8078 enum rtx_code signed_code, unsigned_code;
8079 {
8080 register rtx op0
8081 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8082 register rtx op1
8083 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8084 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
8085 register enum machine_mode mode = TYPE_MODE (type);
8086 int unsignedp = TREE_UNSIGNED (type);
8087 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
8088
8089 return compare_from_rtx (op0, op1, code, unsignedp, mode,
8090 ((mode == BLKmode)
8091 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
8092 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
8093 }
8094
8095 /* Like compare but expects the values to compare as two rtx's.
8096 The decision as to signed or unsigned comparison must be made by the caller.
8097
8098 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
8099 compared.
8100
8101 If ALIGN is non-zero, it is the alignment of this type; if zero, the
8102 size of MODE should be used. */
8103
8104 rtx
8105 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
8106 register rtx op0, op1;
8107 enum rtx_code code;
8108 int unsignedp;
8109 enum machine_mode mode;
8110 rtx size;
8111 int align;
8112 {
8113 rtx tem;
8114
8115 /* If one operand is constant, make it the second one. Only do this
8116 if the other operand is not constant as well. */
8117
8118 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
8119 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
8120 {
8121 tem = op0;
8122 op0 = op1;
8123 op1 = tem;
8124 code = swap_condition (code);
8125 }
8126
8127 if (flag_force_mem)
8128 {
8129 op0 = force_not_mem (op0);
8130 op1 = force_not_mem (op1);
8131 }
8132
8133 do_pending_stack_adjust ();
8134
8135 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
8136 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
8137 return tem;
8138
8139 #if 0
8140 /* There's no need to do this now that combine.c can eliminate lots of
8141 sign extensions. This can be less efficient in certain cases on other
8142 machines.
8143
8144 /* If this is a signed equality comparison, we can do it as an
8145 unsigned comparison since zero-extension is cheaper than sign
8146 extension and comparisons with zero are done as unsigned. This is
8147 the case even on machines that can do fast sign extension, since
8148 zero-extension is easier to combine with other operations than
8149 sign-extension is. If we are comparing against a constant, we must
8150 convert it to what it would look like unsigned. */
8151 if ((code == EQ || code == NE) && ! unsignedp
8152 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
8153 {
8154 if (GET_CODE (op1) == CONST_INT
8155 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
8156 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
8157 unsignedp = 1;
8158 }
8159 #endif
8160
8161 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
8162
8163 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
8164 }
8165 \f
8166 /* Generate code to calculate EXP using a store-flag instruction
8167 and return an rtx for the result. EXP is either a comparison
8168 or a TRUTH_NOT_EXPR whose operand is a comparison.
8169
8170 If TARGET is nonzero, store the result there if convenient.
8171
8172 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
8173 cheap.
8174
8175 Return zero if there is no suitable set-flag instruction
8176 available on this machine.
8177
8178 Once expand_expr has been called on the arguments of the comparison,
8179 we are committed to doing the store flag, since it is not safe to
8180 re-evaluate the expression. We emit the store-flag insn by calling
8181 emit_store_flag, but only expand the arguments if we have a reason
8182 to believe that emit_store_flag will be successful. If we think that
8183 it will, but it isn't, we have to simulate the store-flag with a
8184 set/jump/set sequence. */
8185
8186 static rtx
8187 do_store_flag (exp, target, mode, only_cheap)
8188 tree exp;
8189 rtx target;
8190 enum machine_mode mode;
8191 int only_cheap;
8192 {
8193 enum rtx_code code;
8194 tree arg0, arg1, type;
8195 tree tem;
8196 enum machine_mode operand_mode;
8197 int invert = 0;
8198 int unsignedp;
8199 rtx op0, op1;
8200 enum insn_code icode;
8201 rtx subtarget = target;
8202 rtx result, label, pattern, jump_pat;
8203
8204 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8205 result at the end. We can't simply invert the test since it would
8206 have already been inverted if it were valid. This case occurs for
8207 some floating-point comparisons. */
8208
8209 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8210 invert = 1, exp = TREE_OPERAND (exp, 0);
8211
8212 arg0 = TREE_OPERAND (exp, 0);
8213 arg1 = TREE_OPERAND (exp, 1);
8214 type = TREE_TYPE (arg0);
8215 operand_mode = TYPE_MODE (type);
8216 unsignedp = TREE_UNSIGNED (type);
8217
8218 /* We won't bother with BLKmode store-flag operations because it would mean
8219 passing a lot of information to emit_store_flag. */
8220 if (operand_mode == BLKmode)
8221 return 0;
8222
8223 STRIP_NOPS (arg0);
8224 STRIP_NOPS (arg1);
8225
8226 /* Get the rtx comparison code to use. We know that EXP is a comparison
8227 operation of some type. Some comparisons against 1 and -1 can be
8228 converted to comparisons with zero. Do so here so that the tests
8229 below will be aware that we have a comparison with zero. These
8230 tests will not catch constants in the first operand, but constants
8231 are rarely passed as the first operand. */
8232
8233 switch (TREE_CODE (exp))
8234 {
8235 case EQ_EXPR:
8236 code = EQ;
8237 break;
8238 case NE_EXPR:
8239 code = NE;
8240 break;
8241 case LT_EXPR:
8242 if (integer_onep (arg1))
8243 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8244 else
8245 code = unsignedp ? LTU : LT;
8246 break;
8247 case LE_EXPR:
8248 if (integer_all_onesp (arg1))
8249 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
8250 else
8251 code = unsignedp ? LEU : LE;
8252 break;
8253 case GT_EXPR:
8254 if (integer_all_onesp (arg1))
8255 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
8256 else
8257 code = unsignedp ? GTU : GT;
8258 break;
8259 case GE_EXPR:
8260 if (integer_onep (arg1))
8261 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8262 else
8263 code = unsignedp ? GEU : GE;
8264 break;
8265 default:
8266 abort ();
8267 }
8268
8269 /* Put a constant second. */
8270 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8271 {
8272 tem = arg0; arg0 = arg1; arg1 = tem;
8273 code = swap_condition (code);
8274 }
8275
8276 /* If this is an equality or inequality test of a single bit, we can
8277 do this by shifting the bit being tested to the low-order bit and
8278 masking the result with the constant 1. If the condition was EQ,
8279 we xor it with 1. This does not require an scc insn and is faster
8280 than an scc insn even if we have it. */
8281
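/* For example, `(x & 0x08) != 0' is expanded here as `(x >> 3) & 1', and
   `(x & 0x08) == 0' as `((x >> 3) & 1) ^ 1', with no scc or branch insn
   needed.  */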
8282 if ((code == NE || code == EQ)
8283 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8284 && integer_pow2p (TREE_OPERAND (arg0, 1))
8285 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
8286 {
8287 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
8288 NULL_RTX, VOIDmode, 0)));
8289
8290 if (subtarget == 0 || GET_CODE (subtarget) != REG
8291 || GET_MODE (subtarget) != operand_mode
8292 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
8293 subtarget = 0;
8294
8295 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
8296
8297 if (bitnum != 0)
8298 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
8299 size_int (bitnum), target, 1);
8300
8301 if (GET_MODE (op0) != mode)
8302 op0 = convert_to_mode (mode, op0, 1);
8303
8304 if (bitnum != TYPE_PRECISION (type) - 1)
8305 op0 = expand_and (op0, const1_rtx, target);
8306
8307 if ((code == EQ && ! invert) || (code == NE && invert))
8308 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
8309 OPTAB_LIB_WIDEN);
8310
8311 return op0;
8312 }
8313
8314 /* Now see if we are likely to be able to do this. Return if not. */
8315 if (! can_compare_p (operand_mode))
8316 return 0;
8317 icode = setcc_gen_code[(int) code];
8318 if (icode == CODE_FOR_nothing
8319 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
8320 {
8321 /* We can only do this if it is one of the special cases that
8322 can be handled without an scc insn. */
8323 if ((code == LT && integer_zerop (arg1))
8324 || (! only_cheap && code == GE && integer_zerop (arg1)))
8325 ;
8326 else if (BRANCH_COST >= 0
8327 && ! only_cheap && (code == NE || code == EQ)
8328 && TREE_CODE (type) != REAL_TYPE
8329 && ((abs_optab->handlers[(int) operand_mode].insn_code
8330 != CODE_FOR_nothing)
8331 || (ffs_optab->handlers[(int) operand_mode].insn_code
8332 != CODE_FOR_nothing)))
8333 ;
8334 else
8335 return 0;
8336 }
8337
8338 preexpand_calls (exp);
8339 if (subtarget == 0 || GET_CODE (subtarget) != REG
8340 || GET_MODE (subtarget) != operand_mode
8341 || ! safe_from_p (subtarget, arg1))
8342 subtarget = 0;
8343
8344 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
8345 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
8346
8347 if (target == 0)
8348 target = gen_reg_rtx (mode);
8349
8350 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
8351 because, if the emit_store_flag does anything it will succeed and
8352 OP0 and OP1 will not be used subsequently. */
8353
8354 result = emit_store_flag (target, code,
8355 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
8356 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
8357 operand_mode, unsignedp, 1);
8358
8359 if (result)
8360 {
8361 if (invert)
8362 result = expand_binop (mode, xor_optab, result, const1_rtx,
8363 result, 0, OPTAB_LIB_WIDEN);
8364 return result;
8365 }
8366
8367 /* If this failed, we have to do this with set/compare/jump/set code. */
8368 if (target == 0 || GET_CODE (target) != REG
8369 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8370 target = gen_reg_rtx (GET_MODE (target));
8371
8372 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8373 result = compare_from_rtx (op0, op1, code, unsignedp,
8374 operand_mode, NULL_RTX, 0);
8375 if (GET_CODE (result) == CONST_INT)
8376 return (((result == const0_rtx && ! invert)
8377 || (result != const0_rtx && invert))
8378 ? const0_rtx : const1_rtx);
8379
8380 label = gen_label_rtx ();
8381 if (bcc_gen_fctn[(int) code] == 0)
8382 abort ();
8383
8384 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8385 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8386 emit_label (label);
8387
8388 return target;
8389 }
8390 \f
8391 /* Generate a tablejump instruction (used for switch statements). */
8392
8393 #ifdef HAVE_tablejump
8394
8395 /* INDEX is the value being switched on, with the lowest value
8396 in the table already subtracted.
8397 MODE is its expected mode (needed if INDEX is constant).
8398 RANGE is the length of the jump table.
8399 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8400
8401 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8402 index value is out of range. */
8403
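/* For example, for `switch (x)' with case labels 5 through 8, the caller has
   already computed INDEX = x - 5 and RANGE = 3.  The single unsigned
   comparison emitted below, jump-if RANGE < INDEX, then reaches DEFAULT_LABEL
   both when x > 8 and when x < 5 (the subtraction wraps INDEX around to a
   large unsigned value).  */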
8404 void
8405 do_tablejump (index, mode, range, table_label, default_label)
8406 rtx index, range, table_label, default_label;
8407 enum machine_mode mode;
8408 {
8409 register rtx temp, vector;
8410
8411 /* Do an unsigned comparison (in the proper mode) between the index
8412 expression and the value which represents the length of the range.
8413 Since we just finished subtracting the lower bound of the range
8414 from the index expression, this comparison allows us to simultaneously
8415 check that the original index expression value is both greater than
8416 or equal to the minimum value of the range and less than or equal to
8417 the maximum value of the range. */
8418
8419 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
8420 emit_jump_insn (gen_bltu (default_label));
8421
8422 /* If index is in range, it must fit in Pmode.
8423 Convert to Pmode so we can index with it. */
8424 if (mode != Pmode)
8425 index = convert_to_mode (Pmode, index, 1);
8426
8427 /* If flag_force_addr were to affect this address
8428 it could interfere with the tricky assumptions made
8429 about addresses that contain label-refs,
8430 which may be valid only very near the tablejump itself. */
8431 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8432 GET_MODE_SIZE, because this indicates how large insns are. The other
8433 uses should all be Pmode, because they are addresses. This code
8434 could fail if addresses and insns are not the same size. */
8435 index = memory_address_noforce
8436 (CASE_VECTOR_MODE,
8437 gen_rtx (PLUS, Pmode,
8438 gen_rtx (MULT, Pmode, index,
8439 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8440 gen_rtx (LABEL_REF, Pmode, table_label)));
8441 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8442 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
8443 RTX_UNCHANGING_P (vector) = 1;
8444 convert_move (temp, vector, 0);
8445
8446 emit_jump_insn (gen_tablejump (temp, table_label));
8447
8448 #ifndef CASE_VECTOR_PC_RELATIVE
8449 /* If we are generating PIC code or if the table is PC-relative, the
8450 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8451 if (! flag_pic)
8452 emit_barrier ();
8453 #endif
8454 }
8455
8456 #endif /* HAVE_tablejump */