/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
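
/* For example, CEIL (11, 4) is 3: eleven bytes occupy three four-byte
   words, with the last word only partly used.  */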

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when flag_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address PROTO ((int));

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree, int));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
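
/* Illustrative note: emit_block_move below compares
   move_by_pieces_ninsns (size, align) against MOVE_RATIO to decide
   whether to expand a constant-size copy inline.  With the default of
   15 and 4-byte words, a 64-byte copy (16 SImode moves) would instead
   go through a movstr pattern or a library call.  */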

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
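
/* Illustrative reading of the tables just filled in: if
   direct_load[(int) SImode] is 1, the target recognized
   (set (reg:SI) (mem:SI)), so an SImode field can be loaded from
   memory directly rather than through a wider mode.  */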

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}
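
/* Illustrative sketch of how a caller uses this queue (an assumed
   example, not code from this file): when expanding `i++' whose old
   value is still needed, one might write

     rtx q = enqueue_insn (i_rtx, gen_add2_insn (i_rtx, const1_rtx));

   then refer to protect_from_queue (q, 0) wherever the pre-increment
   value is required, and finally let emit_queue perform the deferred
   addition.  */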

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
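
/* Usage sketch for convert_move (illustrative): widening an HImode
   pseudo into a fresh SImode pseudo with zero extension would be

     rtx wide = gen_reg_rtx (SImode);
     convert_move (wide, narrow_hi, 1);

   where the final argument 1 says the source is unsigned; passing 0
   requests sign extension instead.  */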

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
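
/* Worked example (illustrative): convert_modes (SImode, QImode,
   GEN_INT (-1), 1) takes the CONST_INT path above with width == 8,
   masks the value with 0xff, and returns (const_int 255) instead of a
   negative SImode constant.  */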
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
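
/* Worked example (illustrative): with MOVE_MAX == 4 and align == 4,
   copying len == 11 bytes emits two SImode moves, one HImode move, and
   one QImode move, visiting modes from widest to narrowest until
   data.len reaches zero.  */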

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
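
/* For the 11-byte example above this returns 11/4 + 3/2 + 1/1 = 4,
   the count emit_block_move compares against MOVE_RATIO.  */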

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
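
/* Usage sketch (illustrative): to copy a 32-byte block known to be
   word-aligned, a caller would write

     emit_block_move (x, y, GEN_INT (32), 4);

   with X and Y BLKmode MEMs; ALIGN is in bytes, as documented above.  */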
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */


void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_lowpart (GET_MODE (target_reg), y);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
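
/* Illustrative shape of such a PARALLEL (an assumed example, not taken
   from a particular target): a value split between a float register and
   an integer register might be described as

     (parallel [(expr_list (reg:DF 32) (const_int 0))
		(expr_list (reg:SI 3) (const_int 8))])

   where each const_int is the byte offset of that piece within Y.  */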

/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
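
/* For instance (illustrative), use_regs (&call_fusage, 3, 2) records
   USEs of hard registers 3 and 4, marking them as read by a following
   call insn.  */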
1934
1935 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1936 PARALLEL REGS. This is for calls that pass values in multiple
1937 non-contiguous locations. The Irix 6 ABI has examples of this. */
1938
1939 void
1940 use_group_regs (call_fusage, regs)
1941 rtx *call_fusage;
1942 rtx regs;
1943 {
1944 int i;
1945
1946 for (i = 0; i < XVECLEN (regs, 0); i++)
1947 {
1948 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
1949
1950 /* A NULL entry means the parameter goes both on the stack and in
1951 registers. This can also be a MEM for targets that pass values
1952 partially on the stack and partially in registers. */
1953 if (reg != 0 && GET_CODE (reg) == REG)
1954 use_reg (call_fusage, reg);
1955 }
1956 }
1957 \f
1958 /* Generate several move instructions to clear LEN bytes of block TO.
1959 (A MEM rtx with BLKmode). The caller must pass TO through
1960 protect_from_queue before calling. ALIGN (in bytes) is the maximum
1961 alignment we can assume. */
1962
1963 static void
1964 clear_by_pieces (to, len, align)
1965 rtx to;
1966 int len, align;
1967 {
1968 struct clear_by_pieces data;
1969 rtx to_addr = XEXP (to, 0);
1970 int max_size = MOVE_MAX + 1;
1971
1972 data.offset = 0;
1973 data.to_addr = to_addr;
1974 data.to = to;
1975 data.autinc_to
1976 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1977 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1978
1979 data.explicit_inc_to = 0;
1980 data.reverse
1981 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1982 if (data.reverse) data.offset = len;
1983 data.len = len;
1984
1985 data.to_struct = MEM_IN_STRUCT_P (to);
1986
1987 /* If copying requires more than two move insns,
1988 copy addresses to registers (to make displacements shorter)
1989 and use post-increment if available. */
1990 if (!data.autinc_to
1991 && move_by_pieces_ninsns (len, align) > 2)
1992 {
1993 #ifdef HAVE_PRE_DECREMENT
1994 if (data.reverse && ! data.autinc_to)
1995 {
1996 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1997 data.autinc_to = 1;
1998 data.explicit_inc_to = -1;
1999 }
2000 #endif
2001 #ifdef HAVE_POST_INCREMENT
2002 if (! data.reverse && ! data.autinc_to)
2003 {
2004 data.to_addr = copy_addr_to_reg (to_addr);
2005 data.autinc_to = 1;
2006 data.explicit_inc_to = 1;
2007 }
2008 #endif
2009 if (!data.autinc_to && CONSTANT_P (to_addr))
2010 data.to_addr = copy_addr_to_reg (to_addr);
2011 }
2012
2013 if (! SLOW_UNALIGNED_ACCESS
2014 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2015 align = MOVE_MAX;
2016
2017 /* First move what we can in the largest integer mode, then go to
2018 successively smaller modes. */
2019
2020 while (max_size > 1)
2021 {
2022 enum machine_mode mode = VOIDmode, tmode;
2023 enum insn_code icode;
2024
2025 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2026 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2027 if (GET_MODE_SIZE (tmode) < max_size)
2028 mode = tmode;
2029
2030 if (mode == VOIDmode)
2031 break;
2032
2033 icode = mov_optab->handlers[(int) mode].insn_code;
2034 if (icode != CODE_FOR_nothing
2035 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2036 GET_MODE_SIZE (mode)))
2037 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2038
2039 max_size = GET_MODE_SIZE (mode);
2040 }
2041
2042 /* The code above should have handled everything. */
2043 if (data.len != 0)
2044 abort ();
2045 }
2046
2047 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2048 with move instructions for mode MODE. GENFUN is the gen_... function
2049 to make a move insn for that mode. DATA has all the other info. */
2050
2051 static void
2052 clear_by_pieces_1 (genfun, mode, data)
2053 rtx (*genfun) PROTO ((rtx, ...));
2054 enum machine_mode mode;
2055 struct clear_by_pieces *data;
2056 {
2057 register int size = GET_MODE_SIZE (mode);
2058 register rtx to1;
2059
2060 while (data->len >= size)
2061 {
2062 if (data->reverse) data->offset -= size;
2063
2064 to1 = (data->autinc_to
2065 ? gen_rtx_MEM (mode, data->to_addr)
2066 : copy_rtx (change_address (data->to, mode,
2067 plus_constant (data->to_addr,
2068 data->offset))));
2069 MEM_IN_STRUCT_P (to1) = data->to_struct;
2070
2071 #ifdef HAVE_PRE_DECREMENT
2072 if (data->explicit_inc_to < 0)
2073 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2074 #endif
2075
2076 emit_insn ((*genfun) (to1, const0_rtx));
2077 #ifdef HAVE_POST_INCREMENT
2078 if (data->explicit_inc_to > 0)
2079 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2080 #endif
2081
2082 if (! data->reverse) data->offset += size;
2083
2084 data->len -= size;
2085 }
2086 }
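
/* Worked example (editorial addition): the widest-first descent that
   clear_by_pieces and clear_by_pieces_1 implement, reduced to plain C
   over byte counts alone (the real code also consults alignment and
   the mov_optab handlers).  With LEN == 11 and MAX == 8 this clears
   8, then 2, then 1 bytes.  */

static void
example_clear_widths (len, max)
     int len, max;
{
  int size;

  for (size = max; size >= 1; size /= 2)
    while (len >= size)
      {
        /* clear_by_pieces_1 would emit one store of SIZE zero bytes.  */
        len -= size;
      }
}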
2087 \f
2088 /* Write zeros through the storage of OBJECT.
2089 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2090 the maximum alignment we can assume it has, measured in bytes.
2091
2092 If we end up calling a library function that returns a value, return it. */
2093
2094 rtx
2095 clear_storage (object, size, align)
2096 rtx object;
2097 rtx size;
2098 int align;
2099 {
2100 rtx retval = 0;
2101
2102 if (GET_MODE (object) == BLKmode)
2103 {
2104 object = protect_from_queue (object, 1);
2105 size = protect_from_queue (size, 0);
2106
2107 if (GET_CODE (size) == CONST_INT
2108 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2109 clear_by_pieces (object, INTVAL (size), align);
2110
2111 else
2112 {
2113 /* Try the most limited insn first, because there's no point
2114 including more than one in the machine description unless
2115 the more limited one has some advantage. */
2116
2117 rtx opalign = GEN_INT (align);
2118 enum machine_mode mode;
2119
2120 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2121 mode = GET_MODE_WIDER_MODE (mode))
2122 {
2123 enum insn_code code = clrstr_optab[(int) mode];
2124
2125 if (code != CODE_FOR_nothing
2126 /* We don't need MODE to be narrower than
2127 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2128 the mode mask, as it is returned by the macro, it will
2129 definitely be less than the actual mode mask. */
2130 && ((GET_CODE (size) == CONST_INT
2131 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2132 <= (GET_MODE_MASK (mode) >> 1)))
2133 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2134 && (insn_operand_predicate[(int) code][0] == 0
2135 || (*insn_operand_predicate[(int) code][0]) (object,
2136 BLKmode))
2137 && (insn_operand_predicate[(int) code][2] == 0
2138 || (*insn_operand_predicate[(int) code][2]) (opalign,
2139 VOIDmode)))
2140 {
2141 rtx op1;
2142 rtx last = get_last_insn ();
2143 rtx pat;
2144
2145 op1 = convert_to_mode (mode, size, 1);
2146 if (insn_operand_predicate[(int) code][1] != 0
2147 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2148 mode))
2149 op1 = copy_to_mode_reg (mode, op1);
2150
2151 pat = GEN_FCN ((int) code) (object, op1, opalign);
2152 if (pat)
2153 {
2154 emit_insn (pat);
2155 return 0;
2156 }
2157 else
2158 delete_insns_since (last);
2159 }
2160 }
2161
2162
2163 #ifdef TARGET_MEM_FUNCTIONS
2164 retval
2165 = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
2166 ptr_mode, 3,
2167 XEXP (object, 0), Pmode,
2168 const0_rtx,
2169 TYPE_MODE (integer_type_node),
2170 convert_to_mode
2171 (TYPE_MODE (sizetype), size,
2172 TREE_UNSIGNED (sizetype)),
2173 TYPE_MODE (sizetype));
2174 #else
2175 emit_library_call (bzero_libfunc, 0,
2176 VOIDmode, 2,
2177 XEXP (object, 0), Pmode,
2178 convert_to_mode
2179 (TYPE_MODE (integer_type_node), size,
2180 TREE_UNSIGNED (integer_type_node)),
2181 TYPE_MODE (integer_type_node));
2182 #endif
2183 }
2184 }
2185 else
2186 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2187
2188 return retval;
2189 }
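
/* Usage sketch (editorial addition): zeroing a 16-byte BLKmode stack
   temporary via clear_storage.  The size and the 4-byte alignment are
   hypothetical.  */

static void
example_clear_temp ()
{
  rtx mem = assign_stack_temp (BLKmode, 16, 0);

  clear_storage (mem, GEN_INT (16), 4);
}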
2190
2191 /* Generate code to copy Y into X.
2192 Both Y and X must have the same mode, except that
2193 Y can be a constant with VOIDmode.
2194 This mode cannot be BLKmode; use emit_block_move for that.
2195
2196 Return the last instruction emitted. */
2197
2198 rtx
2199 emit_move_insn (x, y)
2200 rtx x, y;
2201 {
2202 enum machine_mode mode = GET_MODE (x);
2203
2204 x = protect_from_queue (x, 1);
2205 y = protect_from_queue (y, 0);
2206
2207 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2208 abort ();
2209
2210 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2211 y = force_const_mem (mode, y);
2212
2213 /* If X or Y are memory references, verify that their addresses are valid
2214 for the machine. */
2215 if (GET_CODE (x) == MEM
2216 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2217 && ! push_operand (x, GET_MODE (x)))
2218 || (flag_force_addr
2219 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2220 x = change_address (x, VOIDmode, XEXP (x, 0));
2221
2222 if (GET_CODE (y) == MEM
2223 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2224 || (flag_force_addr
2225 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2226 y = change_address (y, VOIDmode, XEXP (y, 0));
2227
2228 if (mode == BLKmode)
2229 abort ();
2230
2231 return emit_move_insn_1 (x, y);
2232 }
2233
2234 /* Low level part of emit_move_insn.
2235 Called just like emit_move_insn, but assumes X and Y
2236 are basically valid. */
2237
2238 rtx
2239 emit_move_insn_1 (x, y)
2240 rtx x, y;
2241 {
2242 enum machine_mode mode = GET_MODE (x);
2243 enum machine_mode submode;
2244 enum mode_class class = GET_MODE_CLASS (mode);
2245 int i;
2246
2247 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2248 return
2249 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2250
2251 /* Expand complex moves by moving real part and imag part, if possible. */
2252 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2253 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2254 * BITS_PER_UNIT),
2255 (class == MODE_COMPLEX_INT
2256 ? MODE_INT : MODE_FLOAT),
2257 0))
2258 && (mov_optab->handlers[(int) submode].insn_code
2259 != CODE_FOR_nothing))
2260 {
2261 /* Don't split destination if it is a stack push. */
2262 int stack = push_operand (x, GET_MODE (x));
2263
2264 /* If this is a stack push, push the highpart first, so it
2265 will be in the argument order.
2266
2267 In that case, change_address is used only to convert
2268 the mode, not to change the address. */
2269 if (stack)
2270 {
2271 /* Note that the real part always precedes the imag part in memory
2272 regardless of the machine's endianness. */
2273 #ifdef STACK_GROWS_DOWNWARD
2274 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2275 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2276 gen_imagpart (submode, y)));
2277 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2278 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2279 gen_realpart (submode, y)));
2280 #else
2281 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2282 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2283 gen_realpart (submode, y)));
2284 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2285 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2286 gen_imagpart (submode, y)));
2287 #endif
2288 }
2289 else
2290 {
2291 /* Show the output dies here. */
2292 if (x != y)
2293 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2294
2295 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2296 (gen_realpart (submode, x), gen_realpart (submode, y)));
2297 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2298 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2299 }
2300
2301 return get_last_insn ();
2302 }
2303
2304 /* This will handle any multi-word mode that lacks a move_insn pattern.
2305 However, you will get better code if you define such patterns,
2306 even if they must turn into multiple assembler instructions. */
2307 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2308 {
2309 rtx last_insn = 0;
2310
2311 #ifdef PUSH_ROUNDING
2312
2313 /* If X is a push on the stack, do the push now and replace
2314 X with a reference to the stack pointer. */
2315 if (push_operand (x, GET_MODE (x)))
2316 {
2317 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2318 x = change_address (x, VOIDmode, stack_pointer_rtx);
2319 }
2320 #endif
2321
2322 /* Show the output dies here. */
2323 if (x != y)
2324 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2325
2326 for (i = 0;
2327 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2328 i++)
2329 {
2330 rtx xpart = operand_subword (x, i, 1, mode);
2331 rtx ypart = operand_subword (y, i, 1, mode);
2332
2333 /* If we can't get a part of Y, put Y into memory if it is a
2334 constant. Otherwise, force it into a register. If we still
2335 can't get a part of Y, abort. */
2336 if (ypart == 0 && CONSTANT_P (y))
2337 {
2338 y = force_const_mem (mode, y);
2339 ypart = operand_subword (y, i, 1, mode);
2340 }
2341 else if (ypart == 0)
2342 ypart = operand_subword_force (y, i, mode);
2343
2344 if (xpart == 0 || ypart == 0)
2345 abort ();
2346
2347 last_insn = emit_move_insn (xpart, ypart);
2348 }
2349
2350 return last_insn;
2351 }
2352 else
2353 abort ();
2354 }
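
/* Sketch (editorial addition): what the complex-mode branch above
   reduces a non-push DCmode move to when DCmode itself has no move
   pattern -- one DFmode move per component.  X and Y are assumed to be
   DCmode pseudos.  */

static void
example_move_complex (x, y)
     rtx x, y;
{
  emit_move_insn (gen_realpart (DFmode, x), gen_realpart (DFmode, y));
  emit_move_insn (gen_imagpart (DFmode, x), gen_imagpart (DFmode, y));
}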
2355 \f
2356 /* Pushing data onto the stack. */
2357
2358 /* Push a block of length SIZE (perhaps variable)
2359 and return an rtx to address the beginning of the block.
2360 Note that it is not possible for the value returned to be a QUEUED.
2361 The value may be virtual_outgoing_args_rtx.
2362
2363 EXTRA is the number of bytes of padding to push in addition to SIZE.
2364 BELOW nonzero means this padding comes at low addresses;
2365 otherwise, the padding comes at high addresses. */
2366
2367 rtx
2368 push_block (size, extra, below)
2369 rtx size;
2370 int extra, below;
2371 {
2372 register rtx temp;
2373
2374 size = convert_modes (Pmode, ptr_mode, size, 1);
2375 if (CONSTANT_P (size))
2376 anti_adjust_stack (plus_constant (size, extra));
2377 else if (GET_CODE (size) == REG && extra == 0)
2378 anti_adjust_stack (size);
2379 else
2380 {
2381 rtx temp = copy_to_mode_reg (Pmode, size);
2382 if (extra != 0)
2383 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2384 temp, 0, OPTAB_LIB_WIDEN);
2385 anti_adjust_stack (temp);
2386 }
2387
2388 #ifdef STACK_GROWS_DOWNWARD
2389 temp = virtual_outgoing_args_rtx;
2390 if (extra != 0 && below)
2391 temp = plus_constant (temp, extra);
2392 #else
2393 if (GET_CODE (size) == CONST_INT)
2394 temp = plus_constant (virtual_outgoing_args_rtx,
2395 - INTVAL (size) - (below ? 0 : extra));
2396 else if (extra != 0 && !below)
2397 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2398 negate_rtx (Pmode, plus_constant (size, extra)));
2399 else
2400 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2401 negate_rtx (Pmode, size));
2402 #endif
2403
2404 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2405 }
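
/* Usage sketch (editorial addition): reserve 24 bytes of stack, with no
   padding, and obtain the address of the block's beginning.  The size
   is hypothetical.  */

static rtx
example_push_block ()
{
  return push_block (GEN_INT (24), 0, 0);
}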
2406
2407 rtx
2408 gen_push_operand ()
2409 {
2410 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2411 }
2412
2413 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2414 block of SIZE bytes. */
2415
2416 static rtx
2417 get_push_address (size)
2418 int size;
2419 {
2420 register rtx temp;
2421
2422 if (STACK_PUSH_CODE == POST_DEC)
2423 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2424 else if (STACK_PUSH_CODE == POST_INC)
2425 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2426 else
2427 temp = stack_pointer_rtx;
2428
2429 return copy_to_reg (temp);
2430 }
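
/* Worked example (editorial addition): with the common PRE_DEC push
   (pointer decremented before each store), a block just pushed begins
   exactly at the updated stack pointer, so the default case above
   returns stack_pointer_rtx unchanged.  The POST_DEC and POST_INC
   cases add or subtract SIZE to compensate for the pointer having been
   moved past the data after the final store.  */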
2431
2432 /* Generate code to push X onto the stack, assuming it has mode MODE and
2433 type TYPE.
2434 MODE is redundant except when X is a CONST_INT (since they don't
2435 carry mode info).
2436 SIZE is an rtx for the size of data to be copied (in bytes),
2437 needed only if X is BLKmode.
2438
2439 ALIGN (in bytes) is the maximum alignment we can assume.
2440
2441 If PARTIAL and REG are both nonzero, then copy that many of the first
2442 words of X into registers starting with REG, and push the rest of X.
2443 The amount of space pushed is decreased by PARTIAL words,
2444 rounded *down* to a multiple of PARM_BOUNDARY.
2445 REG must be a hard register in this case.
2446 If REG is zero but PARTIAL is not, take all other actions for an
2447 argument partially in registers, but do not actually load any
2448 registers.
2449
2450 EXTRA is the amount in bytes of extra space to leave next to this arg.
2451 This is ignored if an argument block has already been allocated.
2452
2453 On a machine that lacks real push insns, ARGS_ADDR is the address of
2454 the bottom of the argument block for this call. We use indexing off there
2455 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2456 argument block has not been preallocated.
2457
2458 ARGS_SO_FAR is the size of args previously pushed for this call.
2459
2460 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2461 for arguments passed in registers. If nonzero, it will be the number
2462 of bytes required. */
2463
2464 void
2465 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2466 args_addr, args_so_far, reg_parm_stack_space)
2467 register rtx x;
2468 enum machine_mode mode;
2469 tree type;
2470 rtx size;
2471 int align;
2472 int partial;
2473 rtx reg;
2474 int extra;
2475 rtx args_addr;
2476 rtx args_so_far;
2477 int reg_parm_stack_space;
2478 {
2479 rtx xinner;
2480 enum direction stack_direction
2481 #ifdef STACK_GROWS_DOWNWARD
2482 = downward;
2483 #else
2484 = upward;
2485 #endif
2486
2487 /* Decide where to pad the argument: `downward' for below,
2488 `upward' for above, or `none' for don't pad it.
2489 Default is below for small data on big-endian machines; else above. */
2490 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2491
2492 /* Invert direction if stack is post-update. */
2493 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2494 if (where_pad != none)
2495 where_pad = (where_pad == downward ? upward : downward);
2496
2497 xinner = x = protect_from_queue (x, 0);
2498
2499 if (mode == BLKmode)
2500 {
2501 /* Copy a block into the stack, entirely or partially. */
2502
2503 register rtx temp;
2504 int used = partial * UNITS_PER_WORD;
2505 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2506 int skip;
2507
2508 if (size == 0)
2509 abort ();
2510
2511 used -= offset;
2512
2513 /* USED is now the # of bytes we need not copy to the stack
2514 because registers will take care of them. */
2515
2516 if (partial != 0)
2517 xinner = change_address (xinner, BLKmode,
2518 plus_constant (XEXP (xinner, 0), used));
2519
2520 /* If the partial register-part of the arg counts in its stack size,
2521 skip the part of stack space corresponding to the registers.
2522 Otherwise, start copying to the beginning of the stack space,
2523 by setting SKIP to 0. */
2524 skip = (reg_parm_stack_space == 0) ? 0 : used;
2525
2526 #ifdef PUSH_ROUNDING
2527 /* Do it with several push insns if that doesn't take lots of insns
2528 and if there is no difficulty with push insns that skip bytes
2529 on the stack for alignment purposes. */
2530 if (args_addr == 0
2531 && GET_CODE (size) == CONST_INT
2532 && skip == 0
2533 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2534 < MOVE_RATIO)
2535 /* Here we avoid the case of a structure whose weak alignment
2536 forces many pushes of a small amount of data,
2537 and such small pushes do rounding that causes trouble. */
2538 && ((! SLOW_UNALIGNED_ACCESS)
2539 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2540 || PUSH_ROUNDING (align) == align)
2541 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2542 {
2543 /* Push padding now if padding above and stack grows down,
2544 or if padding below and stack grows up.
2545 But if space already allocated, this has already been done. */
2546 if (extra && args_addr == 0
2547 && where_pad != none && where_pad != stack_direction)
2548 anti_adjust_stack (GEN_INT (extra));
2549
2550 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2551 INTVAL (size) - used, align);
2552
2553 if (flag_check_memory_usage && ! in_check_memory_usage)
2554 {
2555 rtx temp;
2556
2557 in_check_memory_usage = 1;
2558 temp = get_push_address (INTVAL (size) - used);
2559 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2560 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2561 temp, ptr_mode,
2562 XEXP (xinner, 0), ptr_mode,
2563 GEN_INT (INTVAL (size) - used),
2564 TYPE_MODE (sizetype));
2565 else
2566 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2567 temp, ptr_mode,
2568 GEN_INT (INTVAL (size) - used),
2569 TYPE_MODE (sizetype),
2570 GEN_INT (MEMORY_USE_RW),
2571 TYPE_MODE (integer_type_node));
2572 in_check_memory_usage = 0;
2573 }
2574 }
2575 else
2576 #endif /* PUSH_ROUNDING */
2577 {
2578 /* Otherwise make space on the stack and copy the data
2579 to the address of that space. */
2580
2581 /* Deduct words put into registers from the size we must copy. */
2582 if (partial != 0)
2583 {
2584 if (GET_CODE (size) == CONST_INT)
2585 size = GEN_INT (INTVAL (size) - used);
2586 else
2587 size = expand_binop (GET_MODE (size), sub_optab, size,
2588 GEN_INT (used), NULL_RTX, 0,
2589 OPTAB_LIB_WIDEN);
2590 }
2591
2592 /* Get the address of the stack space.
2593 In this case, we do not deal with EXTRA separately.
2594 A single stack adjust will do. */
2595 if (! args_addr)
2596 {
2597 temp = push_block (size, extra, where_pad == downward);
2598 extra = 0;
2599 }
2600 else if (GET_CODE (args_so_far) == CONST_INT)
2601 temp = memory_address (BLKmode,
2602 plus_constant (args_addr,
2603 skip + INTVAL (args_so_far)));
2604 else
2605 temp = memory_address (BLKmode,
2606 plus_constant (gen_rtx_PLUS (Pmode,
2607 args_addr,
2608 args_so_far),
2609 skip));
2610 if (flag_check_memory_usage && ! in_check_memory_usage)
2611 {
2612 rtx target;
2613
2614 in_check_memory_usage = 1;
2615 target = copy_to_reg (temp);
2616 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2617 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2618 target, ptr_mode,
2619 XEXP (xinner, 0), ptr_mode,
2620 size, TYPE_MODE (sizetype));
2621 else
2622 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2623 target, ptr_mode,
2624 size, TYPE_MODE (sizetype),
2625 GEN_INT (MEMORY_USE_RW),
2626 TYPE_MODE (integer_type_node));
2627 in_check_memory_usage = 0;
2628 }
2629
2630 /* TEMP is the address of the block. Copy the data there. */
2631 if (GET_CODE (size) == CONST_INT
2632 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2633 < MOVE_RATIO))
2634 {
2635 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2636 INTVAL (size), align);
2637 goto ret;
2638 }
2639 else
2640 {
2641 rtx opalign = GEN_INT (align);
2642 enum machine_mode mode;
2643 rtx target = gen_rtx_MEM (BLKmode, temp);
2644
2645 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2646 mode != VOIDmode;
2647 mode = GET_MODE_WIDER_MODE (mode))
2648 {
2649 enum insn_code code = movstr_optab[(int) mode];
2650
2651 if (code != CODE_FOR_nothing
2652 && ((GET_CODE (size) == CONST_INT
2653 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2654 <= (GET_MODE_MASK (mode) >> 1)))
2655 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2656 && (insn_operand_predicate[(int) code][0] == 0
2657 || ((*insn_operand_predicate[(int) code][0])
2658 (target, BLKmode)))
2659 && (insn_operand_predicate[(int) code][1] == 0
2660 || ((*insn_operand_predicate[(int) code][1])
2661 (xinner, BLKmode)))
2662 && (insn_operand_predicate[(int) code][3] == 0
2663 || ((*insn_operand_predicate[(int) code][3])
2664 (opalign, VOIDmode))))
2665 {
2666 rtx op2 = convert_to_mode (mode, size, 1);
2667 rtx last = get_last_insn ();
2668 rtx pat;
2669
2670 if (insn_operand_predicate[(int) code][2] != 0
2671 && ! ((*insn_operand_predicate[(int) code][2])
2672 (op2, mode)))
2673 op2 = copy_to_mode_reg (mode, op2);
2674
2675 pat = GEN_FCN ((int) code) (target, xinner,
2676 op2, opalign);
2677 if (pat)
2678 {
2679 emit_insn (pat);
2680 goto ret;
2681 }
2682 else
2683 delete_insns_since (last);
2684 }
2685 }
2686 }
2687
2688 #ifndef ACCUMULATE_OUTGOING_ARGS
2689 /* If the source is referenced relative to the stack pointer,
2690 copy it to another register to stabilize it. We do not need
2691 to do this if we know that we won't be changing sp. */
2692
2693 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2694 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2695 temp = copy_to_reg (temp);
2696 #endif
2697
2698 /* Make inhibit_defer_pop nonzero around the library call
2699 to force it to pop the bcopy arguments right away. */
2700 NO_DEFER_POP;
2701 #ifdef TARGET_MEM_FUNCTIONS
2702 emit_library_call (memcpy_libfunc, 0,
2703 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2704 convert_to_mode (TYPE_MODE (sizetype),
2705 size, TREE_UNSIGNED (sizetype)),
2706 TYPE_MODE (sizetype));
2707 #else
2708 emit_library_call (bcopy_libfunc, 0,
2709 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2710 convert_to_mode (TYPE_MODE (integer_type_node),
2711 size,
2712 TREE_UNSIGNED (integer_type_node)),
2713 TYPE_MODE (integer_type_node));
2714 #endif
2715 OK_DEFER_POP;
2716 }
2717 }
2718 else if (partial > 0)
2719 {
2720 /* Scalar partly in registers. */
2721
2722 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2723 int i;
2724 int not_stack;
2725 /* # words of start of argument
2726 that we must make space for but need not store. */
2727 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2728 int args_offset = INTVAL (args_so_far);
2729 int skip;
2730
2731 /* Push padding now if padding above and stack grows down,
2732 or if padding below and stack grows up.
2733 But if space already allocated, this has already been done. */
2734 if (extra && args_addr == 0
2735 && where_pad != none && where_pad != stack_direction)
2736 anti_adjust_stack (GEN_INT (extra));
2737
2738 /* If we make space by pushing it, we might as well push
2739 the real data. Otherwise, we can leave OFFSET nonzero
2740 and leave the space uninitialized. */
2741 if (args_addr == 0)
2742 offset = 0;
2743
2744 /* Now NOT_STACK gets the number of words that we don't need to
2745 allocate on the stack. */
2746 not_stack = partial - offset;
2747
2748 /* If the partial register-part of the arg counts in its stack size,
2749 skip the part of stack space corresponding to the registers.
2750 Otherwise, start copying to the beginning of the stack space,
2751 by setting SKIP to 0. */
2752 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
2753
2754 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2755 x = validize_mem (force_const_mem (mode, x));
2756
2757 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2758 SUBREGs of such registers are not allowed. */
2759 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2760 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2761 x = copy_to_reg (x);
2762
2763 /* Loop over all the words allocated on the stack for this arg. */
2764 /* We can do it by words, because any scalar bigger than a word
2765 has a size that is a multiple of a word. */
2766 #ifndef PUSH_ARGS_REVERSED
2767 for (i = not_stack; i < size; i++)
2768 #else
2769 for (i = size - 1; i >= not_stack; i--)
2770 #endif
2771 if (i >= not_stack + offset)
2772 emit_push_insn (operand_subword_force (x, i, mode),
2773 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2774 0, args_addr,
2775 GEN_INT (args_offset + ((i - not_stack + skip)
2776 * UNITS_PER_WORD)),
2777 reg_parm_stack_space);
2778 }
2779 else
2780 {
2781 rtx addr;
2782 rtx target = NULL_RTX;
2783
2784 /* Push padding now if padding above and stack grows down,
2785 or if padding below and stack grows up.
2786 But if space already allocated, this has already been done. */
2787 if (extra && args_addr == 0
2788 && where_pad != none && where_pad != stack_direction)
2789 anti_adjust_stack (GEN_INT (extra));
2790
2791 #ifdef PUSH_ROUNDING
2792 if (args_addr == 0)
2793 addr = gen_push_operand ();
2794 else
2795 #endif
2796 {
2797 if (GET_CODE (args_so_far) == CONST_INT)
2798 addr
2799 = memory_address (mode,
2800 plus_constant (args_addr,
2801 INTVAL (args_so_far)));
2802 else
2803 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
2804 args_so_far));
2805 target = addr;
2806 }
2807
2808 emit_move_insn (gen_rtx_MEM (mode, addr), x);
2809
2810 if (flag_check_memory_usage && ! in_check_memory_usage)
2811 {
2812 in_check_memory_usage = 1;
2813 if (target == 0)
2814 target = get_push_address (GET_MODE_SIZE (mode));
2815
2816 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2817 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2818 target, ptr_mode,
2819 XEXP (x, 0), ptr_mode,
2820 GEN_INT (GET_MODE_SIZE (mode)),
2821 TYPE_MODE (sizetype));
2822 else
2823 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2824 target, ptr_mode,
2825 GEN_INT (GET_MODE_SIZE (mode)),
2826 TYPE_MODE (sizetype),
2827 GEN_INT (MEMORY_USE_RW),
2828 TYPE_MODE (integer_type_node));
2829 in_check_memory_usage = 0;
2830 }
2831 }
2832
2833 ret:
2834 /* If part should go in registers, copy that part
2835 into the appropriate registers. Do this now, at the end,
2836 since mem-to-mem copies above may do function calls. */
2837 if (partial > 0 && reg != 0)
2838 {
2839 /* Handle calls that pass values in multiple non-contiguous locations.
2840 The Irix 6 ABI has examples of this. */
2841 if (GET_CODE (reg) == PARALLEL)
2842 emit_group_load (reg, x);
2843 else
2844 move_block_to_reg (REGNO (reg), x, partial, mode);
2845 }
2846
2847 if (extra && args_addr == 0 && where_pad == stack_direction)
2848 anti_adjust_stack (GEN_INT (extra));
2849 }
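
/* Usage sketch (editorial addition): pushing one word-sized constant
   argument on a machine with push insns and no preallocated argument
   block (ARGS_ADDR == 0).  The constant, the alignment and the zero
   ARGS_SO_FAR are hypothetical.  */

static void
example_push_word_arg ()
{
  emit_push_insn (GEN_INT (42), SImode, NULL_TREE, NULL_RTX,
                  UNITS_PER_WORD, 0, NULL_RTX, 0, NULL_RTX,
                  const0_rtx, 0);
}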
2850 \f
2851 /* Expand an assignment that stores the value of FROM into TO.
2852 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2853 (This may contain a QUEUED rtx;
2854 if the value is constant, this rtx is a constant.)
2855 Otherwise, the returned value is NULL_RTX.
2856
2857 SUGGEST_REG is no longer actually used.
2858 It used to mean, copy the value through a register
2859 and return that register, if that is possible.
2860 We now use WANT_VALUE to decide whether to do this. */
2861
2862 rtx
2863 expand_assignment (to, from, want_value, suggest_reg)
2864 tree to, from;
2865 int want_value;
2866 int suggest_reg;
2867 {
2868 register rtx to_rtx = 0;
2869 rtx result;
2870
2871 /* Don't crash if the lhs of the assignment was erroneous. */
2872
2873 if (TREE_CODE (to) == ERROR_MARK)
2874 {
2875 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2876 return want_value ? result : NULL_RTX;
2877 }
2878
2879 /* Assignment of a structure component needs special treatment
2880 if the structure component's rtx is not simply a MEM.
2881 Assignment of an array element at a constant index, and assignment of
2882 an array element in an unaligned packed structure field, have the same
2883 problem. */
2884
2885 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2886 || TREE_CODE (to) == ARRAY_REF)
2887 {
2888 enum machine_mode mode1;
2889 int bitsize;
2890 int bitpos;
2891 tree offset;
2892 int unsignedp;
2893 int volatilep = 0;
2894 tree tem;
2895 int alignment;
2896
2897 push_temp_slots ();
2898 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2899 &unsignedp, &volatilep, &alignment);
2900
2901 /* If we are going to use store_bit_field and extract_bit_field,
2902 make sure to_rtx will be safe for multiple use. */
2903
2904 if (mode1 == VOIDmode && want_value)
2905 tem = stabilize_reference (tem);
2906
2907 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
2908 if (offset != 0)
2909 {
2910 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2911
2912 if (GET_CODE (to_rtx) != MEM)
2913 abort ();
2914
2915 if (GET_MODE (offset_rtx) != ptr_mode)
2916 {
2917 #ifdef POINTERS_EXTEND_UNSIGNED
2918 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
2919 #else
2920 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2921 #endif
2922 }
2923
2924 to_rtx = change_address (to_rtx, VOIDmode,
2925 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
2926 force_reg (ptr_mode, offset_rtx)));
2927 }
2928 if (volatilep)
2929 {
2930 if (GET_CODE (to_rtx) == MEM)
2931 {
2932 /* When the offset is zero, to_rtx is the address of the
2933 structure we are storing into, and hence may be shared.
2934 We must make a new MEM before setting the volatile bit. */
2935 if (offset == 0)
2936 to_rtx = copy_rtx (to_rtx);
2937
2938 MEM_VOLATILE_P (to_rtx) = 1;
2939 }
2940 #if 0 /* This was turned off because, when a field is volatile
2941 in an object which is not volatile, the object may be in a register,
2942 and then we would abort over here. */
2943 else
2944 abort ();
2945 #endif
2946 }
2947
2948 if (TREE_CODE (to) == COMPONENT_REF
2949 && TREE_READONLY (TREE_OPERAND (to, 1)))
2950 {
2951 if (offset == 0)
2952 to_rtx = copy_rtx (to_rtx);
2953
2954 RTX_UNCHANGING_P (to_rtx) = 1;
2955 }
2956
2957 /* Check the access. */
2958 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2959 {
2960 rtx to_addr;
2961 int size;
2962 int best_mode_size;
2963 enum machine_mode best_mode;
2964
2965 best_mode = get_best_mode (bitsize, bitpos,
2966 TYPE_ALIGN (TREE_TYPE (tem)),
2967 mode1, volatilep);
2968 if (best_mode == VOIDmode)
2969 best_mode = QImode;
2970
2971 best_mode_size = GET_MODE_BITSIZE (best_mode);
2972 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2973 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2974 size *= GET_MODE_SIZE (best_mode);
2975
2976 /* Check the access rights of the pointer. */
2977 if (size)
2978 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
2979 to_addr, ptr_mode,
2980 GEN_INT (size), TYPE_MODE (sizetype),
2981 GEN_INT (MEMORY_USE_WO),
2982 TYPE_MODE (integer_type_node));
2983 }
2984
2985 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2986 (want_value
2987 /* Spurious cast makes HPUX compiler happy. */
2988 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2989 : VOIDmode),
2990 unsignedp,
2991 /* Required alignment of containing datum. */
2992 alignment,
2993 int_size_in_bytes (TREE_TYPE (tem)));
2994 preserve_temp_slots (result);
2995 free_temp_slots ();
2996 pop_temp_slots ();
2997
2998 /* If the value is meaningful, convert RESULT to the proper mode.
2999 Otherwise, return nothing. */
3000 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3001 TYPE_MODE (TREE_TYPE (from)),
3002 result,
3003 TREE_UNSIGNED (TREE_TYPE (to)))
3004 : NULL_RTX);
3005 }
3006
3007 /* If the rhs is a function call and its value is not an aggregate,
3008 call the function before we start to compute the lhs.
3009 This is needed for correct code for cases such as
3010 val = setjmp (buf) on machines where reference to val
3011 requires loading up part of an address in a separate insn.
3012
3013 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3014 a promoted variable where the zero- or sign- extension needs to be done.
3015 Handling this in the normal way is safe because no computation is done
3016 before the call. */
3017 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3018 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3019 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3020 {
3021 rtx value;
3022
3023 push_temp_slots ();
3024 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3025 if (to_rtx == 0)
3026 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3027
3028 /* Handle calls that return values in multiple non-contiguous locations.
3029 The Irix 6 ABI has examples of this. */
3030 if (GET_CODE (to_rtx) == PARALLEL)
3031 emit_group_load (to_rtx, value);
3032 else if (GET_MODE (to_rtx) == BLKmode)
3033 emit_block_move (to_rtx, value, expr_size (from),
3034 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3035 else
3036 emit_move_insn (to_rtx, value);
3037 preserve_temp_slots (to_rtx);
3038 free_temp_slots ();
3039 pop_temp_slots ();
3040 return want_value ? to_rtx : NULL_RTX;
3041 }
3042
3043 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3044 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3045
3046 if (to_rtx == 0)
3047 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3048
3049 /* Don't move directly into a return register. */
3050 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3051 {
3052 rtx temp;
3053
3054 push_temp_slots ();
3055 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3056 emit_move_insn (to_rtx, temp);
3057 preserve_temp_slots (to_rtx);
3058 free_temp_slots ();
3059 pop_temp_slots ();
3060 return want_value ? to_rtx : NULL_RTX;
3061 }
3062
3063 /* In case we are returning the contents of an object which overlaps
3064 the place the value is being stored, use a safe function when copying
3065 a value through a pointer into a structure value return block. */
3066 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3067 && current_function_returns_struct
3068 && !current_function_returns_pcc_struct)
3069 {
3070 rtx from_rtx, size;
3071
3072 push_temp_slots ();
3073 size = expr_size (from);
3074 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3075 EXPAND_MEMORY_USE_DONT);
3076
3077 /* Copy the access rights in the checker's bitmap. */
3078 if (flag_check_memory_usage)
3079 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3080 XEXP (to_rtx, 0), ptr_mode,
3081 XEXP (from_rtx, 0), ptr_mode,
3082 convert_to_mode (TYPE_MODE (sizetype),
3083 size, TREE_UNSIGNED (sizetype)),
3084 TYPE_MODE (sizetype));
3085
3086 #ifdef TARGET_MEM_FUNCTIONS
3087 emit_library_call (memcpy_libfunc, 0,
3088 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3089 XEXP (from_rtx, 0), Pmode,
3090 convert_to_mode (TYPE_MODE (sizetype),
3091 size, TREE_UNSIGNED (sizetype)),
3092 TYPE_MODE (sizetype));
3093 #else
3094 emit_library_call (bcopy_libfunc, 0,
3095 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3096 XEXP (to_rtx, 0), Pmode,
3097 convert_to_mode (TYPE_MODE (integer_type_node),
3098 size, TREE_UNSIGNED (integer_type_node)),
3099 TYPE_MODE (integer_type_node));
3100 #endif
3101
3102 preserve_temp_slots (to_rtx);
3103 free_temp_slots ();
3104 pop_temp_slots ();
3105 return want_value ? to_rtx : NULL_RTX;
3106 }
3107
3108 /* Compute FROM and store the value in the rtx we got. */
3109
3110 push_temp_slots ();
3111 result = store_expr (from, to_rtx, want_value);
3112 preserve_temp_slots (result);
3113 free_temp_slots ();
3114 pop_temp_slots ();
3115 return want_value ? result : NULL_RTX;
3116 }
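
/* Usage sketch (editorial addition): expanding "v = e" in statement
   context versus expression context.  V and E are hypothetical
   front-end trees of compatible type.  */

static void
example_expand_assign (v, e)
     tree v, e;
{
  rtx value;

  /* Statement context: no value wanted; the result is NULL_RTX.  */
  expand_assignment (v, e, 0, 0);

  /* Expression context: keep the rtx for the containing expression.  */
  value = expand_assignment (v, e, 1, 0);
}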
3117
3118 /* Generate code for computing expression EXP,
3119 and storing the value into TARGET.
3120 TARGET may contain a QUEUED rtx.
3121
3122 If WANT_VALUE is nonzero, return a copy of the value
3123 not in TARGET, so that we can be sure to use the proper
3124 value in a containing expression even if TARGET has something
3125 else stored in it. If possible, we copy the value through a pseudo
3126 and return that pseudo. Or, if the value is constant, we try to
3127 return the constant. In some cases, we return a pseudo
3128 copied *from* TARGET.
3129
3130 If the mode is BLKmode then we may return TARGET itself.
3131 It turns out that in BLKmode it doesn't cause a problem,
3132 because C has no operators that could combine two different
3133 assignments into the same BLKmode object with different values
3134 with no sequence point. Will other languages need this to
3135 be more thorough?
3136
3137 If WANT_VALUE is 0, we return NULL, to make sure
3138 to catch quickly any cases where the caller uses the value
3139 and fails to set WANT_VALUE. */
3140
3141 rtx
3142 store_expr (exp, target, want_value)
3143 register tree exp;
3144 register rtx target;
3145 int want_value;
3146 {
3147 register rtx temp;
3148 int dont_return_target = 0;
3149
3150 if (TREE_CODE (exp) == COMPOUND_EXPR)
3151 {
3152 /* Perform first part of compound expression, then assign from second
3153 part. */
3154 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3155 emit_queue ();
3156 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3157 }
3158 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3159 {
3160 /* For a conditional expression, get a safe form of the target. Then
3161 test the condition, doing the appropriate assignment on either
3162 side. This avoids the creation of unnecessary temporaries.
3163 For non-BLKmode, it is more efficient not to do this. */
3164
3165 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3166
3167 emit_queue ();
3168 target = protect_from_queue (target, 1);
3169
3170 do_pending_stack_adjust ();
3171 NO_DEFER_POP;
3172 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3173 start_cleanup_deferral ();
3174 store_expr (TREE_OPERAND (exp, 1), target, 0);
3175 end_cleanup_deferral ();
3176 emit_queue ();
3177 emit_jump_insn (gen_jump (lab2));
3178 emit_barrier ();
3179 emit_label (lab1);
3180 start_cleanup_deferral ();
3181 store_expr (TREE_OPERAND (exp, 2), target, 0);
3182 end_cleanup_deferral ();
3183 emit_queue ();
3184 emit_label (lab2);
3185 OK_DEFER_POP;
3186
3187 return want_value ? target : NULL_RTX;
3188 }
3189 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3190 && GET_MODE (target) != BLKmode)
3191 /* If target is in memory and caller wants value in a register instead,
3192 arrange that. Pass TARGET as target for expand_expr so that,
3193 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3194 We know expand_expr will not use the target in that case.
3195 Don't do this if TARGET is volatile because we are supposed
3196 to write it and then read it. */
3197 {
3198 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3199 GET_MODE (target), 0);
3200 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3201 temp = copy_to_reg (temp);
3202 dont_return_target = 1;
3203 }
3204 else if (queued_subexp_p (target))
3205 /* If target contains a postincrement, let's not risk
3206 using it as the place to generate the rhs. */
3207 {
3208 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3209 {
3210 /* Expand EXP into a new pseudo. */
3211 temp = gen_reg_rtx (GET_MODE (target));
3212 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3213 }
3214 else
3215 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3216
3217 /* If target is volatile, ANSI requires accessing the value
3218 *from* the target, if it is accessed. So make that happen.
3219 In no case return the target itself. */
3220 if (! MEM_VOLATILE_P (target) && want_value)
3221 dont_return_target = 1;
3222 }
3223 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3224 /* If this is a scalar in a register that is stored in a wider mode
3225 than the declared mode, compute the result into its declared mode
3226 and then convert to the wider mode. Our value is the computed
3227 expression. */
3228 {
3229 /* If we don't want a value, we can do the conversion inside EXP,
3230 which will often result in some optimizations. Do the conversion
3231 in two steps: first change the signedness, if needed, then
3232 the extend. But don't do this if the type of EXP is a subtype
3233 of something else since then the conversion might involve
3234 more than just converting modes. */
3235 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3236 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3237 {
3238 if (TREE_UNSIGNED (TREE_TYPE (exp))
3239 != SUBREG_PROMOTED_UNSIGNED_P (target))
3240 exp
3241 = convert
3242 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3243 TREE_TYPE (exp)),
3244 exp);
3245
3246 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3247 SUBREG_PROMOTED_UNSIGNED_P (target)),
3248 exp);
3249 }
3250
3251 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3252
3253 /* If TEMP is a volatile MEM and we want a result value, make
3254 the access now so it gets done only once. Likewise if
3255 it contains TARGET. */
3256 if (GET_CODE (temp) == MEM && want_value
3257 && (MEM_VOLATILE_P (temp)
3258 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3259 temp = copy_to_reg (temp);
3260
3261 /* If TEMP is a VOIDmode constant, use convert_modes to make
3262 sure that we properly convert it. */
3263 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3264 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3265 TYPE_MODE (TREE_TYPE (exp)), temp,
3266 SUBREG_PROMOTED_UNSIGNED_P (target));
3267
3268 convert_move (SUBREG_REG (target), temp,
3269 SUBREG_PROMOTED_UNSIGNED_P (target));
3270 return want_value ? temp : NULL_RTX;
3271 }
3272 else
3273 {
3274 temp = expand_expr (exp, target, GET_MODE (target), 0);
3275 /* Return TARGET if it's a specified hardware register.
3276 If TARGET is a volatile mem ref, either return TARGET
3277 or return a reg copied *from* TARGET; ANSI requires this.
3278
3279 Otherwise, if TEMP is not TARGET, return TEMP
3280 if it is constant (for efficiency),
3281 or if we really want the correct value. */
3282 if (!(target && GET_CODE (target) == REG
3283 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3284 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3285 && ! rtx_equal_p (temp, target)
3286 && (CONSTANT_P (temp) || want_value))
3287 dont_return_target = 1;
3288 }
3289
3290 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3291 the same as that of TARGET, adjust the constant. This is needed, for
3292 example, in case it is a CONST_DOUBLE and we want only a word-sized
3293 value. */
3294 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3295 && TREE_CODE (exp) != ERROR_MARK
3296 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3297 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3298 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3299
3300 if (flag_check_memory_usage
3301 && GET_CODE (target) == MEM
3302 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3303 {
3304 if (GET_CODE (temp) == MEM)
3305 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3306 XEXP (target, 0), ptr_mode,
3307 XEXP (temp, 0), ptr_mode,
3308 expr_size (exp), TYPE_MODE (sizetype));
3309 else
3310 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3311 XEXP (target, 0), ptr_mode,
3312 expr_size (exp), TYPE_MODE (sizetype),
3313 GEN_INT (MEMORY_USE_WO),
3314 TYPE_MODE (integer_type_node));
3315 }
3316
3317 /* If value was not generated in the target, store it there.
3318 Convert the value to TARGET's type first if necessary. */
3319
3320 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3321 {
3322 target = protect_from_queue (target, 1);
3323 if (GET_MODE (temp) != GET_MODE (target)
3324 && GET_MODE (temp) != VOIDmode)
3325 {
3326 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3327 if (dont_return_target)
3328 {
3329 /* In this case, we will return TEMP,
3330 so make sure it has the proper mode.
3331 But don't forget to store the value into TARGET. */
3332 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3333 emit_move_insn (target, temp);
3334 }
3335 else
3336 convert_move (target, temp, unsignedp);
3337 }
3338
3339 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3340 {
3341 /* Handle copying a string constant into an array.
3342 The string constant may be shorter than the array.
3343 So copy just the string's actual length, and clear the rest. */
3344 rtx size;
3345 rtx addr;
3346
3347 /* Get the size of the data type of the string,
3348 which is actually the size of the target. */
3349 size = expr_size (exp);
3350 if (GET_CODE (size) == CONST_INT
3351 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3352 emit_block_move (target, temp, size,
3353 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3354 else
3355 {
3356 /* Compute the size of the data to copy from the string. */
3357 tree copy_size
3358 = size_binop (MIN_EXPR,
3359 make_tree (sizetype, size),
3360 convert (sizetype,
3361 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3362 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3363 VOIDmode, 0);
3364 rtx label = 0;
3365
3366 /* Copy that much. */
3367 emit_block_move (target, temp, copy_size_rtx,
3368 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3369
3370 /* Figure out how much is left in TARGET that we have to clear.
3371 Do all calculations in ptr_mode. */
3372
3373 addr = XEXP (target, 0);
3374 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3375
3376 if (GET_CODE (copy_size_rtx) == CONST_INT)
3377 {
3378 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3379 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3380 }
3381 else
3382 {
3383 addr = force_reg (ptr_mode, addr);
3384 addr = expand_binop (ptr_mode, add_optab, addr,
3385 copy_size_rtx, NULL_RTX, 0,
3386 OPTAB_LIB_WIDEN);
3387
3388 size = expand_binop (ptr_mode, sub_optab, size,
3389 copy_size_rtx, NULL_RTX, 0,
3390 OPTAB_LIB_WIDEN);
3391
3392 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3393 GET_MODE (size), 0, 0);
3394 label = gen_label_rtx ();
3395 emit_jump_insn (gen_blt (label));
3396 }
3397
3398 if (size != const0_rtx)
3399 {
3400 /* Be sure we can write on ADDR. */
3401 if (flag_check_memory_usage)
3402 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3403 addr, ptr_mode,
3404 size, TYPE_MODE (sizetype),
3405 GEN_INT (MEMORY_USE_WO),
3406 TYPE_MODE (integer_type_node));
3407 #ifdef TARGET_MEM_FUNCTIONS
3408 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3409 addr, ptr_mode,
3410 const0_rtx, TYPE_MODE (integer_type_node),
3411 convert_to_mode (TYPE_MODE (sizetype),
3412 size,
3413 TREE_UNSIGNED (sizetype)),
3414 TYPE_MODE (sizetype));
3415 #else
3416 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3417 addr, ptr_mode,
3418 convert_to_mode (TYPE_MODE (integer_type_node),
3419 size,
3420 TREE_UNSIGNED (integer_type_node)),
3421 TYPE_MODE (integer_type_node));
3422 #endif
3423 }
3424
3425 if (label)
3426 emit_label (label);
3427 }
3428 }
3429 /* Handle calls that return values in multiple non-contiguous locations.
3430 The Irix 6 ABI has examples of this. */
3431 else if (GET_CODE (target) == PARALLEL)
3432 emit_group_load (target, temp);
3433 else if (GET_MODE (temp) == BLKmode)
3434 emit_block_move (target, temp, expr_size (exp),
3435 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3436 else
3437 emit_move_insn (target, temp);
3438 }
3439
3440 /* If we don't want a value, return NULL_RTX. */
3441 if (! want_value)
3442 return NULL_RTX;
3443
3444 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3445 ??? The latter test doesn't seem to make sense. */
3446 else if (dont_return_target && GET_CODE (temp) != MEM)
3447 return temp;
3448
3449 /* Return TARGET itself if it is a hard register. */
3450 else if (want_value && GET_MODE (target) != BLKmode
3451 && ! (GET_CODE (target) == REG
3452 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3453 return copy_to_reg (target);
3454
3455 else
3456 return target;
3457 }
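
/* Usage sketch (editorial addition): the WANT_VALUE contract above.
   In statement context pass zero and ignore the null result; when the
   assignment's value feeds a containing expression, pass nonzero and
   use the returned rtx rather than TARGET itself.  EXP and TARGET are
   hypothetical.  */

static rtx
example_store (exp, target, as_statement)
     tree exp;
     rtx target;
     int as_statement;
{
  if (as_statement)
    {
      store_expr (exp, target, 0);
      return NULL_RTX;
    }

  return store_expr (exp, target, 1);
}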
3458 \f
3459 /* Return 1 if EXP just contains zeros. */
3460
3461 static int
3462 is_zeros_p (exp)
3463 tree exp;
3464 {
3465 tree elt;
3466
3467 switch (TREE_CODE (exp))
3468 {
3469 case CONVERT_EXPR:
3470 case NOP_EXPR:
3471 case NON_LVALUE_EXPR:
3472 return is_zeros_p (TREE_OPERAND (exp, 0));
3473
3474 case INTEGER_CST:
3475 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3476
3477 case COMPLEX_CST:
3478 return
3479 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3480
3481 case REAL_CST:
3482 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3483
3484 case CONSTRUCTOR:
3485 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3486 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3487 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3488 if (! is_zeros_p (TREE_VALUE (elt)))
3489 return 0;
3490
3491 return 1;
3492
3493 default:
3494 return 0;
3495 }
3496 }
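
/* Examples (editorial addition): by the cases above, is_zeros_p is 1
   for the integer constant 0, for a REAL_CST identical to 0.0, for a
   complex constant whose real and imaginary parts are both zero, and
   for a CONSTRUCTOR all of whose elements are themselves zeros (for
   SET_TYPE, only the empty constructor qualifies).  */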
3497
3498 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3499
3500 static int
3501 mostly_zeros_p (exp)
3502 tree exp;
3503 {
3504 if (TREE_CODE (exp) == CONSTRUCTOR)
3505 {
3506 int elts = 0, zeros = 0;
3507 tree elt = CONSTRUCTOR_ELTS (exp);
3508 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3509 {
3510 /* If there are no ranges of true bits, it is all zero. */
3511 return elt == NULL_TREE;
3512 }
3513 for (; elt; elt = TREE_CHAIN (elt))
3514 {
3515 /* We do not handle the case where the index is a RANGE_EXPR,
3516 so the statistic will be somewhat inaccurate.
3517 We do make a more accurate count in store_constructor itself,
3518 and since this function is only used for nested array elements,
3519 this should be close enough. */
3520 if (mostly_zeros_p (TREE_VALUE (elt)))
3521 zeros++;
3522 elts++;
3523 }
3524
3525 return 4 * zeros >= 3 * elts;
3526 }
3527
3528 return is_zeros_p (exp);
3529 }
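
/* Worked example (editorial addition): the test 4 * zeros >= 3 * elts
   is the integer form of zeros / elts >= 3/4.  A constructor with 16
   elements therefore needs at least 12 zero elements to count as
   mostly zero, since 4 * 12 == 3 * 16.  */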
3530 \f
3531 /* Helper function for store_constructor.
3532 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3533 TYPE is the type of the CONSTRUCTOR, not the element type.
3534 CLEARED is as for store_constructor.
3535
3536 This provides a recursive shortcut back to store_constructor when it isn't
3537 necessary to go through store_field. This is so that we can pass through
3538 the cleared field to let store_constructor know that we may not have to
3539 clear a substructure if the outer structure has already been cleared. */
3540
3541 static void
3542 store_constructor_field (target, bitsize, bitpos,
3543 mode, exp, type, cleared)
3544 rtx target;
3545 int bitsize, bitpos;
3546 enum machine_mode mode;
3547 tree exp, type;
3548 int cleared;
3549 {
3550 if (TREE_CODE (exp) == CONSTRUCTOR
3551 && bitpos % BITS_PER_UNIT == 0
3552 /* If we have a non-zero bitpos for a register target, then we just
3553 let store_field do the bitfield handling. This is unlikely to
3554 generate unnecessary clear instructions anyway. */
3555 && (bitpos == 0 || GET_CODE (target) == MEM))
3556 {
3557 if (bitpos != 0)
3558 target = change_address (target, VOIDmode,
3559 plus_constant (XEXP (target, 0),
3560 bitpos / BITS_PER_UNIT));
3561 store_constructor (exp, target, cleared);
3562 }
3563 else
3564 store_field (target, bitsize, bitpos, mode, exp,
3565 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3566 int_size_in_bytes (type));
3567 }
3568
3569 /* Store the value of constructor EXP into the rtx TARGET.
3570 TARGET is either a REG or a MEM.
3571 CLEARED is true if TARGET is known to have been zero'd. */
3572
3573 static void
3574 store_constructor (exp, target, cleared)
3575 tree exp;
3576 rtx target;
3577 int cleared;
3578 {
3579 tree type = TREE_TYPE (exp);
3580
3581 /* We know our target cannot conflict, since safe_from_p has been called. */
3582 #if 0
3583 /* Don't try copying piece by piece into a hard register
3584 since that is vulnerable to being clobbered by EXP.
3585 Instead, construct in a pseudo register and then copy it all. */
3586 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3587 {
3588 rtx temp = gen_reg_rtx (GET_MODE (target));
3589 store_constructor (exp, temp, 0);
3590 emit_move_insn (target, temp);
3591 return;
3592 }
3593 #endif
3594
3595 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3596 || TREE_CODE (type) == QUAL_UNION_TYPE)
3597 {
3598 register tree elt;
3599
3600 /* Inform later passes that the whole union value is dead. */
3601 if (TREE_CODE (type) == UNION_TYPE
3602 || TREE_CODE (type) == QUAL_UNION_TYPE)
3603 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3604
3605 /* If we are building a static constructor into a register,
3606 set the initial value to zero so we can fold the value into
3607 a constant. But if more than one register is involved,
3608 this probably loses. */
3609 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3610 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3611 {
3612 if (! cleared)
3613 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3614
3615 cleared = 1;
3616 }
3617
3618 /* If the constructor has fewer fields than the structure
3619 or if we are initializing the structure to mostly zeros,
3620 clear the whole structure first. */
3621 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3622 != list_length (TYPE_FIELDS (type)))
3623 || mostly_zeros_p (exp))
3624 {
3625 if (! cleared)
3626 clear_storage (target, expr_size (exp),
3627 TYPE_ALIGN (type) / BITS_PER_UNIT);
3628
3629 cleared = 1;
3630 }
3631 else
3632 /* Inform later passes that the old value is dead. */
3633 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3634
3635 /* Store each element of the constructor into
3636 the corresponding field of TARGET. */
3637
3638 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3639 {
3640 register tree field = TREE_PURPOSE (elt);
3641 register enum machine_mode mode;
3642 int bitsize;
3643 int bitpos = 0;
3644 int unsignedp;
3645 tree pos, constant = 0, offset = 0;
3646 rtx to_rtx = target;
3647
3648 /* Just ignore missing fields.
3649 We cleared the whole structure, above,
3650 if any fields are missing. */
3651 if (field == 0)
3652 continue;
3653
3654 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3655 continue;
3656
3657 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3658 unsignedp = TREE_UNSIGNED (field);
3659 mode = DECL_MODE (field);
3660 if (DECL_BIT_FIELD (field))
3661 mode = VOIDmode;
3662
3663 pos = DECL_FIELD_BITPOS (field);
3664 if (TREE_CODE (pos) == INTEGER_CST)
3665 constant = pos;
3666 else if (TREE_CODE (pos) == PLUS_EXPR
3667 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3668 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3669 else
3670 offset = pos;
3671
3672 if (constant)
3673 bitpos = TREE_INT_CST_LOW (constant);
3674
3675 if (offset)
3676 {
3677 rtx offset_rtx;
3678
3679 if (contains_placeholder_p (offset))
3680 offset = build (WITH_RECORD_EXPR, sizetype,
3681 offset, make_tree (TREE_TYPE (exp), target));
3682
3683 offset = size_binop (FLOOR_DIV_EXPR, offset,
3684 size_int (BITS_PER_UNIT));
3685
3686 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3687 if (GET_CODE (to_rtx) != MEM)
3688 abort ();
3689
3690 if (GET_MODE (offset_rtx) != ptr_mode)
3691 {
3692 #ifdef POINTERS_EXTEND_UNSIGNED
3693 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
3694 #else
3695 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3696 #endif
3697 }
3698
3699 to_rtx
3700 = change_address (to_rtx, VOIDmode,
3701 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3702 force_reg (ptr_mode, offset_rtx)));
3703 }
3704 if (TREE_READONLY (field))
3705 {
3706 if (GET_CODE (to_rtx) == MEM)
3707 to_rtx = copy_rtx (to_rtx);
3708
3709 RTX_UNCHANGING_P (to_rtx) = 1;
3710 }
3711
3712 store_constructor_field (to_rtx, bitsize, bitpos,
3713 mode, TREE_VALUE (elt), type, cleared);
3714 }
3715 }
3716 else if (TREE_CODE (type) == ARRAY_TYPE)
3717 {
3718 register tree elt;
3719 register int i;
3720 int need_to_clear;
3721 tree domain = TYPE_DOMAIN (type);
3722 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3723 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3724 tree elttype = TREE_TYPE (type);
3725
3726 /* If the constructor has fewer elements than the array,
3727 clear the whole array first. Similarly if this is
3728 a static constructor of a non-BLKmode object. */
3729 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3730 need_to_clear = 1;
3731 else
3732 {
3733 HOST_WIDE_INT count = 0, zero_count = 0;
3734 need_to_clear = 0;
3735 /* This loop is a more accurate version of the loop in
3736 mostly_zeros_p (it handles RANGE_EXPR in an index).
3737 It is also needed to check for missing elements. */
3738 for (elt = CONSTRUCTOR_ELTS (exp);
3739 elt != NULL_TREE;
3740 elt = TREE_CHAIN (elt))
3741 {
3742 tree index = TREE_PURPOSE (elt);
3743 HOST_WIDE_INT this_node_count;
3744 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3745 {
3746 tree lo_index = TREE_OPERAND (index, 0);
3747 tree hi_index = TREE_OPERAND (index, 1);
3748 if (TREE_CODE (lo_index) != INTEGER_CST
3749 || TREE_CODE (hi_index) != INTEGER_CST)
3750 {
3751 need_to_clear = 1;
3752 break;
3753 }
3754 this_node_count = TREE_INT_CST_LOW (hi_index)
3755 - TREE_INT_CST_LOW (lo_index) + 1;
3756 }
3757 else
3758 this_node_count = 1;
3759 count += this_node_count;
3760 if (mostly_zeros_p (TREE_VALUE (elt)))
3761 zero_count += this_node_count;
3762 }
3763 /* Clear the entire array first if there are any missing elements,
3764 or if the incidence of zero elements is >= 75%. */
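	  /* Worked example (illustrative): an 8-element array with 6
	     zero-valued elements gives 4 * 6 = 24 >= 3 * 8 = 24, i.e.
	     at least 75% zeros, so we clear first and store only the
	     two nonzero elements.  A constructor with fewer entries
	     than MAXELT - MINELT + 1 is cleared for the same reason.  */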
3765 if (count < maxelt - minelt + 1
3766 || 4 * zero_count >= 3 * count)
3767 need_to_clear = 1;
3768 }
3769 if (need_to_clear)
3770 {
3771 if (! cleared)
3772 clear_storage (target, expr_size (exp),
3773 TYPE_ALIGN (type) / BITS_PER_UNIT);
3774 cleared = 1;
3775 }
3776 else
3777 /* Inform later passes that the old value is dead. */
3778 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3779
3780 /* Store each element of the constructor into
3781 the corresponding element of TARGET, determined
3782 by counting the elements. */
3783 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3784 elt;
3785 elt = TREE_CHAIN (elt), i++)
3786 {
3787 register enum machine_mode mode;
3788 int bitsize;
3789 int bitpos;
3790 int unsignedp;
3791 tree value = TREE_VALUE (elt);
3792 tree index = TREE_PURPOSE (elt);
3793 rtx xtarget = target;
3794
3795 if (cleared && is_zeros_p (value))
3796 continue;
3797
3798 mode = TYPE_MODE (elttype);
3799 bitsize = GET_MODE_BITSIZE (mode);
3800 unsignedp = TREE_UNSIGNED (elttype);
3801
3802 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3803 {
3804 tree lo_index = TREE_OPERAND (index, 0);
3805 tree hi_index = TREE_OPERAND (index, 1);
3806 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3807 struct nesting *loop;
3808 HOST_WIDE_INT lo, hi, count;
3809 tree position;
3810
3811 /* If the range is constant and "small", unroll the loop. */
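	      /* "Small" means: the target is not in memory, or the range
		 covers at most two elements, or at most 40 bytes (40 * 8
		 bits) are initialized.  E.g. (illustrative, GNU C range
		 syntax, assuming a 32-bit int):
		   int a[10] = { [0 ... 9] = 5 };
		 initializes 10 * 32 = 320 bits == 40 * 8, so it is
		 unrolled.  */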
3812 if (TREE_CODE (lo_index) == INTEGER_CST
3813 && TREE_CODE (hi_index) == INTEGER_CST
3814 && (lo = TREE_INT_CST_LOW (lo_index),
3815 hi = TREE_INT_CST_LOW (hi_index),
3816 count = hi - lo + 1,
3817 (GET_CODE (target) != MEM
3818 || count <= 2
3819 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3820 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3821 <= 40 * 8))))
3822 {
3823 lo -= minelt; hi -= minelt;
3824 for (; lo <= hi; lo++)
3825 {
3826 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3827 store_constructor_field (target, bitsize, bitpos,
3828 mode, value, type, cleared);
3829 }
3830 }
3831 else
3832 {
3833 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3834 loop_top = gen_label_rtx ();
3835 loop_end = gen_label_rtx ();
3836
3837 unsignedp = TREE_UNSIGNED (domain);
3838
3839 index = build_decl (VAR_DECL, NULL_TREE, domain);
3840
3841 DECL_RTL (index) = index_r
3842 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3843 &unsignedp, 0));
3844
3845 if (TREE_CODE (value) == SAVE_EXPR
3846 && SAVE_EXPR_RTL (value) == 0)
3847 {
3848 /* Make sure value gets expanded once before the
3849 loop. */
3850 expand_expr (value, const0_rtx, VOIDmode, 0);
3851 emit_queue ();
3852 }
3853 store_expr (lo_index, index_r, 0);
3854 loop = expand_start_loop (0);
3855
3856 /* Assign value to element index. */
3857 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3858 size_int (BITS_PER_UNIT));
3859 position = size_binop (MULT_EXPR,
3860 size_binop (MINUS_EXPR, index,
3861 TYPE_MIN_VALUE (domain)),
3862 position);
3863 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3864 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
3865 xtarget = change_address (target, mode, addr);
3866 if (TREE_CODE (value) == CONSTRUCTOR)
3867 store_constructor (value, xtarget, cleared);
3868 else
3869 store_expr (value, xtarget, 0);
3870
3871 expand_exit_loop_if_false (loop,
3872 build (LT_EXPR, integer_type_node,
3873 index, hi_index));
3874
3875 expand_increment (build (PREINCREMENT_EXPR,
3876 TREE_TYPE (index),
3877 index, integer_one_node), 0, 0);
3878 expand_end_loop ();
3879 emit_label (loop_end);
3880
3881 /* Needed by stupid register allocation, to extend the
3882 lifetime of pseudo-regs used by target past the end
3883 of the loop. */
3884 emit_insn (gen_rtx_USE (GET_MODE (target), target));
3885 }
3886 }
3887 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3888 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3889 {
3890 rtx pos_rtx, addr;
3891 tree position;
3892
3893 if (index == 0)
3894 index = size_int (i);
3895
3896 if (minelt)
3897 index = size_binop (MINUS_EXPR, index,
3898 TYPE_MIN_VALUE (domain));
3899 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3900 size_int (BITS_PER_UNIT));
3901 position = size_binop (MULT_EXPR, index, position);
3902 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3903 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
3904 xtarget = change_address (target, mode, addr);
3905 store_expr (value, xtarget, 0);
3906 }
3907 else
3908 {
3909 if (index != 0)
3910 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3911 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3912 else
3913 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3914 store_constructor_field (target, bitsize, bitpos,
3915 mode, value, type, cleared);
3916 }
3917 }
3918 }
3919 /* Set constructor assignments. */
3920 else if (TREE_CODE (type) == SET_TYPE)
3921 {
3922 tree elt = CONSTRUCTOR_ELTS (exp);
3923 int nbytes = int_size_in_bytes (type), nbits;
3924 tree domain = TYPE_DOMAIN (type);
3925 tree domain_min, domain_max, bitlength;
3926
3927 /* The default implementation strategy is to extract the constant
3928 parts of the constructor, use that to initialize the target,
3929 and then "or" in whatever non-constant ranges we need in addition.
3930
3931 If a large set is all zero or all ones, it is
3932 probably better to set it using memset (if available) or bzero.
3933 Also, if a large set has just a single range, it may be
3934 better to first clear the whole set (using
3935 bzero/memset), and then set the bits we want. */
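      /* For illustration (hypothetical CHILL-style powerset source):
	 a constructor equivalent to [2, 5..7] with constant bounds has
	 bits 2 and 5 through 7 assembled into words below, while a
	 range with non-constant bounds is "or"ed in afterwards through
	 the __setbits library call further down.  */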
3936
3937 /* Check for all zeros. */
3938 if (elt == NULL_TREE)
3939 {
3940 if (!cleared)
3941 clear_storage (target, expr_size (exp),
3942 TYPE_ALIGN (type) / BITS_PER_UNIT);
3943 return;
3944 }
3945
3946 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3947 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3948 bitlength = size_binop (PLUS_EXPR,
3949 size_binop (MINUS_EXPR, domain_max, domain_min),
3950 size_one_node);
3951
3952 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3953 abort ();
3954 nbits = TREE_INT_CST_LOW (bitlength);
3955
3956 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3957 are "complicated" (more than one range), initialize (the
3958 constant parts) by copying from a constant. */
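      /* Bit placement in the words built below (illustrative, assuming
	 a 32-bit set word): set bit 0 lands in bit 31 of the first word
	 when BYTES_BIG_ENDIAN (word |= 1 << (32 - 1 - 0)) and in bit 0
	 otherwise (word |= 1 << 0).  */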
3959 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3960 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3961 {
3962 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3963 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3964 char *bit_buffer = (char *) alloca (nbits);
3965 HOST_WIDE_INT word = 0;
3966 int bit_pos = 0;
3967 int ibit = 0;
3968 int offset = 0; /* In bytes from beginning of set. */
3969 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3970 for (;;)
3971 {
3972 if (bit_buffer[ibit])
3973 {
3974 if (BYTES_BIG_ENDIAN)
3975 word |= (1 << (set_word_size - 1 - bit_pos));
3976 else
3977 word |= 1 << bit_pos;
3978 }
3979 bit_pos++; ibit++;
3980 if (bit_pos >= set_word_size || ibit == nbits)
3981 {
3982 if (word != 0 || ! cleared)
3983 {
3984 rtx datum = GEN_INT (word);
3985 rtx to_rtx;
3986 /* The assumption here is that it is safe to use
3987 XEXP if the set is multi-word, but not if
3988 it's single-word. */
3989 if (GET_CODE (target) == MEM)
3990 {
3991 to_rtx = plus_constant (XEXP (target, 0), offset);
3992 to_rtx = change_address (target, mode, to_rtx);
3993 }
3994 else if (offset == 0)
3995 to_rtx = target;
3996 else
3997 abort ();
3998 emit_move_insn (to_rtx, datum);
3999 }
4000 if (ibit == nbits)
4001 break;
4002 word = 0;
4003 bit_pos = 0;
4004 offset += set_word_size / BITS_PER_UNIT;
4005 }
4006 }
4007 }
4008 else if (!cleared)
4009 {
4010 /* Don't bother clearing storage if the set is all ones. */
4011 if (TREE_CHAIN (elt) != NULL_TREE
4012 || (TREE_PURPOSE (elt) == NULL_TREE
4013 ? nbits != 1
4014 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4015 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4016 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4017 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4018 != nbits))))
4019 clear_storage (target, expr_size (exp),
4020 TYPE_ALIGN (type) / BITS_PER_UNIT);
4021 }
4022
4023 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4024 {
4025 /* Start of range of element, or NULL. */
4026 tree startbit = TREE_PURPOSE (elt);
4027 /* End of range of element, or element value. */
4028 tree endbit = TREE_VALUE (elt);
4029 #ifdef TARGET_MEM_FUNCTIONS
4030 HOST_WIDE_INT startb, endb;
4031 #endif
4032 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4033
4034 bitlength_rtx = expand_expr (bitlength,
4035 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4036
4037 /* Handle a non-range tuple element like [ expr ]. */
4038 if (startbit == NULL_TREE)
4039 {
4040 startbit = save_expr (endbit);
4041 endbit = startbit;
4042 }
4043 startbit = convert (sizetype, startbit);
4044 endbit = convert (sizetype, endbit);
4045 if (! integer_zerop (domain_min))
4046 {
4047 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4048 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4049 }
4050 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4051 EXPAND_CONST_ADDRESS);
4052 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4053 EXPAND_CONST_ADDRESS);
4054
4055 if (REG_P (target))
4056 {
4057 targetx = assign_stack_temp (GET_MODE (target),
4058 GET_MODE_SIZE (GET_MODE (target)),
4059 0);
4060 emit_move_insn (targetx, target);
4061 }
4062 else if (GET_CODE (target) == MEM)
4063 targetx = target;
4064 else
4065 abort ();
4066
4067 #ifdef TARGET_MEM_FUNCTIONS
4068 /* Optimization: If startbit and endbit are
4069 constants divisible by BITS_PER_UNIT,
4070 call memset instead. */
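	  /* E.g. (illustrative): STARTBIT = 8 and ENDBIT = 23 give
	     STARTB = 8 and ENDB = 24, both multiples of BITS_PER_UNIT
	     (8 here), so memset fills (24 - 8) / 8 = 2 bytes with
	     all-ones (constm1_rtx) starting at byte offset 1.  */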
4071 if (TREE_CODE (startbit) == INTEGER_CST
4072 && TREE_CODE (endbit) == INTEGER_CST
4073 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4074 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4075 {
4076 emit_library_call (memset_libfunc, 0,
4077 VOIDmode, 3,
4078 plus_constant (XEXP (targetx, 0),
4079 startb / BITS_PER_UNIT),
4080 Pmode,
4081 constm1_rtx, TYPE_MODE (integer_type_node),
4082 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4083 TYPE_MODE (sizetype));
4084 }
4085 else
4086 #endif
4087 {
4088 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4089 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4090 bitlength_rtx, TYPE_MODE (sizetype),
4091 startbit_rtx, TYPE_MODE (sizetype),
4092 endbit_rtx, TYPE_MODE (sizetype));
4093 }
4094 if (REG_P (target))
4095 emit_move_insn (target, targetx);
4096 }
4097 }
4098
4099 else
4100 abort ();
4101 }
4102
4103 /* Store the value of EXP (an expression tree)
4104 into a subfield of TARGET which has mode MODE and occupies
4105 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4106 If MODE is VOIDmode, it means that we are storing into a bit-field.
4107
4108 If VALUE_MODE is VOIDmode, return nothing in particular.
4109 UNSIGNEDP is not used in this case.
4110
4111 Otherwise, return an rtx for the value stored. This rtx
4112 has mode VALUE_MODE if that is convenient to do.
4113 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4114
4115 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4116 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
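/* A sketch of a typical call (hypothetical values): for a 5-bit
   bit-field starting at bit 3 of a word-aligned 4-byte structure,

	store_field (target, 5, 3, VOIDmode, exp, VOIDmode, 0, 4, 4);

   where VOIDmode for MODE selects the bit-field path below and
   VOIDmode for VALUE_MODE says no value need be returned.  */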
4117
4118 static rtx
4119 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4120 unsignedp, align, total_size)
4121 rtx target;
4122 int bitsize, bitpos;
4123 enum machine_mode mode;
4124 tree exp;
4125 enum machine_mode value_mode;
4126 int unsignedp;
4127 int align;
4128 int total_size;
4129 {
4130 HOST_WIDE_INT width_mask = 0;
4131
4132 if (TREE_CODE (exp) == ERROR_MARK)
4133 return const0_rtx;
4134
4135 if (bitsize < HOST_BITS_PER_WIDE_INT)
4136 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4137
4138 /* If we are storing into an unaligned field of an aligned union that is
4139 in a register, we may have the mode of TARGET being an integer mode but
4140 MODE == BLKmode. In that case, get an aligned object whose size and
4141 alignment are the same as TARGET and store TARGET into it (we can avoid
4142 the store if the field being stored is the entire width of TARGET). Then
4143 call ourselves recursively to store the field into a BLKmode version of
4144 that object. Finally, load from the object into TARGET. This is not
4145 very efficient in general, but should only be slightly more expensive
4146 than the otherwise-required unaligned accesses. Perhaps this can be
4147 cleaned up later. */
4148
4149 if (mode == BLKmode
4150 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4151 {
4152 rtx object = assign_stack_temp (GET_MODE (target),
4153 GET_MODE_SIZE (GET_MODE (target)), 0);
4154 rtx blk_object = copy_rtx (object);
4155
4156 MEM_IN_STRUCT_P (object) = 1;
4157 MEM_IN_STRUCT_P (blk_object) = 1;
4158 PUT_MODE (blk_object, BLKmode);
4159
4160 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4161 emit_move_insn (object, target);
4162
4163 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4164 align, total_size);
4165
4166 /* Even though we aren't returning target, we need to
4167 give it the updated value. */
4168 emit_move_insn (target, object);
4169
4170 return blk_object;
4171 }
4172
4173 /* If the structure is in a register or if the component
4174 is a bit field, we cannot use addressing to access it.
4175 Use bit-field techniques or SUBREG to store in it. */
4176
4177 if (mode == VOIDmode
4178 || (mode != BLKmode && ! direct_store[(int) mode])
4179 || GET_CODE (target) == REG
4180 || GET_CODE (target) == SUBREG
4181 /* If the field isn't aligned enough to store as an ordinary memref,
4182 store it as a bit field. */
4183 || (SLOW_UNALIGNED_ACCESS
4184 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4185 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4186 {
4187 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4188
4189 /* If BITSIZE is narrower than the size of the type of EXP
4190 we will be narrowing TEMP. Normally, what's wanted are the
4191 low-order bits. However, if EXP's type is a record and this is
4192 a big-endian machine, we want the upper BITSIZE bits. */
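	 /* E.g. (illustrative): TEMP in SImode (32 bits) with BITSIZE
	    of 8 for a RECORD_TYPE on a big-endian machine is shifted
	    right by 32 - 8 = 24, so the wanted upper 8 bits end up in
	    the low-order position that store_bit_field expects.  */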
4193 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4194 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4195 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4196 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4197 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4198 - bitsize),
4199 temp, 1);
4200
4201 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4202 MODE. */
4203 if (mode != VOIDmode && mode != BLKmode
4204 && mode != TYPE_MODE (TREE_TYPE (exp)))
4205 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4206
4207 /* If the modes of TARGET and TEMP are both BLKmode, both
4208 must be in memory and BITPOS must be aligned on a byte
4209 boundary. If so, we simply do a block copy. */
4210 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4211 {
4212 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4213 || bitpos % BITS_PER_UNIT != 0)
4214 abort ();
4215
4216 target = change_address (target, VOIDmode,
4217 plus_constant (XEXP (target, 0),
4218 bitpos / BITS_PER_UNIT));
4219
4220 emit_block_move (target, temp,
4221 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4222 / BITS_PER_UNIT),
4223 1);
4224
4225 return value_mode == VOIDmode ? const0_rtx : target;
4226 }
4227
4228 /* Store the value in the bitfield. */
4229 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4230 if (value_mode != VOIDmode)
4231 {
4232 /* The caller wants an rtx for the value. */
4233 /* If possible, avoid refetching from the bitfield itself. */
4234 if (width_mask != 0
4235 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4236 {
4237 tree count;
4238 enum machine_mode tmode;
4239
4240 if (unsignedp)
4241 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4242 tmode = GET_MODE (temp);
4243 if (tmode == VOIDmode)
4244 tmode = value_mode;
4245 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4246 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4247 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4248 }
4249 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4250 NULL_RTX, value_mode, 0, align,
4251 total_size);
4252 }
4253 return const0_rtx;
4254 }
4255 else
4256 {
4257 rtx addr = XEXP (target, 0);
4258 rtx to_rtx;
4259
4260 /* If a value is wanted, it must be the lhs;
4261 so make the address stable for multiple use. */
4262
4263 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4264 && ! CONSTANT_ADDRESS_P (addr)
4265 /* A frame-pointer reference is already stable. */
4266 && ! (GET_CODE (addr) == PLUS
4267 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4268 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4269 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4270 addr = copy_to_reg (addr);
4271
4272 /* Now build a reference to just the desired component. */
4273
4274 to_rtx = copy_rtx (change_address (target, mode,
4275 plus_constant (addr,
4276 (bitpos
4277 / BITS_PER_UNIT))));
4278 MEM_IN_STRUCT_P (to_rtx) = 1;
4279
4280 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4281 }
4282 }
4283 \f
4284 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4285 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4286 ARRAY_REFs and find the ultimate containing object, which we return.
4287
4288 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4289 bit position, and *PUNSIGNEDP to the signedness of the field.
4290 If the position of the field is variable, we store a tree
4291 giving the variable offset (in units) in *POFFSET.
4292 This offset is in addition to the bit position.
4293 If the position is not variable, we store 0 in *POFFSET.
4294 We set *PALIGNMENT to the alignment in bytes of the address that will be
4295 computed. This is the alignment of the thing we return if *POFFSET
4296 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4297
4298 If any of the extraction expressions is volatile,
4299 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4300
4301 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4302 is a mode that can be used to access the field. In that case, *PBITSIZE
4303 is redundant.
4304
4305 If the field describes a variable-sized object, *PMODE is set to
4306 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4307 this case, but the address of the object can be found. */
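/* A sketch of typical use (hypothetical variables): for an access
   S.F where F is a 10-bit field starting at bit 16,

	int bitsize, bitpos, unsignedp, volatilep = 0, alignment;
	tree offset;
	enum machine_mode mode;
	tree inner = get_inner_reference (exp, &bitsize, &bitpos,
					  &offset, &mode, &unsignedp,
					  &volatilep, &alignment);

   returns the tree for S with *PBITSIZE == 10, *PBITPOS == 16 and
   *POFFSET == 0.  VOLATILEP must be zeroed by the caller, since it
   is only ever set, never cleared, here.  */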
4308
4309 tree
4310 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4311 punsignedp, pvolatilep, palignment)
4312 tree exp;
4313 int *pbitsize;
4314 int *pbitpos;
4315 tree *poffset;
4316 enum machine_mode *pmode;
4317 int *punsignedp;
4318 int *pvolatilep;
4319 int *palignment;
4320 {
4321 tree orig_exp = exp;
4322 tree size_tree = 0;
4323 enum machine_mode mode = VOIDmode;
4324 tree offset = integer_zero_node;
4325 int alignment = BIGGEST_ALIGNMENT;
4326
4327 if (TREE_CODE (exp) == COMPONENT_REF)
4328 {
4329 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4330 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4331 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4332 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4333 }
4334 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4335 {
4336 size_tree = TREE_OPERAND (exp, 1);
4337 *punsignedp = TREE_UNSIGNED (exp);
4338 }
4339 else
4340 {
4341 mode = TYPE_MODE (TREE_TYPE (exp));
4342 *pbitsize = GET_MODE_BITSIZE (mode);
4343 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4344 }
4345
4346 if (size_tree)
4347 {
4348 if (TREE_CODE (size_tree) != INTEGER_CST)
4349 mode = BLKmode, *pbitsize = -1;
4350 else
4351 *pbitsize = TREE_INT_CST_LOW (size_tree);
4352 }
4353
4354 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4355 and find the ultimate containing object. */
4356
4357 *pbitpos = 0;
4358
4359 while (1)
4360 {
4361 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4362 {
4363 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4364 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4365 : TREE_OPERAND (exp, 2));
4366 tree constant = integer_zero_node, var = pos;
4367
4368 /* If this field hasn't been filled in yet, don't go
4369 past it. This should only happen when folding expressions
4370 made during type construction. */
4371 if (pos == 0)
4372 break;
4373
4374 /* Assume here that the offset is a multiple of a unit.
4375 If not, there should be an explicitly added constant. */
4376 if (TREE_CODE (pos) == PLUS_EXPR
4377 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4378 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4379 else if (TREE_CODE (pos) == INTEGER_CST)
4380 constant = pos, var = integer_zero_node;
4381
4382 *pbitpos += TREE_INT_CST_LOW (constant);
4383 offset = size_binop (PLUS_EXPR, offset,
4384 size_binop (EXACT_DIV_EXPR, var,
4385 size_int (BITS_PER_UNIT)));
4386 }
4387
4388 else if (TREE_CODE (exp) == ARRAY_REF)
4389 {
4390 /* This code is based on the code in case ARRAY_REF in expand_expr
4391 below. We assume here that the size of an array element is
4392 always an integral multiple of BITS_PER_UNIT. */
4393
4394 tree index = TREE_OPERAND (exp, 1);
4395 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4396 tree low_bound
4397 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4398 tree index_type = TREE_TYPE (index);
4399 tree xindex;
4400
4401 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4402 {
4403 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4404 index);
4405 index_type = TREE_TYPE (index);
4406 }
4407
4408 if (! integer_zerop (low_bound))
4409 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4410
4411 if (TREE_CODE (index) == INTEGER_CST)
4412 {
4413 index = convert (sbitsizetype, index);
4414 index_type = TREE_TYPE (index);
4415 }
4416
4417 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4418 convert (sbitsizetype,
4419 TYPE_SIZE (TREE_TYPE (exp)))));
4420
4421 if (TREE_CODE (xindex) == INTEGER_CST
4422 && TREE_INT_CST_HIGH (xindex) == 0)
4423 *pbitpos += TREE_INT_CST_LOW (xindex);
4424 else
4425 {
4426 /* Either the bit offset calculated above is not constant, or
4427 it overflowed. In either case, redo the multiplication
4428 against the size in units. This is especially important
4429 in the non-constant case to avoid a division at runtime. */
4430 xindex = fold (build (MULT_EXPR, ssizetype, index,
4431 convert (ssizetype,
4432 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4433
4434 if (contains_placeholder_p (xindex))
4435 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4436
4437 offset = size_binop (PLUS_EXPR, offset, xindex);
4438 }
4439 }
4440 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4441 && ! ((TREE_CODE (exp) == NOP_EXPR
4442 || TREE_CODE (exp) == CONVERT_EXPR)
4443 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4444 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4445 != UNION_TYPE))
4446 && (TYPE_MODE (TREE_TYPE (exp))
4447 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4448 break;
4449
4450 /* If any reference in the chain is volatile, the effect is volatile. */
4451 if (TREE_THIS_VOLATILE (exp))
4452 *pvolatilep = 1;
4453
4454 /* If the offset is non-constant already, then we can't assume any
4455 alignment more than the alignment here. */
4456 if (! integer_zerop (offset))
4457 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4458
4459 exp = TREE_OPERAND (exp, 0);
4460 }
4461
4462 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4463 alignment = MIN (alignment, DECL_ALIGN (exp));
4464 else if (TREE_TYPE (exp) != 0)
4465 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4466
4467 if (integer_zerop (offset))
4468 offset = 0;
4469
4470 if (offset != 0 && contains_placeholder_p (offset))
4471 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4472
4473 *pmode = mode;
4474 *poffset = offset;
4475 *palignment = alignment / BITS_PER_UNIT;
4476 return exp;
4477 }
4478
4479 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4480 static enum memory_use_mode
4481 get_memory_usage_from_modifier (modifier)
4482 enum expand_modifier modifier;
4483 {
4484 switch (modifier)
4485 {
4486 case EXPAND_NORMAL:
4487 case EXPAND_SUM:
4488 return MEMORY_USE_RO;
4489 break;
4490 case EXPAND_MEMORY_USE_WO:
4491 return MEMORY_USE_WO;
4492 break;
4493 case EXPAND_MEMORY_USE_RW:
4494 return MEMORY_USE_RW;
4495 break;
4496 case EXPAND_MEMORY_USE_DONT:
4497 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4498 MEMORY_USE_DONT, because they are modifiers to a call of
4499 expand_expr in the ADDR_EXPR case of expand_expr. */
4500 case EXPAND_CONST_ADDRESS:
4501 case EXPAND_INITIALIZER:
4502 return MEMORY_USE_DONT;
4503 case EXPAND_MEMORY_USE_BAD:
4504 default:
4505 abort ();
4506 }
4507 }
4508 \f
4509 /* Given an rtx VALUE that may contain additions and multiplications,
4510 return an equivalent value that just refers to a register or memory.
4511 This is done by generating instructions to perform the arithmetic
4512 and returning a pseudo-register containing the value.
4513
4514 The returned value may be a REG, SUBREG, MEM or constant. */
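/* E.g. (illustrative): given (plus:SI (reg:SI 100) (const_int 4)),
   force_operand emits an add insn and returns a pseudo register
   holding the sum; a bare (reg:SI 100) comes back unchanged.  */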
4515
4516 rtx
4517 force_operand (value, target)
4518 rtx value, target;
4519 {
4520 register optab binoptab = 0;
4521 /* Use a temporary to force order of execution of calls to
4522 `force_operand'. */
4523 rtx tmp;
4524 register rtx op2;
4525 /* Use subtarget as the target for operand 0 of a binary operation. */
4526 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4527
4528 /* Check for a PIC address load. */
4529 if (flag_pic
4530 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4531 && XEXP (value, 0) == pic_offset_table_rtx
4532 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4533 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4534 || GET_CODE (XEXP (value, 1)) == CONST))
4535 {
4536 if (!subtarget)
4537 subtarget = gen_reg_rtx (GET_MODE (value));
4538 emit_move_insn (subtarget, value);
4539 return subtarget;
4540 }
4541
4542 if (GET_CODE (value) == PLUS)
4543 binoptab = add_optab;
4544 else if (GET_CODE (value) == MINUS)
4545 binoptab = sub_optab;
4546 else if (GET_CODE (value) == MULT)
4547 {
4548 op2 = XEXP (value, 1);
4549 if (!CONSTANT_P (op2)
4550 && !(GET_CODE (op2) == REG && op2 != subtarget))
4551 subtarget = 0;
4552 tmp = force_operand (XEXP (value, 0), subtarget);
4553 return expand_mult (GET_MODE (value), tmp,
4554 force_operand (op2, NULL_RTX),
4555 target, 0);
4556 }
4557
4558 if (binoptab)
4559 {
4560 op2 = XEXP (value, 1);
4561 if (!CONSTANT_P (op2)
4562 && !(GET_CODE (op2) == REG && op2 != subtarget))
4563 subtarget = 0;
4564 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4565 {
4566 binoptab = add_optab;
4567 op2 = negate_rtx (GET_MODE (value), op2);
4568 }
4569
4570 /* Check for an addition with OP2 a constant integer and our first
4571 operand a PLUS of a virtual register and something else. In that
4572 case, we want to emit the sum of the virtual register and the
4573 constant first and then add the other value. This allows virtual
4574 register instantiation to simply modify the constant rather than
4575 creating another one around this addition. */
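	  /* E.g. (illustrative): for
	       (plus (plus (reg virtual-stack-vars) (reg 101)) (const_int 8))
	     we form virtual-stack-vars + 8 first, which instantiation
	     can later fold into a single frame offset, and only then
	     add (reg 101).  */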
4576 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4577 && GET_CODE (XEXP (value, 0)) == PLUS
4578 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4579 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4580 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4581 {
4582 rtx temp = expand_binop (GET_MODE (value), binoptab,
4583 XEXP (XEXP (value, 0), 0), op2,
4584 subtarget, 0, OPTAB_LIB_WIDEN);
4585 return expand_binop (GET_MODE (value), binoptab, temp,
4586 force_operand (XEXP (XEXP (value, 0), 1), 0),
4587 target, 0, OPTAB_LIB_WIDEN);
4588 }
4589
4590 tmp = force_operand (XEXP (value, 0), subtarget);
4591 return expand_binop (GET_MODE (value), binoptab, tmp,
4592 force_operand (op2, NULL_RTX),
4593 target, 0, OPTAB_LIB_WIDEN);
4594 /* We give UNSIGNEDP = 0 to expand_binop
4595 because the only operations we are expanding here are signed ones. */
4596 }
4597 return value;
4598 }
4599 \f
4600 /* Subroutine of expand_expr:
4601 save the non-copied parts (LIST) of an expr (LHS), and return a list
4602 which can restore these values to their previous values,
4603 should something modify their storage. */
4604
4605 static tree
4606 save_noncopied_parts (lhs, list)
4607 tree lhs;
4608 tree list;
4609 {
4610 tree tail;
4611 tree parts = 0;
4612
4613 for (tail = list; tail; tail = TREE_CHAIN (tail))
4614 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4615 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4616 else
4617 {
4618 tree part = TREE_VALUE (tail);
4619 tree part_type = TREE_TYPE (part);
4620 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4621 rtx target = assign_temp (part_type, 0, 1, 1);
4622 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4623 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4624 parts = tree_cons (to_be_saved,
4625 build (RTL_EXPR, part_type, NULL_TREE,
4626 (tree) target),
4627 parts);
4628 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4629 }
4630 return parts;
4631 }
4632
4633 /* Subroutine of expand_expr:
4634 record the non-copied parts (LIST) of an expr (LHS), and return a list
4635 which specifies the initial values of these parts. */
4636
4637 static tree
4638 init_noncopied_parts (lhs, list)
4639 tree lhs;
4640 tree list;
4641 {
4642 tree tail;
4643 tree parts = 0;
4644
4645 for (tail = list; tail; tail = TREE_CHAIN (tail))
4646 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4647 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4648 else
4649 {
4650 tree part = TREE_VALUE (tail);
4651 tree part_type = TREE_TYPE (part);
4652 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4653 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4654 }
4655 return parts;
4656 }
4657
4658 /* Subroutine of expand_expr: return nonzero iff there is no way that
4659 EXP can reference X, which is being modified. TOP_P is nonzero if this
4660 call is going to be used to determine whether we need a temporary
4661 for EXP, as opposed to a recursive call to this function.
4662
4663 It is always safe for this routine to return zero since it merely
4664 searches for optimization opportunities. */
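/* E.g. (illustrative): if X is a MEM and EXP contains a CALL_EXPR
   whose rtl is not yet known, the call must be assumed to clobber
   all of memory, so 0 is returned and the caller uses a temporary.  */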
4665
4666 static int
4667 safe_from_p (x, exp, top_p)
4668 rtx x;
4669 tree exp;
4670 int top_p;
4671 {
4672 rtx exp_rtl = 0;
4673 int i, nops;
4674 static int save_expr_count;
4675 static int save_expr_size = 0;
4676 static tree *save_expr_rewritten;
4677 static tree save_expr_trees[256];
4678
4679 if (x == 0
4680 /* If EXP has varying size, we MUST use a target since we currently
4681 have no way of allocating temporaries of variable size
4682 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4683 So we assume here that something at a higher level has prevented a
4684 clash. This is somewhat bogus, but the best we can do. Only
4685 do this when X is BLKmode and when we are at the top level. */
4686 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4687 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4688 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4689 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4690 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4691 != INTEGER_CST)
4692 && GET_MODE (x) == BLKmode))
4693 return 1;
4694
4695 if (top_p && save_expr_size == 0)
4696 {
4697 int rtn;
4698
4699 save_expr_count = 0;
4700 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
4701 save_expr_rewritten = &save_expr_trees[0];
4702
4703 rtn = safe_from_p (x, exp, 1);
4704
4705 for (i = 0; i < save_expr_count; ++i)
4706 {
4707 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
4708 abort ();
4709 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
4710 }
4711
4712 save_expr_size = 0;
4713
4714 return rtn;
4715 }
4716
4717 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4718 find the underlying pseudo. */
4719 if (GET_CODE (x) == SUBREG)
4720 {
4721 x = SUBREG_REG (x);
4722 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4723 return 0;
4724 }
4725
4726 /* If X is a location in the outgoing argument area, it is always safe. */
4727 if (GET_CODE (x) == MEM
4728 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4729 || (GET_CODE (XEXP (x, 0)) == PLUS
4730 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4731 return 1;
4732
4733 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4734 {
4735 case 'd':
4736 exp_rtl = DECL_RTL (exp);
4737 break;
4738
4739 case 'c':
4740 return 1;
4741
4742 case 'x':
4743 if (TREE_CODE (exp) == TREE_LIST)
4744 return ((TREE_VALUE (exp) == 0
4745 || safe_from_p (x, TREE_VALUE (exp), 0))
4746 && (TREE_CHAIN (exp) == 0
4747 || safe_from_p (x, TREE_CHAIN (exp), 0)));
4748 else if (TREE_CODE (exp) == ERROR_MARK)
4749 return 1; /* An already-visited SAVE_EXPR? */
4750 else
4751 return 0;
4752
4753 case '1':
4754 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
4755
4756 case '2':
4757 case '<':
4758 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4759 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
4760
4761 case 'e':
4762 case 'r':
4763 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4764 the expression. If it is set, we conflict iff we are that rtx or
4765 both are in memory. Otherwise, we check all operands of the
4766 expression recursively. */
4767
4768 switch (TREE_CODE (exp))
4769 {
4770 case ADDR_EXPR:
4771 return (staticp (TREE_OPERAND (exp, 0))
4772 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4773 || TREE_STATIC (exp));
4774
4775 case INDIRECT_REF:
4776 if (GET_CODE (x) == MEM)
4777 return 0;
4778 break;
4779
4780 case CALL_EXPR:
4781 exp_rtl = CALL_EXPR_RTL (exp);
4782 if (exp_rtl == 0)
4783 {
4784 /* Assume that the call will clobber all hard registers and
4785 all of memory. */
4786 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4787 || GET_CODE (x) == MEM)
4788 return 0;
4789 }
4790
4791 break;
4792
4793 case RTL_EXPR:
4794 /* If a sequence exists, we would have to scan every instruction
4795 in the sequence to see if it was safe. This is probably not
4796 worthwhile. */
4797 if (RTL_EXPR_SEQUENCE (exp))
4798 return 0;
4799
4800 exp_rtl = RTL_EXPR_RTL (exp);
4801 break;
4802
4803 case WITH_CLEANUP_EXPR:
4804 exp_rtl = RTL_EXPR_RTL (exp);
4805 break;
4806
4807 case CLEANUP_POINT_EXPR:
4808 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
4809
4810 case SAVE_EXPR:
4811 exp_rtl = SAVE_EXPR_RTL (exp);
4812 if (exp_rtl)
4813 break;
4814
4815 /* This SAVE_EXPR might appear many times in the top-level
4816 safe_from_p() expression, and if it has a complex
4817 subexpression, examining it multiple times could result
4818 in a combinatorial explosion. E.g. on an Alpha
4819 running at least 200MHz, a Fortran test case compiled with
4820 optimization took about 28 minutes to compile -- even though
4821 it was only a few lines long, and the complicated line causing
4822 so much time to be spent in the earlier version of safe_from_p()
4823 had only 293 or so unique nodes.
4824
4825 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
4826 where it is so we can turn it back in the top-level safe_from_p()
4827 when we're done. */
4828
4829 /* For now, don't bother re-sizing the array. */
4830 if (save_expr_count >= save_expr_size)
4831 return 0;
4832 save_expr_rewritten[save_expr_count++] = exp;
4833 TREE_SET_CODE (exp, ERROR_MARK);
4834
4835 nops = tree_code_length[(int) SAVE_EXPR];
4836 for (i = 0; i < nops; i++)
4837 if (TREE_OPERAND (exp, i) != 0
4838 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
4839 return 0;
4840 return 1;
4841
4842 case BIND_EXPR:
4843 /* The only operand we look at is operand 1. The rest aren't
4844 part of the expression. */
4845 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
4846
4847 case METHOD_CALL_EXPR:
4848 /* This takes an rtx argument, but shouldn't appear here. */
4849 abort ();
4850
4851 default:
4852 break;
4853 }
4854
4855 /* If we have an rtx, we do not need to scan our operands. */
4856 if (exp_rtl)
4857 break;
4858
4859 nops = tree_code_length[(int) TREE_CODE (exp)];
4860 for (i = 0; i < nops; i++)
4861 if (TREE_OPERAND (exp, i) != 0
4862 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
4863 return 0;
4864 }
4865
4866 /* If we have an rtl, find any enclosed object. Then see if we conflict
4867 with it. */
4868 if (exp_rtl)
4869 {
4870 if (GET_CODE (exp_rtl) == SUBREG)
4871 {
4872 exp_rtl = SUBREG_REG (exp_rtl);
4873 if (GET_CODE (exp_rtl) == REG
4874 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4875 return 0;
4876 }
4877
4878 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4879 are memory and EXP is not readonly. */
4880 return ! (rtx_equal_p (x, exp_rtl)
4881 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4882 && ! TREE_READONLY (exp)));
4883 }
4884
4885 /* If we reach here, it is safe. */
4886 return 1;
4887 }
4888
4889 /* Subroutine of expand_expr: return nonzero iff EXP is an
4890 expression whose type is statically determinable. */
4891
4892 static int
4893 fixed_type_p (exp)
4894 tree exp;
4895 {
4896 if (TREE_CODE (exp) == PARM_DECL
4897 || TREE_CODE (exp) == VAR_DECL
4898 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4899 || TREE_CODE (exp) == COMPONENT_REF
4900 || TREE_CODE (exp) == ARRAY_REF)
4901 return 1;
4902 return 0;
4903 }
4904
4905 /* Subroutine of expand_expr: return rtx if EXP is a
4906 variable or parameter; else return 0. */
4907
4908 static rtx
4909 var_rtx (exp)
4910 tree exp;
4911 {
4912 STRIP_NOPS (exp);
4913 switch (TREE_CODE (exp))
4914 {
4915 case PARM_DECL:
4916 case VAR_DECL:
4917 return DECL_RTL (exp);
4918 default:
4919 return 0;
4920 }
4921 }
4922 \f
4923 /* expand_expr: generate code for computing expression EXP.
4924 An rtx for the computed value is returned. The value is never null.
4925 In the case of a void EXP, const0_rtx is returned.
4926
4927 The value may be stored in TARGET if TARGET is nonzero.
4928 TARGET is just a suggestion; callers must assume that
4929 the rtx returned may not be the same as TARGET.
4930
4931 If TARGET is CONST0_RTX, it means that the value will be ignored.
4932
4933 If TMODE is not VOIDmode, it suggests generating the
4934 result in mode TMODE. But this is done only when convenient.
4935 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4936 TMODE is just a suggestion; callers must assume that
4937 the rtx returned may not have mode TMODE.
4938
4939 Note that TARGET may have neither TMODE nor MODE. In that case, it
4940 probably will not be used.
4941
4942 If MODIFIER is EXPAND_SUM then when EXP is an addition
4943 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4944 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4945 products as above, or REG or MEM, or constant.
4946 Ordinarily in such cases we would output mul or add instructions
4947 and then return a pseudo reg containing the sum.
4948
4949 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4950 it also marks a label as absolutely required (it can't be dead).
4951 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4952 This is used for outputting expressions used in initializers.
4953
4954 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4955 with a constant address even if that address is not normally legitimate.
4956 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
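/* A typical call elsewhere in this file (illustrative) is

	op0 = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   i.e. no suggested target or mode, and modifier EXPAND_NORMAL (0).
   Under EXPAND_SUM, expanding an address such as &a[i] may instead
   yield an unreduced (plus (reg) (const_int ...)) for the caller's
   address arithmetic to fold.  */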
4957
4958 rtx
4959 expand_expr (exp, target, tmode, modifier)
4960 register tree exp;
4961 rtx target;
4962 enum machine_mode tmode;
4963 enum expand_modifier modifier;
4964 {
4965 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4966 This is static so it will be accessible to our recursive callees. */
4967 static tree placeholder_list = 0;
4968 register rtx op0, op1, temp;
4969 tree type = TREE_TYPE (exp);
4970 int unsignedp = TREE_UNSIGNED (type);
4971 register enum machine_mode mode = TYPE_MODE (type);
4972 register enum tree_code code = TREE_CODE (exp);
4973 optab this_optab;
4974 /* Use subtarget as the target for operand 0 of a binary operation. */
4975 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4976 rtx original_target = target;
4977 int ignore = (target == const0_rtx
4978 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4979 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4980 || code == COND_EXPR)
4981 && TREE_CODE (type) == VOID_TYPE));
4982 tree context;
4983 /* Used by check-memory-usage to make the modifier read-only. */
4984 enum expand_modifier ro_modifier;
4985
4986 /* Make a read-only version of the modifier. */
4987 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4988 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4989 ro_modifier = modifier;
4990 else
4991 ro_modifier = EXPAND_NORMAL;
4992
4993 /* Don't use hard regs as subtargets, because the combiner
4994 can only handle pseudo regs. */
4995 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4996 subtarget = 0;
4997 /* Avoid subtargets inside loops,
4998 since they hide some invariant expressions. */
4999 if (preserve_subexpressions_p ())
5000 subtarget = 0;
5001
5002 /* If we are going to ignore this result, we need only do something
5003 if there is a side-effect somewhere in the expression. If there
5004 is, short-circuit the most common cases here. Note that we must
5005 not call expand_expr with anything but const0_rtx in case this
5006 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
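  /* E.g. (illustrative): for an expression statement "f () + g ();"
     whose value is unused, both calls are expanded with const0_rtx
     as the target so their side effects occur, and const0_rtx is
     returned without forming the sum.  */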
5007
5008 if (ignore)
5009 {
5010 if (! TREE_SIDE_EFFECTS (exp))
5011 return const0_rtx;
5012
5013 /* Ensure we reference a volatile object even if value is ignored. */
5014 if (TREE_THIS_VOLATILE (exp)
5015 && TREE_CODE (exp) != FUNCTION_DECL
5016 && mode != VOIDmode && mode != BLKmode)
5017 {
5018 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5019 if (GET_CODE (temp) == MEM)
5020 temp = copy_to_reg (temp);
5021 return const0_rtx;
5022 }
5023
5024 if (TREE_CODE_CLASS (code) == '1')
5025 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5026 VOIDmode, ro_modifier);
5027 else if (TREE_CODE_CLASS (code) == '2'
5028 || TREE_CODE_CLASS (code) == '<')
5029 {
5030 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5031 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5032 return const0_rtx;
5033 }
5034 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5035 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5036 /* If the second operand has no side effects, just evaluate
5037 the first. */
5038 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5039 VOIDmode, ro_modifier);
5040
5041 target = 0;
5042 }
5043
5044 /* If we will do cse, generate all results into pseudo registers
5045 since 1) that allows cse to find more things
5046 and 2) otherwise cse could produce an insn the machine
5047 cannot support. */
5048
5049 if (! cse_not_expected && mode != BLKmode && target
5050 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5051 target = subtarget;
5052
5053 switch (code)
5054 {
5055 case LABEL_DECL:
5056 {
5057 tree function = decl_function_context (exp);
5058 /* Handle using a label in a containing function. */
5059 if (function != current_function_decl
5060 && function != inline_function_decl && function != 0)
5061 {
5062 struct function *p = find_function_data (function);
5063 /* Allocate in the memory associated with the function
5064 that the label is in. */
5065 push_obstacks (p->function_obstack,
5066 p->function_maybepermanent_obstack);
5067
5068 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5069 label_rtx (exp),
5070 p->forced_labels);
5071 pop_obstacks ();
5072 }
5073 else if (modifier == EXPAND_INITIALIZER)
5074 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5075 label_rtx (exp), forced_labels);
5076 temp = gen_rtx_MEM (FUNCTION_MODE,
5077 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5078 if (function != current_function_decl
5079 && function != inline_function_decl && function != 0)
5080 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5081 return temp;
5082 }
5083
5084 case PARM_DECL:
5085 if (DECL_RTL (exp) == 0)
5086 {
5087 error_with_decl (exp, "prior parameter's size depends on `%s'");
5088 return CONST0_RTX (mode);
5089 }
5090
5091 /* ... fall through ... */
5092
5093 case VAR_DECL:
5094 /* If a static var's type was incomplete when the decl was written,
5095 but the type is complete now, lay out the decl now. */
5096 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5097 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5098 {
5099 push_obstacks_nochange ();
5100 end_temporary_allocation ();
5101 layout_decl (exp, 0);
5102 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5103 pop_obstacks ();
5104 }
5105
5106 /* Only check automatic variables. Currently, function arguments are
5107 not checked (this can be done at compile-time with prototypes).
5108 Aggregates are not checked. */
5109 if (flag_check_memory_usage && code == VAR_DECL
5110 && GET_CODE (DECL_RTL (exp)) == MEM
5111 && DECL_CONTEXT (exp) != NULL_TREE
5112 && ! TREE_STATIC (exp)
5113 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5114 {
5115 enum memory_use_mode memory_usage;
5116 memory_usage = get_memory_usage_from_modifier (modifier);
5117
5118 if (memory_usage != MEMORY_USE_DONT)
5119 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5120 XEXP (DECL_RTL (exp), 0), ptr_mode,
5121 GEN_INT (int_size_in_bytes (type)),
5122 TYPE_MODE (sizetype),
5123 GEN_INT (memory_usage),
5124 TYPE_MODE (integer_type_node));
5125 }
5126
5127 /* ... fall through ... */
5128
5129 case FUNCTION_DECL:
5130 case RESULT_DECL:
5131 if (DECL_RTL (exp) == 0)
5132 abort ();
5133
5134 /* Ensure the variable is marked as used even if it doesn't go
5135 through a parser. If it hasn't been used yet, write out an external
5136 definition. */
5137 if (! TREE_USED (exp))
5138 {
5139 assemble_external (exp);
5140 TREE_USED (exp) = 1;
5141 }
5142
5143 /* Show we haven't gotten RTL for this yet. */
5144 temp = 0;
5145
5146 /* Handle variables inherited from containing functions. */
5147 context = decl_function_context (exp);
5148
5149 /* We treat inline_function_decl as an alias for the current function
5150 because that is the inline function whose vars, types, etc.
5151 are being merged into the current function.
5152 See expand_inline_function. */
5153
5154 if (context != 0 && context != current_function_decl
5155 && context != inline_function_decl
5156 /* If var is static, we don't need a static chain to access it. */
5157 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5158 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5159 {
5160 rtx addr;
5161
5162 /* Mark as non-local and addressable. */
5163 DECL_NONLOCAL (exp) = 1;
5164 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5165 abort ();
5166 mark_addressable (exp);
5167 if (GET_CODE (DECL_RTL (exp)) != MEM)
5168 abort ();
5169 addr = XEXP (DECL_RTL (exp), 0);
5170 if (GET_CODE (addr) == MEM)
5171 addr = gen_rtx_MEM (Pmode,
5172 fix_lexical_addr (XEXP (addr, 0), exp));
5173 else
5174 addr = fix_lexical_addr (addr, exp);
5175 temp = change_address (DECL_RTL (exp), mode, addr);
5176 }
5177
5178 /* This is the case of an array whose size is to be determined
5179 from its initializer, while the initializer is still being parsed.
5180 See expand_decl. */
5181
5182 else if (GET_CODE (DECL_RTL (exp)) == MEM
5183 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5184 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5185 XEXP (DECL_RTL (exp), 0));
5186
5187 /* If DECL_RTL is memory, we are in the normal case and either
5188 the address is not valid or it is not a register and -fforce-addr
5189 is specified, get the address into a register. */
5190
5191 else if (GET_CODE (DECL_RTL (exp)) == MEM
5192 && modifier != EXPAND_CONST_ADDRESS
5193 && modifier != EXPAND_SUM
5194 && modifier != EXPAND_INITIALIZER
5195 && (! memory_address_p (DECL_MODE (exp),
5196 XEXP (DECL_RTL (exp), 0))
5197 || (flag_force_addr
5198 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5199 temp = change_address (DECL_RTL (exp), VOIDmode,
5200 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5201
5202 /* If we got something, return it. But first, set the alignment
5203 if the address is a register. */
5204 if (temp != 0)
5205 {
5206 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5207 mark_reg_pointer (XEXP (temp, 0),
5208 DECL_ALIGN (exp) / BITS_PER_UNIT);
5209
5210 return temp;
5211 }
5212
5213 /* If the mode of DECL_RTL does not match that of the decl, it
5214 must be a promoted value. We return a SUBREG of the wanted mode,
5215 but mark it so that we know that it was already extended. */
5216
5217 if (GET_CODE (DECL_RTL (exp)) == REG
5218 && GET_MODE (DECL_RTL (exp)) != mode)
5219 {
5220 /* Get the signedness used for this variable. Ensure we get the
5221 same mode we got when the variable was declared. */
5222 if (GET_MODE (DECL_RTL (exp))
5223 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5224 abort ();
5225
5226 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5227 SUBREG_PROMOTED_VAR_P (temp) = 1;
5228 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5229 return temp;
5230 }
5231
5232 return DECL_RTL (exp);
5233
5234 case INTEGER_CST:
5235 return immed_double_const (TREE_INT_CST_LOW (exp),
5236 TREE_INT_CST_HIGH (exp),
5237 mode);
5238
5239 case CONST_DECL:
5240 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5241 EXPAND_MEMORY_USE_BAD);
5242
5243 case REAL_CST:
5244 /* If optimized, generate immediate CONST_DOUBLE
5245 which will be turned into memory by reload if necessary.
5246
5247 We used to force a register so that loop.c could see it. But
5248 this does not allow gen_* patterns to perform optimizations with
5249 the constants. It also produces two insns in cases like "x = 1.0;".
5250 On most machines, floating-point constants are not permitted in
5251 many insns, so we'd end up copying it to a register in any case.
5252
5253 Now, we do the copying in expand_binop, if appropriate. */
5254 return immed_real_const (exp);
5255
5256 case COMPLEX_CST:
5257 case STRING_CST:
5258 if (! TREE_CST_RTL (exp))
5259 output_constant_def (exp);
5260
5261 /* TREE_CST_RTL probably contains a constant address.
5262 On RISC machines where a constant address isn't valid,
5263 make some insns to get that address into a register. */
5264 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5265 && modifier != EXPAND_CONST_ADDRESS
5266 && modifier != EXPAND_INITIALIZER
5267 && modifier != EXPAND_SUM
5268 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5269 || (flag_force_addr
5270 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5271 return change_address (TREE_CST_RTL (exp), VOIDmode,
5272 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5273 return TREE_CST_RTL (exp);
5274
5275 case EXPR_WITH_FILE_LOCATION:
5276 {
5277 rtx to_return;
5278 char *saved_input_filename = input_filename;
5279 int saved_lineno = lineno;
5280 input_filename = EXPR_WFL_FILENAME (exp);
5281 lineno = EXPR_WFL_LINENO (exp);
5282 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5283 emit_line_note (input_filename, lineno);
5284 /* Possibly avoid switching back and forth here. */
5285 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5286 input_filename = saved_input_filename;
5287 lineno = saved_lineno;
5288 return to_return;
5289 }
5290
5291 case SAVE_EXPR:
5292 context = decl_function_context (exp);
5293
5294 /* If this SAVE_EXPR was at global context, assume we are an
5295 initialization function and move it into our context. */
5296 if (context == 0)
5297 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5298
5299 /* We treat inline_function_decl as an alias for the current function
5300 because that is the inline function whose vars, types, etc.
5301 are being merged into the current function.
5302 See expand_inline_function. */
5303 if (context == current_function_decl || context == inline_function_decl)
5304 context = 0;
5305
5306 /* If this is non-local, handle it. */
5307 if (context)
5308 {
5309 /* The following call just exists to abort if the context is
5310 not of a containing function. */
5311 find_function_data (context);
5312
5313 temp = SAVE_EXPR_RTL (exp);
5314 if (temp && GET_CODE (temp) == REG)
5315 {
5316 put_var_into_stack (exp);
5317 temp = SAVE_EXPR_RTL (exp);
5318 }
5319 if (temp == 0 || GET_CODE (temp) != MEM)
5320 abort ();
5321 return change_address (temp, mode,
5322 fix_lexical_addr (XEXP (temp, 0), exp));
5323 }
5324 if (SAVE_EXPR_RTL (exp) == 0)
5325 {
5326 if (mode == VOIDmode)
5327 temp = const0_rtx;
5328 else
5329 temp = assign_temp (type, 3, 0, 0);
5330
5331 SAVE_EXPR_RTL (exp) = temp;
5332 if (!optimize && GET_CODE (temp) == REG)
5333 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5334 save_expr_regs);
5335
5336 /* If the mode of TEMP does not match that of the expression, it
5337 must be a promoted value. We pass store_expr a SUBREG of the
5338 wanted mode but mark it so that we know that it was already
5339 extended. Note that `unsignedp' was modified above in
5340 this case. */
5341
5342 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5343 {
5344 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5345 SUBREG_PROMOTED_VAR_P (temp) = 1;
5346 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5347 }
5348
5349 if (temp == const0_rtx)
5350 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5351 EXPAND_MEMORY_USE_BAD);
5352 else
5353 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5354
5355 TREE_USED (exp) = 1;
5356 }
5357
5358 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5359 must be a promoted value. We return a SUBREG of the wanted mode,
5360 but mark it so that we know that it was already extended. */
5361
5362 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5363 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5364 {
5365 /* Compute the signedness and make the proper SUBREG. */
5366 promote_mode (type, mode, &unsignedp, 0);
5367 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5368 SUBREG_PROMOTED_VAR_P (temp) = 1;
5369 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5370 return temp;
5371 }
5372
5373 return SAVE_EXPR_RTL (exp);
5374
5375 case UNSAVE_EXPR:
5376 {
5377 rtx temp;
5378 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5379 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5380 return temp;
5381 }
5382
5383 case PLACEHOLDER_EXPR:
5384 {
5385 tree placeholder_expr;
5386
5387 /* If there is an object at the head of the placeholder list,
5388 see if any object in it is of type TYPE or a pointer to such a
5389 type. For further information, see tree.def. */
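/* For example, if TYPE is `struct S', the first loop below looks
for a list entry of type `struct S'; failing that, the second
loop looks for one of type `struct S *', which is then
dereferenced. */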
5390 for (placeholder_expr = placeholder_list;
5391 placeholder_expr != 0;
5392 placeholder_expr = TREE_CHAIN (placeholder_expr))
5393 {
5394 tree need_type = TYPE_MAIN_VARIANT (type);
5395 tree object = 0;
5396 tree old_list = placeholder_list;
5397 tree elt;
5398
5399 /* Find the outermost reference that is of the type we want.
5400 If none, see if any object has a type that is a pointer to
5401 the type we want. */
5402 for (elt = TREE_PURPOSE (placeholder_expr);
5403 elt != 0 && object == 0;
5404 elt
5405 = ((TREE_CODE (elt) == COMPOUND_EXPR
5406 || TREE_CODE (elt) == COND_EXPR)
5407 ? TREE_OPERAND (elt, 1)
5408 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5409 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5410 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5411 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5412 ? TREE_OPERAND (elt, 0) : 0))
5413 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5414 object = elt;
5415
5416 for (elt = TREE_PURPOSE (placeholder_expr);
5417 elt != 0 && object == 0;
5418 elt
5419 = ((TREE_CODE (elt) == COMPOUND_EXPR
5420 || TREE_CODE (elt) == COND_EXPR)
5421 ? TREE_OPERAND (elt, 1)
5422 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5423 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5424 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5425 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5426 ? TREE_OPERAND (elt, 0) : 0))
5427 if (POINTER_TYPE_P (TREE_TYPE (elt))
5428 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5429 == need_type))
5430 object = build1 (INDIRECT_REF, need_type, elt);
5431
5432 if (object != 0)
5433 {
5434 /* Expand this object, skipping the list entries before
5435 the one in which it was found, in case it is itself a
5436 PLACEHOLDER_EXPR; in that case, we want to translate it
5437 using subsequent entries. */
5438 placeholder_list = TREE_CHAIN (placeholder_expr);
5439 temp = expand_expr (object, original_target, tmode,
5440 ro_modifier);
5441 placeholder_list = old_list;
5442 return temp;
5443 }
5444 }
5445 }
5446
5447 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5448 abort ();
5449
5450 case WITH_RECORD_EXPR:
5451 /* Put the object on the placeholder list, expand our first operand,
5452 and pop the list. */
5453 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5454 placeholder_list);
5455 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5456 tmode, ro_modifier);
5457 placeholder_list = TREE_CHAIN (placeholder_list);
5458 return target;
5459
5460 case EXIT_EXPR:
5461 expand_exit_loop_if_false (NULL_PTR,
5462 invert_truthvalue (TREE_OPERAND (exp, 0)));
5463 return const0_rtx;
5464
5465 case LOOP_EXPR:
5466 push_temp_slots ();
5467 expand_start_loop (1);
5468 expand_expr_stmt (TREE_OPERAND (exp, 0));
5469 expand_end_loop ();
5470 pop_temp_slots ();
5471
5472 return const0_rtx;
5473
5474 case BIND_EXPR:
5475 {
5476 tree vars = TREE_OPERAND (exp, 0);
5477 int vars_need_expansion = 0;
5478
5479 /* Need to open a binding contour here because
5480 if there are any cleanups they must be contained here. */
5481 expand_start_bindings (0);
5482
5483 /* Mark the corresponding BLOCK for output in its proper place. */
5484 if (TREE_OPERAND (exp, 2) != 0
5485 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5486 insert_block (TREE_OPERAND (exp, 2));
5487
5488 /* If VARS have not yet been expanded, expand them now. */
5489 while (vars)
5490 {
5491 if (DECL_RTL (vars) == 0)
5492 {
5493 vars_need_expansion = 1;
5494 expand_decl (vars);
5495 }
5496 expand_decl_init (vars);
5497 vars = TREE_CHAIN (vars);
5498 }
5499
5500 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5501
5502 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5503
5504 return temp;
5505 }
5506
5507 case RTL_EXPR:
5508 if (RTL_EXPR_SEQUENCE (exp))
5509 {
5510 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5511 abort ();
5512 emit_insns (RTL_EXPR_SEQUENCE (exp));
5513 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5514 }
5515 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5516 free_temps_for_rtl_expr (exp);
5517 return RTL_EXPR_RTL (exp);
5518
5519 case CONSTRUCTOR:
5520 /* If we don't need the result, just ensure we evaluate any
5521 subexpressions. */
5522 if (ignore)
5523 {
5524 tree elt;
5525 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5526 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5527 EXPAND_MEMORY_USE_BAD);
5528 return const0_rtx;
5529 }
5530
5531 /* All elts simple constants => refer to a constant in memory. But
5532 if this is a non-BLKmode mode, let it store a field at a time
5533 since that should make a CONST_INT or CONST_DOUBLE when we
5534 fold. Likewise, if we have a target we can use, it is best to
5535 store directly into the target unless the type is large enough
5536 that memcpy will be used. If we are making an initializer and
5537 all operands are constant, put it in memory as well. */
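/* E.g. initializing a local aggregate from an all-constant
constructor such as {1, 2, 3}: output_constant_def emits the
value once as static data, and we refer to (or copy from) that
object rather than storing a field at a time. */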
5538 else if ((TREE_STATIC (exp)
5539 && ((mode == BLKmode
5540 && ! (target != 0 && safe_from_p (target, exp, 1)))
5541 || TREE_ADDRESSABLE (exp)
5542 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5543 && (move_by_pieces_ninsns
5544 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5545 TYPE_ALIGN (type) / BITS_PER_UNIT)
5546 > MOVE_RATIO)
5547 && ! mostly_zeros_p (exp))))
5548 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5549 {
5550 rtx constructor = output_constant_def (exp);
5551 if (modifier != EXPAND_CONST_ADDRESS
5552 && modifier != EXPAND_INITIALIZER
5553 && modifier != EXPAND_SUM
5554 && (! memory_address_p (GET_MODE (constructor),
5555 XEXP (constructor, 0))
5556 || (flag_force_addr
5557 && GET_CODE (XEXP (constructor, 0)) != REG)))
5558 constructor = change_address (constructor, VOIDmode,
5559 XEXP (constructor, 0));
5560 return constructor;
5561 }
5562
5563 else
5564 {
5565 /* Handle calls that pass values in multiple non-contiguous
5566 locations. The Irix 6 ABI has examples of this. */
5567 if (target == 0 || ! safe_from_p (target, exp, 1)
5568 || GET_CODE (target) == PARALLEL)
5569 {
5570 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5571 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5572 else
5573 target = assign_temp (type, 0, 1, 1);
5574 }
5575
5576 if (TREE_READONLY (exp))
5577 {
5578 if (GET_CODE (target) == MEM)
5579 target = copy_rtx (target);
5580
5581 RTX_UNCHANGING_P (target) = 1;
5582 }
5583
5584 store_constructor (exp, target, 0);
5585 return target;
5586 }
5587
5588 case INDIRECT_REF:
5589 {
5590 tree exp1 = TREE_OPERAND (exp, 0);
5591 tree exp2;
5592 tree index;
5593 tree string = string_constant (exp1, &index);
5594 int i;
5595
5596 /* Try to optimize reads from const strings. */
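/* E.g. `*("abc" + 1)' reads a known byte of a string literal and
can be folded right here to the constant 'b'. */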
5597 if (string
5598 && TREE_CODE (string) == STRING_CST
5599 && TREE_CODE (index) == INTEGER_CST
5600 && !TREE_INT_CST_HIGH (index)
5601 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5602 && GET_MODE_CLASS (mode) == MODE_INT
5603 && GET_MODE_SIZE (mode) == 1
5604 && modifier != EXPAND_MEMORY_USE_WO)
5605 return GEN_INT (TREE_STRING_POINTER (string)[i]);
5606
5607 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5608 op0 = memory_address (mode, op0);
5609
5610 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5611 {
5612 enum memory_use_mode memory_usage;
5613 memory_usage = get_memory_usage_from_modifier (modifier);
5614
5615 if (memory_usage != MEMORY_USE_DONT)
5616 {
5617 in_check_memory_usage = 1;
5618 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5619 op0, ptr_mode,
5620 GEN_INT (int_size_in_bytes (type)),
5621 TYPE_MODE (sizetype),
5622 GEN_INT (memory_usage),
5623 TYPE_MODE (integer_type_node));
5624 in_check_memory_usage = 0;
5625 }
5626 }
5627
5628 temp = gen_rtx_MEM (mode, op0);
5629 /* If address was computed by addition,
5630 mark this as an element of an aggregate. */
5631 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5632 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5633 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5634 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5635 || (TREE_CODE (exp1) == ADDR_EXPR
5636 && (exp2 = TREE_OPERAND (exp1, 0))
5637 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5638 MEM_IN_STRUCT_P (temp) = 1;
5639 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5640
5641 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5642 here, because, in C and C++, the fact that a location is accessed
5643 through a pointer to const does not mean that the value there can
5644 never change. Languages where it can never change should
5645 also set TREE_STATIC. */
5646 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5647 return temp;
5648 }
5649
5650 case ARRAY_REF:
5651 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5652 abort ();
5653
5654 {
5655 tree array = TREE_OPERAND (exp, 0);
5656 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5657 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5658 tree index = TREE_OPERAND (exp, 1);
5659 tree index_type = TREE_TYPE (index);
5660 HOST_WIDE_INT i;
5661
5662 /* Optimize the special case of a zero lower bound.
5663
5664 We convert the low_bound to sizetype to avoid some problems
5665 with constant folding. (E.g. suppose the lower bound is 1,
5666 and its mode is QI. Without the conversion, (ARRAY
5667 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5668 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5669
5670 But sizetype isn't quite right either (especially if
5671 the low bound is negative). FIXME */
5672
5673 if (! integer_zerop (low_bound))
5674 index = fold (build (MINUS_EXPR, index_type, index,
5675 convert (sizetype, low_bound)));
5676
5677 /* Fold an expression like: "foo"[2].
5678 This is not done in fold so it won't happen inside &.
5679 Don't fold if this is for wide characters since it's too
5680 difficult to do correctly and this is a very rare case. */
5681
5682 if (TREE_CODE (array) == STRING_CST
5683 && TREE_CODE (index) == INTEGER_CST
5684 && !TREE_INT_CST_HIGH (index)
5685 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5686 && GET_MODE_CLASS (mode) == MODE_INT
5687 && GET_MODE_SIZE (mode) == 1)
5688 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5689
5690 /* If this is a constant index into a constant array,
5691 just get the value from the array. Handle both the cases when
5692 we have an explicit constructor and when our operand is a variable
5693 that was declared const. */
5694
5695 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5696 {
5697 if (TREE_CODE (index) == INTEGER_CST
5698 && TREE_INT_CST_HIGH (index) == 0)
5699 {
5700 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5701
5702 i = TREE_INT_CST_LOW (index);
5703 while (elem && i--)
5704 elem = TREE_CHAIN (elem);
5705 if (elem)
5706 return expand_expr (fold (TREE_VALUE (elem)), target,
5707 tmode, ro_modifier);
5708 }
5709 }
5710
5711 else if (optimize >= 1
5712 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5713 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5714 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5715 {
5716 if (TREE_CODE (index) == INTEGER_CST)
5717 {
5718 tree init = DECL_INITIAL (array);
5719
5720 i = TREE_INT_CST_LOW (index);
5721 if (TREE_CODE (init) == CONSTRUCTOR)
5722 {
5723 tree elem = CONSTRUCTOR_ELTS (init);
5724
5725 while (elem
5726 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5727 elem = TREE_CHAIN (elem);
5728 if (elem)
5729 return expand_expr (fold (TREE_VALUE (elem)), target,
5730 tmode, ro_modifier);
5731 }
5732 else if (TREE_CODE (init) == STRING_CST
5733 && TREE_INT_CST_HIGH (index) == 0
5734 && (TREE_INT_CST_LOW (index)
5735 < TREE_STRING_LENGTH (init)))
5736 return (GEN_INT
5737 (TREE_STRING_POINTER
5738 (init)[TREE_INT_CST_LOW (index)]));
5739 }
5740 }
5741 }
5742
5743 /* ... fall through ... */
5744
5745 case COMPONENT_REF:
5746 case BIT_FIELD_REF:
5747 /* If the operand is a CONSTRUCTOR, we can just extract the
5748 appropriate field if it is present. Don't do this if we have
5749 already written the data since we want to refer to that copy
5750 and varasm.c assumes that's what we'll do. */
5751 if (code != ARRAY_REF
5752 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5753 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5754 {
5755 tree elt;
5756
5757 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5758 elt = TREE_CHAIN (elt))
5759 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5760 /* We can normally use the value of the field in the
5761 CONSTRUCTOR. However, if this is a bitfield in
5762 an integral mode that we can fit in a HOST_WIDE_INT,
5763 we must mask only the number of bits in the bitfield,
5764 since this is done implicitly by the constructor. If
5765 the bitfield does not meet either of those conditions,
5766 we can't do this optimization. */
5767 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5768 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5769 == MODE_INT)
5770 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5771 <= HOST_BITS_PER_WIDE_INT))))
5772 {
5773 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5774 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5775 {
5776 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5777
5778 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5779 {
5780 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5781 op0 = expand_and (op0, op1, target);
5782 }
5783 else
5784 {
5785 enum machine_mode imode
5786 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5787 tree count
5788 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
5789 0);
5790
5791 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5792 target, 0);
5793 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5794 target, 0);
5795 }
5796 }
5797
5798 return op0;
5799 }
5800 }
5801
5802 {
5803 enum machine_mode mode1;
5804 int bitsize;
5805 int bitpos;
5806 tree offset;
5807 int volatilep = 0;
5808 int alignment;
5809 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5810 &mode1, &unsignedp, &volatilep,
5811 &alignment);
5812
5813 /* If we got back the original object, something is wrong. Perhaps
5814 we are evaluating an expression too early. In any event, don't
5815 infinitely recurse. */
5816 if (tem == exp)
5817 abort ();
5818
5819 /* If TEM's type is a union of variable size, pass TARGET to the inner
5820 computation, since it will need a temporary and TARGET is known
5821 to suffice. This occurs in unchecked conversion in Ada. */
5822
5823 op0 = expand_expr (tem,
5824 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5825 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5826 != INTEGER_CST)
5827 ? target : NULL_RTX),
5828 VOIDmode,
5829 modifier == EXPAND_INITIALIZER
5830 ? modifier : EXPAND_NORMAL);
5831
5832 /* If this is a constant, put it into a register if it is a
5833 legitimate constant and memory if it isn't. */
5834 if (CONSTANT_P (op0))
5835 {
5836 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5837 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5838 op0 = force_reg (mode, op0);
5839 else
5840 op0 = validize_mem (force_const_mem (mode, op0));
5841 }
5842
5843 if (offset != 0)
5844 {
5845 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5846
5847 if (GET_CODE (op0) != MEM)
5848 abort ();
5849
5850 if (GET_MODE (offset_rtx) != ptr_mode)
5851 {
5852 #ifdef POINTERS_EXTEND_UNSIGNED
5853 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
5854 #else
5855 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5856 #endif
5857 }
5858
5859 op0 = change_address (op0, VOIDmode,
5860 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
5861 force_reg (ptr_mode, offset_rtx)));
5862 }
5863
5864 /* Don't forget about volatility even if this is a bitfield. */
5865 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5866 {
5867 op0 = copy_rtx (op0);
5868 MEM_VOLATILE_P (op0) = 1;
5869 }
5870
5871 /* Check the access. */
5872 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5873 {
5874 enum memory_use_mode memory_usage;
5875 memory_usage = get_memory_usage_from_modifier (modifier);
5876
5877 if (memory_usage != MEMORY_USE_DONT)
5878 {
5879 rtx to;
5880 int size;
5881
5882 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5883 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5884
5885 /* Check the access right of the pointer. */
5886 if (size > BITS_PER_UNIT)
5887 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5888 to, ptr_mode,
5889 GEN_INT (size / BITS_PER_UNIT),
5890 TYPE_MODE (sizetype),
5891 GEN_INT (memory_usage),
5892 TYPE_MODE (integer_type_node));
5893 }
5894 }
5895
5896 /* In cases where an aligned union has an unaligned object
5897 as a field, we might be extracting a BLKmode value from
5898 an integer-mode (e.g., SImode) object. Handle this case
5899 by doing the extract into an object as wide as the field
5900 (which we know to be the width of a basic mode), then
5901 storing into memory, and changing the mode to BLKmode.
5902 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5903 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5904 if (mode1 == VOIDmode
5905 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5906 || (modifier != EXPAND_CONST_ADDRESS
5907 && modifier != EXPAND_INITIALIZER
5908 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5909 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5910 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5911 /* If the field isn't aligned enough to fetch as a memref,
5912 fetch it as a bit field. */
5913 || (SLOW_UNALIGNED_ACCESS
5914 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5915 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5916 {
5917 enum machine_mode ext_mode = mode;
5918
5919 if (ext_mode == BLKmode)
5920 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5921
5922 if (ext_mode == BLKmode)
5923 {
5924 /* In this case, BITPOS must start at a byte boundary and
5925 TARGET, if specified, must be a MEM. */
5926 if (GET_CODE (op0) != MEM
5927 || (target != 0 && GET_CODE (target) != MEM)
5928 || bitpos % BITS_PER_UNIT != 0)
5929 abort ();
5930
5931 op0 = change_address (op0, VOIDmode,
5932 plus_constant (XEXP (op0, 0),
5933 bitpos / BITS_PER_UNIT));
5934 if (target == 0)
5935 target = assign_temp (type, 0, 1, 1);
5936
5937 emit_block_move (target, op0,
5938 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5939 / BITS_PER_UNIT),
5940 1);
5941
5942 return target;
5943 }
5944
5945 op0 = validize_mem (op0);
5946
5947 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5948 mark_reg_pointer (XEXP (op0, 0), alignment);
5949
5950 op0 = extract_bit_field (op0, bitsize, bitpos,
5951 unsignedp, target, ext_mode, ext_mode,
5952 alignment,
5953 int_size_in_bytes (TREE_TYPE (tem)));
5954
5955 /* If the result is a record type and BITSIZE is narrower than
5956 the mode of OP0, an integral mode, and this is a big endian
5957 machine, we must put the field into the high-order bits. */
5958 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5959 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5960 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5961 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5962 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5963 - bitsize),
5964 op0, 1);
5965
5966 if (mode == BLKmode)
5967 {
5968 rtx new_rtx = assign_stack_temp (ext_mode,
5969 bitsize / BITS_PER_UNIT, 0);
5970
5971 emit_move_insn (new_rtx, op0);
5972 op0 = copy_rtx (new_rtx);
5973 PUT_MODE (op0, BLKmode);
5974 MEM_IN_STRUCT_P (op0) = 1;
5975 }
5976
5977 return op0;
5978 }
5979
5980 /* If the result is BLKmode, use that to access the object
5981 now as well. */
5982 if (mode == BLKmode)
5983 mode1 = BLKmode;
5984
5985 /* Get a reference to just this component. */
5986 if (modifier == EXPAND_CONST_ADDRESS
5987 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5988 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
5989 (bitpos / BITS_PER_UNIT)));
5990 else
5991 op0 = change_address (op0, mode1,
5992 plus_constant (XEXP (op0, 0),
5993 (bitpos / BITS_PER_UNIT)));
5994 if (GET_CODE (XEXP (op0, 0)) == REG)
5995 mark_reg_pointer (XEXP (op0, 0), alignment);
5996
5997 MEM_IN_STRUCT_P (op0) = 1;
5998 MEM_VOLATILE_P (op0) |= volatilep;
5999 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6000 || modifier == EXPAND_CONST_ADDRESS
6001 || modifier == EXPAND_INITIALIZER)
6002 return op0;
6003 else if (target == 0)
6004 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6005
6006 convert_move (target, op0, unsignedp);
6007 return target;
6008 }
6009
6010 /* Intended for a reference to a buffer of a file-object in Pascal.
6011 But it's not certain that a special tree code will really be
6012 necessary for these. INDIRECT_REF might work for them. */
6013 case BUFFER_REF:
6014 abort ();
6015
6016 case IN_EXPR:
6017 {
6018 /* Pascal set IN expression.
6019
6020 Algorithm:
6021 rlo = set_low - (set_low%bits_per_word);
6022 the_word = set [ (index - rlo)/bits_per_word ];
6023 bit_index = index % bits_per_word;
6024 bitmask = 1 << bit_index;
6025 return !!(the_word & bitmask); */
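/* E.g. with 8-bit units, testing `19 in s' where s has low bound 8:
rlo = 8, the_word = set[(19 - 8) / 8] = set[1], bit_index = 19 % 8 = 3,
and bitmask = 1 << 3. */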
6026
6027 tree set = TREE_OPERAND (exp, 0);
6028 tree index = TREE_OPERAND (exp, 1);
6029 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6030 tree set_type = TREE_TYPE (set);
6031 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6032 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6033 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6034 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6035 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6036 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6037 rtx setaddr = XEXP (setval, 0);
6038 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6039 rtx rlow;
6040 rtx diff, quo, rem, addr, bit, result;
6041
6042 preexpand_calls (exp);
6043
6044 /* If domain is empty, answer is no. Likewise if index is constant
6045 and out of bounds. */
6046 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6047 && TREE_CODE (set_low_bound) == INTEGER_CST
6048 && tree_int_cst_lt (set_high_bound, set_low_bound))
6049 || (TREE_CODE (index) == INTEGER_CST
6050 && TREE_CODE (set_low_bound) == INTEGER_CST
6051 && tree_int_cst_lt (index, set_low_bound))
6052 || (TREE_CODE (set_high_bound) == INTEGER_CST
6053 && TREE_CODE (index) == INTEGER_CST
6054 && tree_int_cst_lt (set_high_bound, index))))
6055 return const0_rtx;
6056
6057 if (target == 0)
6058 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6059
6060 /* If we get here, we have to generate the code for both cases
6061 (in range and out of range). */
6062
6063 op0 = gen_label_rtx ();
6064 op1 = gen_label_rtx ();
6065
6066 if (! (GET_CODE (index_val) == CONST_INT
6067 && GET_CODE (lo_r) == CONST_INT))
6068 {
6069 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
6070 GET_MODE (index_val), iunsignedp, 0);
6071 emit_jump_insn (gen_blt (op1));
6072 }
6073
6074 if (! (GET_CODE (index_val) == CONST_INT
6075 && GET_CODE (hi_r) == CONST_INT))
6076 {
6077 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
6078 GET_MODE (index_val), iunsignedp, 0);
6079 emit_jump_insn (gen_bgt (op1));
6080 }
6081
6082 /* Calculate the element number of bit zero in the first word of
6083 the set, i.e. the low bound rounded down to a multiple of BITS_PER_UNIT. */
6084 if (GET_CODE (lo_r) == CONST_INT)
6085 rlow = GEN_INT (INTVAL (lo_r)
6086 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
6087 else
6088 rlow = expand_binop (index_mode, and_optab, lo_r,
6089 GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
6090 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6091
6092 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6093 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6094
6095 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6096 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6097 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6098 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6099
6100 addr = memory_address (byte_mode,
6101 expand_binop (index_mode, add_optab, quo,
6102 setaddr, NULL_RTX, iunsignedp,
6103 OPTAB_LIB_WIDEN));
6104
6105 /* Extract the bit we want to examine. */
6106 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6107 gen_rtx_MEM (byte_mode, addr),
6108 make_tree (TREE_TYPE (index), rem),
6109 NULL_RTX, 1);
6110 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6111 GET_MODE (target) == byte_mode ? target : 0,
6112 1, OPTAB_LIB_WIDEN);
6113
6114 if (result != target)
6115 convert_move (target, result, 1);
6116
6117 /* Output the code to handle the out-of-range case. */
6118 emit_jump (op0);
6119 emit_label (op1);
6120 emit_move_insn (target, const0_rtx);
6121 emit_label (op0);
6122 return target;
6123 }
6124
6125 case WITH_CLEANUP_EXPR:
6126 if (RTL_EXPR_RTL (exp) == 0)
6127 {
6128 RTL_EXPR_RTL (exp)
6129 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6130 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6131
6132 /* That's it for this cleanup. */
6133 TREE_OPERAND (exp, 2) = 0;
6134 }
6135 return RTL_EXPR_RTL (exp);
6136
6137 case CLEANUP_POINT_EXPR:
6138 {
6139 extern int temp_slot_level;
6140 /* Start a new binding layer that will keep track of all cleanup
6141 actions to be performed. */
6142 expand_start_bindings (0);
6143
6144 target_temp_slot_level = temp_slot_level;
6145
6146 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6147 /* If we're going to use this value, load it up now. */
6148 if (! ignore)
6149 op0 = force_not_mem (op0);
6150 preserve_temp_slots (op0);
6151 expand_end_bindings (NULL_TREE, 0, 0);
6152 }
6153 return op0;
6154
6155 case CALL_EXPR:
6156 /* Check for a built-in function. */
6157 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6158 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6159 == FUNCTION_DECL)
6160 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6161 return expand_builtin (exp, target, subtarget, tmode, ignore);
6162
6163 /* If this call was expanded already by preexpand_calls,
6164 just return the result we got. */
6165 if (CALL_EXPR_RTL (exp) != 0)
6166 return CALL_EXPR_RTL (exp);
6167
6168 return expand_call (exp, target, ignore);
6169
6170 case NON_LVALUE_EXPR:
6171 case NOP_EXPR:
6172 case CONVERT_EXPR:
6173 case REFERENCE_EXPR:
6174 if (TREE_CODE (type) == UNION_TYPE)
6175 {
6176 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6177 if (target == 0)
6178 {
6179 if (mode != BLKmode)
6180 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6181 else
6182 target = assign_temp (type, 0, 1, 1);
6183 }
6184
6185 if (GET_CODE (target) == MEM)
6186 /* Store data into beginning of memory target. */
6187 store_expr (TREE_OPERAND (exp, 0),
6188 change_address (target, TYPE_MODE (valtype), 0), 0);
6189
6190 else if (GET_CODE (target) == REG)
6191 /* Store this field into a union of the proper type. */
6192 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6193 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6194 VOIDmode, 0, 1,
6195 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6196 else
6197 abort ();
6198
6199 /* Return the entire union. */
6200 return target;
6201 }
6202
6203 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6204 {
6205 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6206 ro_modifier);
6207
6208 /* If the signedness of the conversion differs and OP0 is
6209 a promoted SUBREG, clear that indication since we now
6210 have to do the proper extension. */
6211 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6212 && GET_CODE (op0) == SUBREG)
6213 SUBREG_PROMOTED_VAR_P (op0) = 0;
6214
6215 return op0;
6216 }
6217
6218 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6219 if (GET_MODE (op0) == mode)
6220 return op0;
6221
6222 /* If OP0 is a constant, just convert it into the proper mode. */
6223 if (CONSTANT_P (op0))
6224 return
6225 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6226 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6227
6228 if (modifier == EXPAND_INITIALIZER)
6229 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6230
6231 if (target == 0)
6232 return
6233 convert_to_mode (mode, op0,
6234 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6235 else
6236 convert_move (target, op0,
6237 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6238 return target;
6239
6240 case PLUS_EXPR:
6241 /* We come here from MINUS_EXPR when the second operand is a
6242 constant. */
6243 plus_expr:
6244 this_optab = add_optab;
6245
6246 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6247 something else, make sure we add the register to the constant and
6248 then to the other thing. This case can occur during strength
6249 reduction and doing it this way will produce better code if the
6250 frame pointer or argument pointer is eliminated.
6251
6252 fold-const.c will ensure that the constant is always in the inner
6253 PLUS_EXPR, so the only case we need to do anything about is if
6254 sp, ap, or fp is our second argument, in which case we must swap
6255 the innermost first argument and our second argument. */
6256
6257 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6258 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6259 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6260 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6261 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6262 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6263 {
6264 tree t = TREE_OPERAND (exp, 1);
6265
6266 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6267 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6268 }
6269
6270 /* If the result is to be ptr_mode and we are adding an integer to
6271 something, we might be forming a constant. So try to use
6272 plus_constant. If it produces a sum and we can't accept it,
6273 use force_operand. This allows P = &ARR[const] to generate
6274 efficient code on machines where a SYMBOL_REF is not a valid
6275 address.
6276
6277 If this is an EXPAND_SUM call, always return the sum. */
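/* E.g. for `int arr[10];', the address `&arr[4]' can be expanded
via plus_constant into (const (plus (symbol_ref ("arr"))
(const_int 16))) (assuming 4-byte ints), instead of an addition
performed at run time. */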
6278 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6279 || mode == ptr_mode)
6280 {
6281 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6282 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6283 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6284 {
6285 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6286 EXPAND_SUM);
6287 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6288 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6289 op1 = force_operand (op1, target);
6290 return op1;
6291 }
6292
6293 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6294 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6295 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6296 {
6297 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6298 EXPAND_SUM);
6299 if (! CONSTANT_P (op0))
6300 {
6301 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6302 VOIDmode, modifier);
6303 /* Don't go to both_summands if modifier
6304 says it's not right to return a PLUS. */
6305 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6306 goto binop2;
6307 goto both_summands;
6308 }
6309 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6310 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6311 op0 = force_operand (op0, target);
6312 return op0;
6313 }
6314 }
6315
6316 /* No sense saving up arithmetic to be done
6317 if it's all in the wrong mode to form part of an address.
6318 And force_operand won't know whether to sign-extend or
6319 zero-extend. */
6320 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6321 || mode != ptr_mode)
6322 goto binop;
6323
6324 preexpand_calls (exp);
6325 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6326 subtarget = 0;
6327
6328 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6329 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6330
6331 both_summands:
6332 /* Make sure any term that's a sum with a constant comes last. */
6333 if (GET_CODE (op0) == PLUS
6334 && CONSTANT_P (XEXP (op0, 1)))
6335 {
6336 temp = op0;
6337 op0 = op1;
6338 op1 = temp;
6339 }
6340 /* If adding to a sum including a constant,
6341 associate it to put the constant outside. */
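/* E.g. OP0 + (X + 7) is reassociated as (OP0 + X) + 7, keeping
the constant term outermost. */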
6342 if (GET_CODE (op1) == PLUS
6343 && CONSTANT_P (XEXP (op1, 1)))
6344 {
6345 rtx constant_term = const0_rtx;
6346
6347 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6348 if (temp != 0)
6349 op0 = temp;
6350 /* Ensure that MULT comes first if there is one. */
6351 else if (GET_CODE (op0) == MULT)
6352 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6353 else
6354 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6355
6356 /* Let's also eliminate constants from op0 if possible. */
6357 op0 = eliminate_constant_term (op0, &constant_term);
6358
6359 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6360 their sum should be a constant. Form it into OP1, since the
6361 result we want will then be OP0 + OP1. */
6362
6363 temp = simplify_binary_operation (PLUS, mode, constant_term,
6364 XEXP (op1, 1));
6365 if (temp != 0)
6366 op1 = temp;
6367 else
6368 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6369 }
6370
6371 /* Put a constant term last and put a multiplication first. */
6372 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6373 temp = op1, op1 = op0, op0 = temp;
6374
6375 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6376 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6377
6378 case MINUS_EXPR:
6379 /* For initializers, we are allowed to return a MINUS of two
6380 symbolic constants. Here we handle all cases when both operands
6381 are constant. */
6384 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6385 && really_constant_p (TREE_OPERAND (exp, 0))
6386 && really_constant_p (TREE_OPERAND (exp, 1)))
6387 {
6388 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6389 VOIDmode, ro_modifier);
6390 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6391 VOIDmode, ro_modifier);
6392
6393 /* If the last operand is a CONST_INT, use plus_constant of
6394 the negated constant. Else make the MINUS. */
6395 if (GET_CODE (op1) == CONST_INT)
6396 return plus_constant (op0, - INTVAL (op1));
6397 else
6398 return gen_rtx_MINUS (mode, op0, op1);
6399 }
6400 /* Convert A - const to A + (-const). */
6401 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6402 {
6403 tree negated = fold (build1 (NEGATE_EXPR, type,
6404 TREE_OPERAND (exp, 1)));
6405
6406 /* Deal with the case where we can't negate the constant
6407 in TYPE. */
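/* E.g. if TYPE is `unsigned int', the constant 1 cannot be
negated within TYPE, so A - 1 is computed in the corresponding
signed type as A + (-1) and the result converted back. */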
6408 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6409 {
6410 tree newtype = signed_type (type);
6411 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6412 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6413 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6414
6415 if (! TREE_OVERFLOW (newneg))
6416 return expand_expr (convert (type,
6417 build (PLUS_EXPR, newtype,
6418 newop0, newneg)),
6419 target, tmode, ro_modifier);
6420 }
6421 else
6422 {
6423 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6424 goto plus_expr;
6425 }
6426 }
6427 this_optab = sub_optab;
6428 goto binop;
6429
6430 case MULT_EXPR:
6431 preexpand_calls (exp);
6432 /* If first operand is constant, swap them.
6433 Thus the following special case checks need only
6434 check the second operand. */
6435 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6436 {
6437 register tree t1 = TREE_OPERAND (exp, 0);
6438 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6439 TREE_OPERAND (exp, 1) = t1;
6440 }
6441
6442 /* Attempt to return something suitable for generating an
6443 indexed address, for machines that support that. */
6444
6445 if (modifier == EXPAND_SUM && mode == ptr_mode
6446 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6447 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6448 {
6449 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6450 EXPAND_SUM);
6451
6452 /* Apply distributive law if OP0 is x+c. */
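/* E.g. (X + 4) * 3 becomes (X * 3) + 12, a form that can still
feed an indexed addressing mode. */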
6453 if (GET_CODE (op0) == PLUS
6454 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6455 return gen_rtx_PLUS (mode,
6456 gen_rtx_MULT (mode, XEXP (op0, 0),
6457 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6458 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6459 * INTVAL (XEXP (op0, 1))));
6460
6461 if (GET_CODE (op0) != REG)
6462 op0 = force_operand (op0, NULL_RTX);
6463 if (GET_CODE (op0) != REG)
6464 op0 = copy_to_mode_reg (mode, op0);
6465
6466 return gen_rtx_MULT (mode, op0,
6467 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6468 }
6469
6470 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6471 subtarget = 0;
6472
6473 /* Check for multiplying things that have been extended
6474 from a narrower type. If this machine supports multiplying
6475 in that narrower type with a result in the desired type,
6476 do it that way, and avoid the explicit type-conversion. */
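/* E.g. the product of two `short' values widened to `int' can
use a target's HImode-by-HImode-to-SImode multiply pattern (such
as mulhisi3, where one exists) instead of a full SImode
multiply. */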
6477 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6478 && TREE_CODE (type) == INTEGER_TYPE
6479 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6480 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6481 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6482 && int_fits_type_p (TREE_OPERAND (exp, 1),
6483 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6484 /* Don't use a widening multiply if a shift will do. */
6485 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6486 > HOST_BITS_PER_WIDE_INT)
6487 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6488 ||
6489 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6490 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6491 ==
6492 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6493 /* If both operands are extended, they must either both
6494 be zero-extended or both be sign-extended. */
6495 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6496 ==
6497 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6498 {
6499 enum machine_mode innermode
6500 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6501 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6502 ? smul_widen_optab : umul_widen_optab);
6503 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6504 ? umul_widen_optab : smul_widen_optab);
6505 if (mode == GET_MODE_WIDER_MODE (innermode))
6506 {
6507 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6508 {
6509 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6510 NULL_RTX, VOIDmode, 0);
6511 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6512 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6513 VOIDmode, 0);
6514 else
6515 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6516 NULL_RTX, VOIDmode, 0);
6517 goto binop2;
6518 }
6519 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6520 && innermode == word_mode)
6521 {
6522 rtx htem;
6523 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6524 NULL_RTX, VOIDmode, 0);
6525 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6526 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6527 VOIDmode, 0);
6528 else
6529 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6530 NULL_RTX, VOIDmode, 0);
6531 temp = expand_binop (mode, other_optab, op0, op1, target,
6532 unsignedp, OPTAB_LIB_WIDEN);
6533 htem = expand_mult_highpart_adjust (innermode,
6534 gen_highpart (innermode, temp),
6535 op0, op1,
6536 gen_highpart (innermode, temp),
6537 unsignedp);
6538 emit_move_insn (gen_highpart (innermode, temp), htem);
6539 return temp;
6540 }
6541 }
6542 }
6543 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6544 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6545 return expand_mult (mode, op0, op1, target, unsignedp);
6546
6547 case TRUNC_DIV_EXPR:
6548 case FLOOR_DIV_EXPR:
6549 case CEIL_DIV_EXPR:
6550 case ROUND_DIV_EXPR:
6551 case EXACT_DIV_EXPR:
6552 preexpand_calls (exp);
6553 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6554 subtarget = 0;
6555 /* Possible optimization: compute the dividend with EXPAND_SUM;
6556 then, if the divisor is constant, we can optimize the case
6557 where some terms of the dividend have coefficients divisible by it. */
6558 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6559 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6560 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6561
6562 case RDIV_EXPR:
6563 this_optab = flodiv_optab;
6564 goto binop;
6565
6566 case TRUNC_MOD_EXPR:
6567 case FLOOR_MOD_EXPR:
6568 case CEIL_MOD_EXPR:
6569 case ROUND_MOD_EXPR:
6570 preexpand_calls (exp);
6571 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6572 subtarget = 0;
6573 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6574 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6575 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6576
6577 case FIX_ROUND_EXPR:
6578 case FIX_FLOOR_EXPR:
6579 case FIX_CEIL_EXPR:
6580 abort (); /* Not used for C. */
6581
6582 case FIX_TRUNC_EXPR:
6583 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6584 if (target == 0)
6585 target = gen_reg_rtx (mode);
6586 expand_fix (target, op0, unsignedp);
6587 return target;
6588
6589 case FLOAT_EXPR:
6590 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6591 if (target == 0)
6592 target = gen_reg_rtx (mode);
6593 /* expand_float can't figure out what to do if FROM has VOIDmode.
6594 So give it the correct mode. With -O, cse will optimize this. */
6595 if (GET_MODE (op0) == VOIDmode)
6596 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6597 op0);
6598 expand_float (target, op0,
6599 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6600 return target;
6601
6602 case NEGATE_EXPR:
6603 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6604 temp = expand_unop (mode, neg_optab, op0, target, 0);
6605 if (temp == 0)
6606 abort ();
6607 return temp;
6608
6609 case ABS_EXPR:
6610 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6611
6612 /* Handle complex values specially. */
6613 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6614 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6615 return expand_complex_abs (mode, op0, target, unsignedp);
6616
6617 /* Unsigned abs is simply the operand. Testing here means we don't
6618 risk generating incorrect code below. */
6619 if (TREE_UNSIGNED (type))
6620 return op0;
6621
6622 return expand_abs (mode, op0, target, unsignedp,
6623 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
6624
6625 case MAX_EXPR:
6626 case MIN_EXPR:
6627 target = original_target;
6628 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
6629 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6630 || GET_MODE (target) != mode
6631 || (GET_CODE (target) == REG
6632 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6633 target = gen_reg_rtx (mode);
6634 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6635 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6636
6637 /* First try to do it with a special MIN or MAX instruction.
6638 If that does not win, use a conditional jump to select the proper
6639 value. */
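/* I.e. for MAX_EXPR the fallback emits, in effect:
target = op0; if (target >= op1) goto lab; target = op1; lab: */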
6640 this_optab = (TREE_UNSIGNED (type)
6641 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6642 : (code == MIN_EXPR ? smin_optab : smax_optab));
6643
6644 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6645 OPTAB_WIDEN);
6646 if (temp != 0)
6647 return temp;
6648
6649 /* At this point, a MEM target is no longer useful; we will get better
6650 code without it. */
6651
6652 if (GET_CODE (target) == MEM)
6653 target = gen_reg_rtx (mode);
6654
6655 if (target != op0)
6656 emit_move_insn (target, op0);
6657
6658 op0 = gen_label_rtx ();
6659
6660 /* If this mode is an integer too wide to compare properly,
6661 compare word by word. Rely on cse to optimize constant cases. */
6662 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6663 {
6664 if (code == MAX_EXPR)
6665 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6666 target, op1, NULL_RTX, op0);
6667 else
6668 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6669 op1, target, NULL_RTX, op0);
6670 emit_move_insn (target, op1);
6671 }
6672 else
6673 {
6674 if (code == MAX_EXPR)
6675 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6676 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6677 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6678 else
6679 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6680 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6681 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6682 if (temp == const0_rtx)
6683 emit_move_insn (target, op1);
6684 else if (temp != const_true_rtx)
6685 {
6686 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6687 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6688 else
6689 abort ();
6690 emit_move_insn (target, op1);
6691 }
6692 }
6693 emit_label (op0);
6694 return target;
6695
6696 case BIT_NOT_EXPR:
6697 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6698 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6699 if (temp == 0)
6700 abort ();
6701 return temp;
6702
6703 case FFS_EXPR:
6704 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6705 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6706 if (temp == 0)
6707 abort ();
6708 return temp;
6709
6710 /* ??? Can optimize bitwise operations with one arg constant.
6711 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6712 and (a bitwise1 b) bitwise2 b (etc.)
6713 but that is probably not worthwhile. */
6714
6715 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6716 boolean values when we want in all cases to compute both of them. In
6717 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6718 as actual zero-or-1 values and then bitwise anding. In cases where
6719 there cannot be any side effects, better code would be made by
6720 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6721 how to recognize those cases. */
6722
6723 case TRUTH_AND_EXPR:
6724 case BIT_AND_EXPR:
6725 this_optab = and_optab;
6726 goto binop;
6727
6728 case TRUTH_OR_EXPR:
6729 case BIT_IOR_EXPR:
6730 this_optab = ior_optab;
6731 goto binop;
6732
6733 case TRUTH_XOR_EXPR:
6734 case BIT_XOR_EXPR:
6735 this_optab = xor_optab;
6736 goto binop;
6737
6738 case LSHIFT_EXPR:
6739 case RSHIFT_EXPR:
6740 case LROTATE_EXPR:
6741 case RROTATE_EXPR:
6742 preexpand_calls (exp);
6743 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6744 subtarget = 0;
6745 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6746 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6747 unsignedp);
6748
6749 /* Could determine the answer when only additive constants differ. Also,
6750 the addition of one can be handled by changing the condition. */
6751 case LT_EXPR:
6752 case LE_EXPR:
6753 case GT_EXPR:
6754 case GE_EXPR:
6755 case EQ_EXPR:
6756 case NE_EXPR:
6757 preexpand_calls (exp);
6758 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6759 if (temp != 0)
6760 return temp;
6761
6762 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6763 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6764 && original_target
6765 && GET_CODE (original_target) == REG
6766 && (GET_MODE (original_target)
6767 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6768 {
6769 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6770 VOIDmode, 0);
6771
6772 if (temp != original_target)
6773 temp = copy_to_reg (temp);
6774
6775 op1 = gen_label_rtx ();
6776 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6777 GET_MODE (temp), unsignedp, 0);
6778 emit_jump_insn (gen_beq (op1));
6779 emit_move_insn (temp, const1_rtx);
6780 emit_label (op1);
6781 return temp;
6782 }
6783
6784 /* If no set-flag instruction, must generate a conditional
6785 store into a temporary variable. Drop through
6786 and handle this like && and ||. */
6787
6788 case TRUTH_ANDIF_EXPR:
6789 case TRUTH_ORIF_EXPR:
6790 if (! ignore
6791 && (target == 0 || ! safe_from_p (target, exp, 1)
6792 /* Make sure we don't have a hard reg (such as function's return
6793 value) live across basic blocks, if not optimizing. */
6794 || (!optimize && GET_CODE (target) == REG
6795 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6796 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6797
6798 if (target)
6799 emit_clr_insn (target);
6800
6801 op1 = gen_label_rtx ();
6802 jumpifnot (exp, op1);
6803
6804 if (target)
6805 emit_0_to_1_insn (target);
6806
6807 emit_label (op1);
6808 return ignore ? const0_rtx : target;
6809
6810 case TRUTH_NOT_EXPR:
6811 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6812 /* The parser is careful to generate TRUTH_NOT_EXPR
6813 only with operands that are always zero or one. */
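/* So !A is computed simply as A ^ 1. */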
6814 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6815 target, 1, OPTAB_LIB_WIDEN);
6816 if (temp == 0)
6817 abort ();
6818 return temp;
6819
6820 case COMPOUND_EXPR:
6821 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6822 emit_queue ();
6823 return expand_expr (TREE_OPERAND (exp, 1),
6824 (ignore ? const0_rtx : target),
6825 VOIDmode, 0);
6826
6827 case COND_EXPR:
6828 /* If we would have a "singleton" (see below) were it not for a
6829 conversion in each arm, bring that conversion back out. */
6830 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6831 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6832 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6833 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6834 {
6835 tree true_value = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6836 tree false_value = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6837
6838 if ((TREE_CODE_CLASS (TREE_CODE (true_value)) == '2'
6839 && operand_equal_p (false_value, TREE_OPERAND (true_value, 0), 0))
6840 || (TREE_CODE_CLASS (TREE_CODE (false_value)) == '2'
6841 && operand_equal_p (true_value, TREE_OPERAND (false_value, 0), 0))
6842 || (TREE_CODE_CLASS (TREE_CODE (true_value)) == '1'
6843 && operand_equal_p (false_value, TREE_OPERAND (true_value, 0), 0))
6844 || (TREE_CODE_CLASS (TREE_CODE (false_value)) == '1'
6845 && operand_equal_p (true_value, TREE_OPERAND (false_value, 0), 0)))
6846 return expand_expr (build1 (NOP_EXPR, type,
6847 build (COND_EXPR, TREE_TYPE (true_value),
6848 TREE_OPERAND (exp, 0),
6849 true_value, false_value)),
6850 target, tmode, modifier);
6851 }
6852
6853 {
6854 /* Note that COND_EXPRs whose type is a structure or union
6855 are required to be constructed to contain assignments of
6856 a temporary variable, so that we can evaluate them here
6857 for side effect only. If type is void, we must do likewise. */
6858
6859 /* If an arm of the branch requires a cleanup,
6860 only that cleanup is performed. */
6861
6862 tree singleton = 0;
6863 tree binary_op = 0, unary_op = 0;
6864
6865 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6866 convert it to our mode, if necessary. */
6867 if (integer_onep (TREE_OPERAND (exp, 1))
6868 && integer_zerop (TREE_OPERAND (exp, 2))
6869 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6870 {
6871 if (ignore)
6872 {
6873 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6874 ro_modifier);
6875 return const0_rtx;
6876 }
6877
6878 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
6879 if (GET_MODE (op0) == mode)
6880 return op0;
6881
6882 if (target == 0)
6883 target = gen_reg_rtx (mode);
6884 convert_move (target, op0, unsignedp);
6885 return target;
6886 }
6887
6888 /* Check for X ? A + B : A. If we have this, we can copy A to the
6889 output and conditionally add B. Similarly for unary operations.
6890 Don't do this if X has side-effects because those side effects
6891 might affect A or B and the "?" operation is a sequence point in
6892 ANSI. (operand_equal_p tests for side effects.) */
6893
6894 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6895 && operand_equal_p (TREE_OPERAND (exp, 2),
6896 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6897 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6898 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6899 && operand_equal_p (TREE_OPERAND (exp, 1),
6900 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6901 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6902 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6903 && operand_equal_p (TREE_OPERAND (exp, 2),
6904 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6905 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6906 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6907 && operand_equal_p (TREE_OPERAND (exp, 1),
6908 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6909 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6910
6911 /* If we are not to produce a result, we have no target. Otherwise,
6912 if a target was specified use it; it will not be used as an
6913 intermediate target unless it is safe. If no target, use a
6914 temporary. */
6915
6916 if (ignore)
6917 temp = 0;
6918 else if (original_target
6919 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
6920 || (singleton && GET_CODE (original_target) == REG
6921 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6922 && original_target == var_rtx (singleton)))
6923 && GET_MODE (original_target) == mode
6924 #ifdef HAVE_conditional_move
6925 && (! can_conditionally_move_p (mode)
6926 || GET_CODE (original_target) == REG
6927 || TREE_ADDRESSABLE (type))
6928 #endif
6929 && ! (GET_CODE (original_target) == MEM
6930 && MEM_VOLATILE_P (original_target)))
6931 temp = original_target;
6932 else if (TREE_ADDRESSABLE (type))
6933 abort ();
6934 else
6935 temp = assign_temp (type, 0, 0, 1);
6936
6937 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6938 do the test of X as a store-flag operation, do this as
6939 A + ((X != 0) << log C). Similarly for other simple binary
6940 operators. Only do for C == 1 if BRANCH_COST is low. */
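/* E.g. `X ? A + 4 : A' can become A + ((X != 0) << 2), avoiding
a branch entirely when X can be computed as a store-flag value. */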
6941 if (temp && singleton && binary_op
6942 && (TREE_CODE (binary_op) == PLUS_EXPR
6943 || TREE_CODE (binary_op) == MINUS_EXPR
6944 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6945 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6946 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6947 : integer_onep (TREE_OPERAND (binary_op, 1)))
6948 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6949 {
6950 rtx result;
6951 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6952 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6953 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6954 : xor_optab);
6955
6956 /* If we had X ? A : A + 1, do this as A + (X == 0).
6957
6958 We have to invert the truth value here and then put it
6959 back later if do_store_flag fails. We cannot simply copy
6960 TREE_OPERAND (exp, 0) to another variable and modify that
6961 because invert_truthvalue can modify the tree pointed to
6962 by its argument. */
6963 if (singleton == TREE_OPERAND (exp, 1))
6964 TREE_OPERAND (exp, 0)
6965 = invert_truthvalue (TREE_OPERAND (exp, 0));
6966
6967 result = do_store_flag (TREE_OPERAND (exp, 0),
6968 (safe_from_p (temp, singleton, 1)
6969 ? temp : NULL_RTX),
6970 mode, BRANCH_COST <= 1);
6971
6972 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6973 result = expand_shift (LSHIFT_EXPR, mode, result,
6974 build_int_2 (tree_log2
6975 (TREE_OPERAND
6976 (binary_op, 1)),
6977 0),
6978 (safe_from_p (temp, singleton, 1)
6979 ? temp : NULL_RTX), 0);
6980
6981 if (result)
6982 {
6983 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6984 return expand_binop (mode, boptab, op1, result, temp,
6985 unsignedp, OPTAB_LIB_WIDEN);
6986 }
6987 else if (singleton == TREE_OPERAND (exp, 1))
6988 TREE_OPERAND (exp, 0)
6989 = invert_truthvalue (TREE_OPERAND (exp, 0));
6990 }
6991
6992 do_pending_stack_adjust ();
6993 NO_DEFER_POP;
6994 op0 = gen_label_rtx ();
6995
6996 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6997 {
6998 if (temp != 0)
6999 {
7000 /* If the target conflicts with the other operand of the
7001 binary op, we can't use it. Also, we can't use the target
7002 if it is a hard register, because evaluating the condition
7003 might clobber it. */
7004 if ((binary_op
7005 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7006 || (GET_CODE (temp) == REG
7007 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7008 temp = gen_reg_rtx (mode);
7009 store_expr (singleton, temp, 0);
7010 }
7011 else
7012 expand_expr (singleton,
7013 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7014 if (singleton == TREE_OPERAND (exp, 1))
7015 jumpif (TREE_OPERAND (exp, 0), op0);
7016 else
7017 jumpifnot (TREE_OPERAND (exp, 0), op0);
7018
7019 start_cleanup_deferral ();
7020 if (binary_op && temp == 0)
7021 /* Just touch the other operand. */
7022 expand_expr (TREE_OPERAND (binary_op, 1),
7023 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7024 else if (binary_op)
7025 store_expr (build (TREE_CODE (binary_op), type,
7026 make_tree (type, temp),
7027 TREE_OPERAND (binary_op, 1)),
7028 temp, 0);
7029 else
7030 store_expr (build1 (TREE_CODE (unary_op), type,
7031 make_tree (type, temp)),
7032 temp, 0);
7033 op1 = op0;
7034 }
7035 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7036 comparison operator. If we have one of these cases, set the
7037 output to A, branch on A (cse will merge these two references),
7038 then set the output to FOO. */
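/* Illustrative example (added): for

       r = p != 0 ? p : q;

   we store P into the target, branch on the comparison (cse merges
   the two references to P), and overwrite the target with Q only
   when the comparison is false.  */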
7039 else if (temp
7040 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7041 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7042 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7043 TREE_OPERAND (exp, 1), 0)
7044 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7045 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7046 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7047 {
7048 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7049 temp = gen_reg_rtx (mode);
7050 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7051 jumpif (TREE_OPERAND (exp, 0), op0);
7052
7053 start_cleanup_deferral ();
7054 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7055 op1 = op0;
7056 }
7057 else if (temp
7058 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7059 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7060 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7061 TREE_OPERAND (exp, 2), 0)
7062 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7063 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7064 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7065 {
7066 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7067 temp = gen_reg_rtx (mode);
7068 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7069 jumpifnot (TREE_OPERAND (exp, 0), op0);
7070
7071 start_cleanup_deferral ();
7072 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7073 op1 = op0;
7074 }
7075 else
7076 {
7077 op1 = gen_label_rtx ();
7078 jumpifnot (TREE_OPERAND (exp, 0), op0);
7079
7080 start_cleanup_deferral ();
7081 if (temp != 0)
7082 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7083 else
7084 expand_expr (TREE_OPERAND (exp, 1),
7085 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7086 end_cleanup_deferral ();
7087 emit_queue ();
7088 emit_jump_insn (gen_jump (op1));
7089 emit_barrier ();
7090 emit_label (op0);
7091 start_cleanup_deferral ();
7092 if (temp != 0)
7093 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7094 else
7095 expand_expr (TREE_OPERAND (exp, 2),
7096 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7097 }
7098
7099 end_cleanup_deferral ();
7100
7101 emit_queue ();
7102 emit_label (op1);
7103 OK_DEFER_POP;
7104
7105 return temp;
7106 }
7107
7108 case TARGET_EXPR:
7109 {
7110 /* Something needs to be initialized, but we didn't know
7111 where that thing was when building the tree. For example,
7112 it could be the return value of a function, or a parameter
7113 to a function which is laid out on the stack, or a temporary
7114 variable which must be passed by reference.
7115
7116 We guarantee that the expression will either be constructed
7117 or copied into our original target. */
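/* Illustrative example (added; not part of the original source): in
   GNU C or C++, code such as

       struct S f (void);
       struct S s = f ();

   can be represented with a TARGET_EXPR whose slot is S, so the call
   constructs its return value directly in S's storage.  */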
7118
7119 tree slot = TREE_OPERAND (exp, 0);
7120 tree cleanups = NULL_TREE;
7121 tree exp1;
7122
7123 if (TREE_CODE (slot) != VAR_DECL)
7124 abort ();
7125
7126 if (! ignore)
7127 target = original_target;
7128
7129 if (target == 0)
7130 {
7131 if (DECL_RTL (slot) != 0)
7132 {
7133 target = DECL_RTL (slot);
7134 /* If we have already expanded the slot, don't do
7135 it again. (mrs) */
7136 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7137 return target;
7138 }
7139 else
7140 {
7141 target = assign_temp (type, 2, 0, 1);
7142 /* All temp slots at this level must not conflict. */
7143 preserve_temp_slots (target);
7144 DECL_RTL (slot) = target;
7145 if (TREE_ADDRESSABLE (slot))
7146 {
7147 TREE_ADDRESSABLE (slot) = 0;
7148 mark_addressable (slot);
7149 }
7150
7151 /* Since SLOT is not known to the called function
7152 to belong to its stack frame, we must build an explicit
7153 cleanup. This case occurs when we must build up a reference
7154 to pass the reference as an argument. In this case,
7155 it is very likely that such a reference need not be
7156 built here. */
7157
7158 if (TREE_OPERAND (exp, 2) == 0)
7159 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7160 cleanups = TREE_OPERAND (exp, 2);
7161 }
7162 }
7163 else
7164 {
7165 /* This case does occur when expanding a parameter which
7166 needs to be constructed on the stack. The target
7167 is the actual stack address that we want to initialize.
7168 The function we call will perform the cleanup in this case. */
7169
7170 /* If we have already assigned it space, use that space,
7171 not the target we were passed in, as our target
7172 parameter is only a hint. */
7173 if (DECL_RTL (slot) != 0)
7174 {
7175 target = DECL_RTL (slot);
7176 /* If we have already expanded the slot, don't do
7177 it again. (mrs) */
7178 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7179 return target;
7180 }
7181 else
7182 {
7183 DECL_RTL (slot) = target;
7184 /* If we must have an addressable slot, then make sure that
7185 the RTL that we just stored in slot is OK. */
7186 if (TREE_ADDRESSABLE (slot))
7187 {
7188 TREE_ADDRESSABLE (slot) = 0;
7189 mark_addressable (slot);
7190 }
7191 }
7192 }
7193
7194 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7195 /* Mark it as expanded. */
7196 TREE_OPERAND (exp, 1) = NULL_TREE;
7197
7198 TREE_USED (slot) = 1;
7199 store_expr (exp1, target, 0);
7200
7201 expand_decl_cleanup (NULL_TREE, cleanups);
7202
7203 return target;
7204 }
7205
7206 case INIT_EXPR:
7207 {
7208 tree lhs = TREE_OPERAND (exp, 0);
7209 tree rhs = TREE_OPERAND (exp, 1);
7210 tree noncopied_parts = 0;
7211 tree lhs_type = TREE_TYPE (lhs);
7212
7213 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7214 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7215 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7216 TYPE_NONCOPIED_PARTS (lhs_type));
7217 while (noncopied_parts != 0)
7218 {
7219 expand_assignment (TREE_VALUE (noncopied_parts),
7220 TREE_PURPOSE (noncopied_parts), 0, 0);
7221 noncopied_parts = TREE_CHAIN (noncopied_parts);
7222 }
7223 return temp;
7224 }
7225
7226 case MODIFY_EXPR:
7227 {
7228 /* If lhs is complex, expand calls in rhs before computing it.
7229 That's so we don't compute a pointer and save it over a call.
7230 If lhs is simple, compute it first so we can give it as a
7231 target if the rhs is just a call. This avoids an extra temp and copy
7232 and that prevents a partial-subsumption which makes bad code.
7233 Actually we could treat component_ref's of vars like vars. */
7234
7235 tree lhs = TREE_OPERAND (exp, 0);
7236 tree rhs = TREE_OPERAND (exp, 1);
7237 tree noncopied_parts = 0;
7238 tree lhs_type = TREE_TYPE (lhs);
7239
7240 temp = 0;
7241
7242 if (TREE_CODE (lhs) != VAR_DECL
7243 && TREE_CODE (lhs) != RESULT_DECL
7244 && TREE_CODE (lhs) != PARM_DECL
7245 && ! (TREE_CODE (lhs) == INDIRECT_REF
7246 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7247 preexpand_calls (exp);
7248
7249 /* Check for |= or &= of a bitfield of size one into another bitfield
7250 of size 1. In this case, (unless we need the result of the
7251 assignment) we can do this more efficiently with a
7252 test followed by an assignment, if necessary.
7253
7254 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7255 things change so we do, this code should be enhanced to
7256 support it. */
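/* Illustrative example (added): for

       struct { unsigned int a : 1, b : 1; } s;
       s.a |= s.b;

   we test s.b and store a constant 1 into s.a only when s.b is set,
   rather than reading both fields and computing the full bitwise OR.  */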
7257 if (ignore
7258 && TREE_CODE (lhs) == COMPONENT_REF
7259 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7260 || TREE_CODE (rhs) == BIT_AND_EXPR)
7261 && TREE_OPERAND (rhs, 0) == lhs
7262 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7263 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7264 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7265 {
7266 rtx label = gen_label_rtx ();
7267
7268 do_jump (TREE_OPERAND (rhs, 1),
7269 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7270 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7271 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7272 (TREE_CODE (rhs) == BIT_IOR_EXPR
7273 ? integer_one_node
7274 : integer_zero_node)),
7275 0, 0);
7276 do_pending_stack_adjust ();
7277 emit_label (label);
7278 return const0_rtx;
7279 }
7280
7281 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7282 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7283 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7284 TYPE_NONCOPIED_PARTS (lhs_type));
7285
7286 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7287 while (noncopied_parts != 0)
7288 {
7289 expand_assignment (TREE_PURPOSE (noncopied_parts),
7290 TREE_VALUE (noncopied_parts), 0, 0);
7291 noncopied_parts = TREE_CHAIN (noncopied_parts);
7292 }
7293 return temp;
7294 }
7295
7296 case PREINCREMENT_EXPR:
7297 case PREDECREMENT_EXPR:
7298 return expand_increment (exp, 0, ignore);
7299
7300 case POSTINCREMENT_EXPR:
7301 case POSTDECREMENT_EXPR:
7302 /* Faster to treat as pre-increment if result is not used. */
7303 return expand_increment (exp, ! ignore, ignore);
7304
7305 case ADDR_EXPR:
7306 /* If nonzero, TEMP will be set to the address of something that might
7307 be a MEM corresponding to a stack slot. */
7308 temp = 0;
7309
7310 /* Are we taking the address of a nested function? */
7311 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7312 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7313 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7314 && ! TREE_STATIC (exp))
7315 {
7316 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7317 op0 = force_operand (op0, target);
7318 }
7319 /* If we are taking the address of something erroneous, just
7320 return a zero. */
7321 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7322 return const0_rtx;
7323 else
7324 {
7325 /* We make sure to pass const0_rtx down if we came in with
7326 ignore set, to avoid doing the cleanups twice for the same expression. */
7327 op0 = expand_expr (TREE_OPERAND (exp, 0),
7328 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7329 (modifier == EXPAND_INITIALIZER
7330 ? modifier : EXPAND_CONST_ADDRESS));
7331
7332 /* If we are going to ignore the result, OP0 will have been set
7333 to const0_rtx, so just return it. Don't get confused and
7334 think we are taking the address of the constant. */
7335 if (ignore)
7336 return op0;
7337
7338 op0 = protect_from_queue (op0, 0);
7339
7340 /* We would like the object in memory. If it is a constant,
7341 we can have it be statically allocated into memory. For
7342 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7343 memory and store the value into it. */
7344
7345 if (CONSTANT_P (op0))
7346 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7347 op0);
7348 else if (GET_CODE (op0) == MEM)
7349 {
7350 mark_temp_addr_taken (op0);
7351 temp = XEXP (op0, 0);
7352 }
7353
7354 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7355 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7356 {
7357 /* If this object is in a register, it must not
7358 be BLKmode. */
7359 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7360 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7361
7362 mark_temp_addr_taken (memloc);
7363 emit_move_insn (memloc, op0);
7364 op0 = memloc;
7365 }
7366
7367 if (GET_CODE (op0) != MEM)
7368 abort ();
7369
7370 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7371 {
7372 temp = XEXP (op0, 0);
7373 #ifdef POINTERS_EXTEND_UNSIGNED
7374 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7375 && mode == ptr_mode)
7376 temp = convert_memory_address (ptr_mode, temp);
7377 #endif
7378 return temp;
7379 }
7380
7381 op0 = force_operand (XEXP (op0, 0), target);
7382 }
7383
7384 if (flag_force_addr && GET_CODE (op0) != REG)
7385 op0 = force_reg (Pmode, op0);
7386
7387 if (GET_CODE (op0) == REG
7388 && ! REG_USERVAR_P (op0))
7389 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7390
7391 /* If we might have had a temp slot, add an equivalent address
7392 for it. */
7393 if (temp != 0)
7394 update_temp_slot_address (temp, op0);
7395
7396 #ifdef POINTERS_EXTEND_UNSIGNED
7397 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7398 && mode == ptr_mode)
7399 op0 = convert_memory_address (ptr_mode, op0);
7400 #endif
7401
7402 return op0;
7403
7404 case ENTRY_VALUE_EXPR:
7405 abort ();
7406
7407 /* COMPLEX type for Extended Pascal & Fortran */
7408 case COMPLEX_EXPR:
7409 {
7410 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7411 rtx insns;
7412
7413 /* Get the rtx code of the operands. */
7414 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7415 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7416
7417 if (! target)
7418 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7419
7420 start_sequence ();
7421
7422 /* Move the real (op0) and imaginary (op1) parts to their location. */
7423 emit_move_insn (gen_realpart (mode, target), op0);
7424 emit_move_insn (gen_imagpart (mode, target), op1);
7425
7426 insns = get_insns ();
7427 end_sequence ();
7428
7429 /* Complex construction should appear as a single unit. */
7430 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7431 each with a separate pseudo as destination.
7432 It's not correct for flow to treat them as a unit. */
7433 if (GET_CODE (target) != CONCAT)
7434 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7435 else
7436 emit_insns (insns);
7437
7438 return target;
7439 }
7440
7441 case REALPART_EXPR:
7442 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7443 return gen_realpart (mode, op0);
7444
7445 case IMAGPART_EXPR:
7446 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7447 return gen_imagpart (mode, op0);
7448
7449 case CONJ_EXPR:
7450 {
7451 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7452 rtx imag_t;
7453 rtx insns;
7454
7455 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7456
7457 if (! target)
7458 target = gen_reg_rtx (mode);
7459
7460 start_sequence ();
7461
7462 /* Store the realpart and the negated imagpart to target. */
7463 emit_move_insn (gen_realpart (partmode, target),
7464 gen_realpart (partmode, op0));
7465
7466 imag_t = gen_imagpart (partmode, target);
7467 temp = expand_unop (partmode, neg_optab,
7468 gen_imagpart (partmode, op0), imag_t, 0);
7469 if (temp != imag_t)
7470 emit_move_insn (imag_t, temp);
7471
7472 insns = get_insns ();
7473 end_sequence ();
7474
7475 /* Conjugate should appear as a single unit.
7476 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7477 each with a separate pseudo as destination.
7478 It's not correct for flow to treat them as a unit. */
7479 if (GET_CODE (target) != CONCAT)
7480 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7481 else
7482 emit_insns (insns);
7483
7484 return target;
7485 }
7486
7487 case TRY_CATCH_EXPR:
7488 {
7489 tree handler = TREE_OPERAND (exp, 1);
7490
7491 expand_eh_region_start ();
7492
7493 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7494
7495 expand_eh_region_end (handler);
7496
7497 return op0;
7498 }
7499
7500 case POPDCC_EXPR:
7501 {
7502 rtx dcc = get_dynamic_cleanup_chain ();
7503 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
7504 return const0_rtx;
7505 }
7506
7507 case POPDHC_EXPR:
7508 {
7509 rtx dhc = get_dynamic_handler_chain ();
7510 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
7511 return const0_rtx;
7512 }
7513
7514 case ERROR_MARK:
7515 op0 = CONST0_RTX (tmode);
7516 if (op0 != 0)
7517 return op0;
7518 return const0_rtx;
7519
7520 default:
7521 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7522 }
7523
7524 /* Here to do an ordinary binary operator, generating an instruction
7525 from the optab already placed in `this_optab'. */
7526 binop:
7527 preexpand_calls (exp);
7528 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7529 subtarget = 0;
7530 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7531 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7532 binop2:
7533 temp = expand_binop (mode, this_optab, op0, op1, target,
7534 unsignedp, OPTAB_LIB_WIDEN);
7535 if (temp == 0)
7536 abort ();
7537 return temp;
7538 }
7539
7540
7541 \f
7542 /* Return the alignment in bits of EXP, a pointer valued expression.
7543 But don't return more than MAX_ALIGN no matter what.
7544 The alignment returned is, by default, the alignment of the thing that
7545 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7546
7547 Otherwise, look at the expression to see if we can do better, i.e., if the
7548 expression is actually pointing at an object whose alignment is tighter. */
7549
7550 static int
7551 get_pointer_alignment (exp, max_align)
7552 tree exp;
7553 unsigned max_align;
7554 {
7555 unsigned align, inner;
7556
7557 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7558 return 0;
7559
7560 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7561 align = MIN (align, max_align);
7562
7563 while (1)
7564 {
7565 switch (TREE_CODE (exp))
7566 {
7567 case NOP_EXPR:
7568 case CONVERT_EXPR:
7569 case NON_LVALUE_EXPR:
7570 exp = TREE_OPERAND (exp, 0);
7571 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7572 return align;
7573 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7574 align = MIN (inner, max_align);
7575 break;
7576
7577 case PLUS_EXPR:
7578 /* If sum of pointer + int, restrict our maximum alignment to that
7579 imposed by the integer. If not, we can't do any better than
7580 ALIGN. */
7581 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7582 return align;
7583
7584 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7585 & (max_align - 1))
7586 != 0)
7587 max_align >>= 1;
7588
7589 exp = TREE_OPERAND (exp, 0);
7590 break;
7591
7592 case ADDR_EXPR:
7593 /* See what we are pointing at and look at its alignment. */
7594 exp = TREE_OPERAND (exp, 0);
7595 if (TREE_CODE (exp) == FUNCTION_DECL)
7596 align = FUNCTION_BOUNDARY;
7597 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7598 align = DECL_ALIGN (exp);
7599 #ifdef CONSTANT_ALIGNMENT
7600 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7601 align = CONSTANT_ALIGNMENT (exp, align);
7602 #endif
7603 return MIN (align, max_align);
7604
7605 default:
7606 return align;
7607 }
7608 }
7609 }
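/* Illustrative behavior of get_pointer_alignment (added example): for
   the expression (char *) &d, with "double d", the ADDR_EXPR case
   finds DECL_ALIGN (d), so the pointer is known to be double-aligned
   even though its type is only char *; the PLUS_EXPR case would then
   cap the result at 8 bits for "(char *) &d + 1".  */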
7610 \f
7611 /* Return the tree node and offset if a given argument corresponds to
7612 a string constant. */
7613
7614 static tree
7615 string_constant (arg, ptr_offset)
7616 tree arg;
7617 tree *ptr_offset;
7618 {
7619 STRIP_NOPS (arg);
7620
7621 if (TREE_CODE (arg) == ADDR_EXPR
7622 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7623 {
7624 *ptr_offset = integer_zero_node;
7625 return TREE_OPERAND (arg, 0);
7626 }
7627 else if (TREE_CODE (arg) == PLUS_EXPR)
7628 {
7629 tree arg0 = TREE_OPERAND (arg, 0);
7630 tree arg1 = TREE_OPERAND (arg, 1);
7631
7632 STRIP_NOPS (arg0);
7633 STRIP_NOPS (arg1);
7634
7635 if (TREE_CODE (arg0) == ADDR_EXPR
7636 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7637 {
7638 *ptr_offset = arg1;
7639 return TREE_OPERAND (arg0, 0);
7640 }
7641 else if (TREE_CODE (arg1) == ADDR_EXPR
7642 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7643 {
7644 *ptr_offset = arg0;
7645 return TREE_OPERAND (arg1, 0);
7646 }
7647 }
7648
7649 return 0;
7650 }
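/* Illustrative example (added): for the argument "hello" + 2 -- an
   ADDR_EXPR of a STRING_CST under a PLUS_EXPR -- string_constant
   returns the STRING_CST for "hello" and sets *PTR_OFFSET to 2.  */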
7651
7652 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7653 way, because the string could contain a zero byte in the middle.
7654 TREE_STRING_LENGTH is the size of the character array, not the string.
7655
7656 Unfortunately, string_constant can't access the values of const char
7657 arrays with initializers, so neither can we here. */
7658
7659 static tree
7660 c_strlen (src)
7661 tree src;
7662 {
7663 tree offset_node;
7664 int offset, max;
7665 char *ptr;
7666
7667 src = string_constant (src, &offset_node);
7668 if (src == 0)
7669 return 0;
7670 max = TREE_STRING_LENGTH (src);
7671 ptr = TREE_STRING_POINTER (src);
7672 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7673 {
7674 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7675 compute the offset to the following null if we don't know where to
7676 start searching for it. */
7677 int i;
7678 for (i = 0; i < max; i++)
7679 if (ptr[i] == 0)
7680 return 0;
7681 /* We don't know the starting offset, but we do know that the string
7682 has no internal zero bytes. We can assume that the offset falls
7683 within the bounds of the string; otherwise, the programmer deserves
7684 what he gets. Subtract the offset from the length of the string,
7685 and return that. */
7686 /* This would perhaps not be valid if we were dealing with named
7687 arrays in addition to literal string constants. */
7688 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7689 }
7690
7691 /* We have a known offset into the string. Start searching there for
7692 a null character. */
7693 if (offset_node == 0)
7694 offset = 0;
7695 else
7696 {
7697 /* Did we get a long long offset? If so, punt. */
7698 if (TREE_INT_CST_HIGH (offset_node) != 0)
7699 return 0;
7700 offset = TREE_INT_CST_LOW (offset_node);
7701 }
7702 /* If the offset is known to be out of bounds, warn, and call strlen at
7703 runtime. */
7704 if (offset < 0 || offset > max)
7705 {
7706 warning ("offset outside bounds of constant string");
7707 return 0;
7708 }
7709 /* Use strlen to search for the first zero byte. Since any strings
7710 constructed with build_string will have nulls appended, we win even
7711 if we get handed something like (char[4])"abcd".
7712
7713 Since OFFSET is our starting index into the string, no further
7714 calculation is needed. */
7715 return size_int (strlen (ptr + offset));
7716 }
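/* Illustrative results of c_strlen (added example):

       c_strlen on &"hello"[0]     returns size_int (5)
       c_strlen on &"hello"[2]     returns size_int (3)
       c_strlen on &"foo\0bar"[i]  returns 0 (unknown offset with an
                                   interior null byte)

   A zero return makes the caller fall back to a runtime strlen.  */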
7717
7718 rtx
7719 expand_builtin_return_addr (fndecl_code, count, tem)
7720 enum built_in_function fndecl_code;
7721 int count;
7722 rtx tem;
7723 {
7724 int i;
7725
7726 /* Some machines need special handling before we can access
7727 arbitrary frames. For example, on the sparc, we must first flush
7728 all register windows to the stack. */
7729 #ifdef SETUP_FRAME_ADDRESSES
7730 if (count > 0)
7731 SETUP_FRAME_ADDRESSES ();
7732 #endif
7733
7734 /* On the sparc, the return address is not in the frame, it is in a
7735 register. There is no way to access it off of the current frame
7736 pointer, but it can be accessed off the previous frame pointer by
7737 reading the value from the register window save area. */
7738 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7739 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7740 count--;
7741 #endif
7742
7743 /* Scan back COUNT frames to the specified frame. */
7744 for (i = 0; i < count; i++)
7745 {
7746 /* Assume the dynamic chain pointer is in the word that the
7747 frame address points to, unless otherwise specified. */
7748 #ifdef DYNAMIC_CHAIN_ADDRESS
7749 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7750 #endif
7751 tem = memory_address (Pmode, tem);
7752 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
7753 }
7754
7755 /* For __builtin_frame_address, return what we've got. */
7756 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7757 return tem;
7758
7759 /* For __builtin_return_address, get the return address from that
7760 frame. */
7761 #ifdef RETURN_ADDR_RTX
7762 tem = RETURN_ADDR_RTX (count, tem);
7763 #else
7764 tem = memory_address (Pmode,
7765 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7766 tem = gen_rtx_MEM (Pmode, tem);
7767 #endif
7768 return tem;
7769 }
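/* Illustrative use (added example; a sketch only):

       void *ra = __builtin_return_address (0);
       void *fp = __builtin_frame_address (1);

   Each argument must be a nonnegative integer constant giving the
   number of frames to walk; the loop above follows the dynamic chain
   that many times.  */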
7770
7771 /* __builtin_setjmp is passed a pointer to an array of five words (not
7772 all will be used on all machines). It operates similarly to the C
7773 library function of the same name, but is more efficient. Much of
7774 the code below (and for longjmp) is copied from the handling of
7775 non-local gotos.
7776
7777 NOTE: This is intended for use by GNAT and the exception handling
7778 scheme in the compiler and will only work in the method used by
7779 them. */
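/* Layout of the five-word buffer as used below (summary added for
   clarity; see the stores through BUF_ADDR):

       word 0        frame pointer (BUILTIN_SETJMP_FRAME_VALUE)
       word 1        address of the receiver label LAB1
       words 2..4    machine-dependent stack save area (SA_MODE)  */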
7780
7781 rtx
7782 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
7783 rtx buf_addr;
7784 rtx target;
7785 rtx first_label, next_label;
7786 {
7787 rtx lab1 = gen_label_rtx ();
7788 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
7789 enum machine_mode value_mode;
7790 rtx stack_save;
7791
7792 value_mode = TYPE_MODE (integer_type_node);
7793
7794 #ifdef POINTERS_EXTEND_UNSIGNED
7795 buf_addr = convert_memory_address (Pmode, buf_addr);
7796 #endif
7797
7798 buf_addr = force_reg (Pmode, buf_addr);
7799
7800 if (target == 0 || GET_CODE (target) != REG
7801 || REGNO (target) < FIRST_PSEUDO_REGISTER)
7802 target = gen_reg_rtx (value_mode);
7803
7804 emit_queue ();
7805
7806 #ifndef BUILTIN_SETJMP_FRAME_VALUE
7807 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
7808 #endif
7809
7810 /* We store the frame pointer and the address of lab1 in the buffer
7811 and use the rest of it for the stack save area, which is
7812 machine-dependent. */
7813 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
7814 BUILTIN_SETJMP_FRAME_VALUE);
7815 emit_move_insn (validize_mem
7816 (gen_rtx_MEM (Pmode,
7817 plus_constant (buf_addr,
7818 GET_MODE_SIZE (Pmode)))),
7819 gen_rtx_LABEL_REF (Pmode, lab1));
7820
7821 stack_save = gen_rtx_MEM (sa_mode,
7822 plus_constant (buf_addr,
7823 2 * GET_MODE_SIZE (Pmode)));
7824 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
7825
7826 /* If there is further processing to do, do it. */
7827 #ifdef HAVE_builtin_setjmp_setup
7828 if (HAVE_builtin_setjmp_setup)
7829 emit_insn (gen_builtin_setjmp_setup (buf_addr));
7830 #endif
7831
7832 /* Set TARGET to zero and branch to the first-time-through label. */
7833 emit_move_insn (target, const0_rtx);
7834 emit_jump_insn (gen_jump (first_label));
7835 emit_barrier ();
7836 emit_label (lab1);
7837
7838 /* Tell flow about the strange goings on. */
7839 current_function_has_nonlocal_label = 1;
7840
7841 /* Clobber the FP when we get here, so we have to make sure it's
7842 marked as used by this function. */
7843 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
7844
7845 /* Mark the static chain as clobbered here so life information
7846 doesn't get messed up for it. */
7847 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
7848
7849 /* Now put in the code to restore the frame pointer, and argument
7850 pointer, if needed. The code below is from expand_end_bindings
7851 in stmt.c; see detailed documentation there. */
7852 #ifdef HAVE_nonlocal_goto
7853 if (! HAVE_nonlocal_goto)
7854 #endif
7855 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
7856
7857 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7858 if (fixed_regs[ARG_POINTER_REGNUM])
7859 {
7860 #ifdef ELIMINABLE_REGS
7861 int i;
7862 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
7863
7864 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
7865 if (elim_regs[i].from == ARG_POINTER_REGNUM
7866 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
7867 break;
7868
7869 if (i == sizeof elim_regs / sizeof elim_regs [0])
7870 #endif
7871 {
7872 /* Now restore our arg pointer from the address at which it
7873 was saved in our stack frame.
7874 If space hasn't been allocated for it yet, make
7875 some now. */
7876 if (arg_pointer_save_area == 0)
7877 arg_pointer_save_area
7878 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
7879 emit_move_insn (virtual_incoming_args_rtx,
7880 copy_to_reg (arg_pointer_save_area));
7881 }
7882 }
7883 #endif
7884
7885 #ifdef HAVE_builtin_setjmp_receiver
7886 if (HAVE_builtin_setjmp_receiver)
7887 emit_insn (gen_builtin_setjmp_receiver (lab1));
7888 else
7889 #endif
7890 #ifdef HAVE_nonlocal_goto_receiver
7891 if (HAVE_nonlocal_goto_receiver)
7892 emit_insn (gen_nonlocal_goto_receiver ());
7893 else
7894 #endif
7895 {
7896 ; /* Nothing */
7897 }
7898
7899 /* Set TARGET, and branch to the next-time-through label. */
7900 emit_move_insn (target, gen_lowpart (GET_MODE (target), static_chain_rtx));
7901 emit_jump_insn (gen_jump (next_label));
7902 emit_barrier ();
7903
7904 return target;
7905 }
7906
7907 void
7908 expand_builtin_longjmp (buf_addr, value)
7909 rtx buf_addr, value;
7910 {
7911 rtx fp, lab, stack;
7912 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
7913
7914 #ifdef POINTERS_EXTEND_UNSIGNED
7915 buf_addr = convert_memory_address (Pmode, buf_addr);
7916 #endif
7917 buf_addr = force_reg (Pmode, buf_addr);
7918
7919 /* The value sent by longjmp is not allowed to be zero. Force it
7920 to one if so. */
7921 if (GET_CODE (value) == CONST_INT)
7922 {
7923 if (INTVAL (value) == 0)
7924 value = const1_rtx;
7925 }
7926 else
7927 {
7928 lab = gen_label_rtx ();
7929
7930 emit_cmp_insn (value, const0_rtx, NE, NULL_RTX, GET_MODE (value), 0, 0);
7931 emit_jump_insn (gen_bne (lab));
7932 emit_move_insn (value, const1_rtx);
7933 emit_label (lab);
7934 }
7935
7936 /* Make sure the value is in the right mode to be copied to the chain. */
7937 if (GET_MODE (value) != VOIDmode)
7938 value = gen_lowpart (GET_MODE (static_chain_rtx), value);
7939
7940 #ifdef HAVE_builtin_longjmp
7941 if (HAVE_builtin_longjmp)
7942 {
7943 /* Copy the "return value" to the static chain reg. */
7944 emit_move_insn (static_chain_rtx, value);
7945 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
7946 emit_insn (gen_builtin_longjmp (buf_addr));
7947 }
7948 else
7949 #endif
7950 {
7951 fp = gen_rtx_MEM (Pmode, buf_addr);
7952 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
7953 GET_MODE_SIZE (Pmode)));
7954
7955 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
7956 2 * GET_MODE_SIZE (Pmode)));
7957
7958 /* Pick up FP, label, and SP from the block and jump. This code is
7959 from expand_goto in stmt.c; see there for detailed comments. */
7960 #if HAVE_nonlocal_goto
7961 if (HAVE_nonlocal_goto)
7962 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
7963 else
7964 #endif
7965 {
7966 lab = copy_to_reg (lab);
7967
7968 /* Copy the "return value" to the static chain reg. */
7969 emit_move_insn (static_chain_rtx, value);
7970
7971 emit_move_insn (hard_frame_pointer_rtx, fp);
7972 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
7973
7974 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
7975 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
7976 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
7977 emit_indirect_jump (lab);
7978 }
7979 }
7980 }
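/* Illustrative consequence of the zero check above (added example):
   __builtin_longjmp (buf, 0) behaves exactly like
   __builtin_longjmp (buf, 1), mirroring the ISO C rule that
   longjmp (env, 0) makes setjmp return 1.  */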
7981
7982 \f
7983 /* Expand an expression EXP that calls a built-in function,
7984 with result going to TARGET if that's convenient
7985 (and in mode MODE if that's convenient).
7986 SUBTARGET may be used as the target for computing one of EXP's operands.
7987 IGNORE is nonzero if the value is to be ignored. */
7988
7989 #define CALLED_AS_BUILT_IN(NODE) \
7990 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
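/* This distinguishes an explicit __builtin_strlen, say, from a plain
   strlen that merely maps to the same built-in function code: the
   former is expanded inline even when not optimizing (see the
   !optimize checks below).  (Explanatory note added.)  */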
7991
7992 static rtx
7993 expand_builtin (exp, target, subtarget, mode, ignore)
7994 tree exp;
7995 rtx target;
7996 rtx subtarget;
7997 enum machine_mode mode;
7998 int ignore;
7999 {
8000 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8001 tree arglist = TREE_OPERAND (exp, 1);
8002 rtx op0;
8003 rtx lab1, insns;
8004 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8005 optab builtin_optab;
8006
8007 switch (DECL_FUNCTION_CODE (fndecl))
8008 {
8009 case BUILT_IN_ABS:
8010 case BUILT_IN_LABS:
8011 case BUILT_IN_FABS:
8012 /* build_function_call changes these into ABS_EXPR. */
8013 abort ();
8014
8015 case BUILT_IN_SIN:
8016 case BUILT_IN_COS:
8017 /* Treat these like sqrt, but only if the user asks for them. */
8018 if (! flag_fast_math)
8019 break;
8020 case BUILT_IN_FSQRT:
8021 /* If not optimizing, call the library function. */
8022 if (! optimize)
8023 break;
8024
8025 if (arglist == 0
8026 /* Arg could be wrong type if user redeclared this fcn wrong. */
8027 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8028 break;
8029
8030 /* Stabilize and compute the argument. */
8031 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8032 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8033 {
8034 exp = copy_node (exp);
8035 arglist = copy_node (arglist);
8036 TREE_OPERAND (exp, 1) = arglist;
8037 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8038 }
8039 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8040
8041 /* Make a suitable register to place result in. */
8042 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8043
8044 emit_queue ();
8045 start_sequence ();
8046
8047 switch (DECL_FUNCTION_CODE (fndecl))
8048 {
8049 case BUILT_IN_SIN:
8050 builtin_optab = sin_optab; break;
8051 case BUILT_IN_COS:
8052 builtin_optab = cos_optab; break;
8053 case BUILT_IN_FSQRT:
8054 builtin_optab = sqrt_optab; break;
8055 default:
8056 abort ();
8057 }
8058
8059 /* Compute into TARGET.
8060 Set TARGET to wherever the result comes back. */
8061 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8062 builtin_optab, op0, target, 0);
8063
8064 /* If we were unable to expand via the builtin, stop the
8065 sequence (without outputting the insns) and break, causing
8066 a call to the library function. */
8067 if (target == 0)
8068 {
8069 end_sequence ();
8070 break;
8071 }
8072
8073 /* Check the results by default. But if flag_fast_math is turned on,
8074 then assume sqrt will always be called with valid arguments. */
8075
8076 if (! flag_fast_math)
8077 {
8078 /* Don't define the builtin FP instructions
8079 if your machine is not IEEE. */
8080 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8081 abort ();
8082
8083 lab1 = gen_label_rtx ();
8084
8085 /* Test the result; if it is NaN, set errno=EDOM because
8086 the argument was not in the domain. */
8087 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8088 emit_jump_insn (gen_beq (lab1));
8089
8090 #ifdef TARGET_EDOM
8091 {
8092 #ifdef GEN_ERRNO_RTX
8093 rtx errno_rtx = GEN_ERRNO_RTX;
8094 #else
8095 rtx errno_rtx
8096 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8097 #endif
8098
8099 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8100 }
8101 #else
8102 /* We can't set errno=EDOM directly; let the library call do it.
8103 Pop the arguments right away in case the call gets deleted. */
8104 NO_DEFER_POP;
8105 expand_call (exp, target, 0);
8106 OK_DEFER_POP;
8107 #endif
8108
8109 emit_label (lab1);
8110 }
8111
8112 /* Output the entire sequence. */
8113 insns = get_insns ();
8114 end_sequence ();
8115 emit_insns (insns);
8116
8117 return target;
8118
8119 case BUILT_IN_FMOD:
8120 break;
8121
8122 /* __builtin_apply_args returns a block of memory allocated on
8123 the stack into which are stored the arg pointer, structure
8124 value address, static chain, and all the registers that might
8125 possibly be used in performing a function call. The code is
8126 moved to the start of the function so the incoming values are
8127 saved. */
8128 case BUILT_IN_APPLY_ARGS:
8129 /* Don't do __builtin_apply_args more than once in a function.
8130 Save the result of the first call and reuse it. */
8131 if (apply_args_value != 0)
8132 return apply_args_value;
8133 {
8134 /* When this function is called, it means that registers must be
8135 saved on entry to this function. So we migrate the
8136 call to the first insn of this function. */
8137 rtx temp;
8138 rtx seq;
8139
8140 start_sequence ();
8141 temp = expand_builtin_apply_args ();
8142 seq = get_insns ();
8143 end_sequence ();
8144
8145 apply_args_value = temp;
8146
8147 /* Put the sequence after the NOTE that starts the function.
8148 If this is inside a SEQUENCE, make the outer-level insn
8149 chain current, so the code is placed at the start of the
8150 function. */
8151 push_topmost_sequence ();
8152 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8153 pop_topmost_sequence ();
8154 return temp;
8155 }
8156
8157 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8158 FUNCTION with a copy of the parameters described by
8159 ARGUMENTS, and ARGSIZE. It returns a block of memory
8160 allocated on the stack into which are stored all the registers
8161 that might possibly be used for returning the result of a
8162 function. ARGUMENTS is the value returned by
8163 __builtin_apply_args. ARGSIZE is the number of bytes of
8164 arguments that must be copied. ??? How should this value be
8165 computed? We'll also need a safe worst case value for varargs
8166 functions. */
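/* A minimal sketch of how these three builtins combine (added
   example; TARGET_FN is hypothetical):

       void *forward (void)
       {
         void *args   = __builtin_apply_args ();
         void *result = __builtin_apply ((void (*) ()) target_fn,
                                         args, 64);
         __builtin_return (result);
       }

   where 64 is a worst-case guess at the argument block size, since
   the comment above notes there is no portable way to compute it.  */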
8167 case BUILT_IN_APPLY:
8168 if (arglist == 0
8169 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8170 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8171 || TREE_CHAIN (arglist) == 0
8172 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8173 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8174 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8175 return const0_rtx;
8176 else
8177 {
8178 int i;
8179 tree t;
8180 rtx ops[3];
8181
8182 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8183 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8184
8185 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8186 }
8187
8188 /* __builtin_return (RESULT) causes the function to return the
8189 value described by RESULT. RESULT is address of the block of
8190 memory returned by __builtin_apply. */
8191 case BUILT_IN_RETURN:
8192 if (arglist
8193 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8194 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8195 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8196 NULL_RTX, VOIDmode, 0));
8197 return const0_rtx;
8198
8199 case BUILT_IN_SAVEREGS:
8200 /* Don't do __builtin_saveregs more than once in a function.
8201 Save the result of the first call and reuse it. */
8202 if (saveregs_value != 0)
8203 return saveregs_value;
8204 {
8205 /* When this function is called, it means that registers must be
8206 saved on entry to this function. So we migrate the
8207 call to the first insn of this function. */
8208 rtx temp;
8209 rtx seq;
8210
8211 /* Now really call the function. `expand_call' does not call
8212 expand_builtin, so there is no danger of infinite recursion here. */
8213 start_sequence ();
8214
8215 #ifdef EXPAND_BUILTIN_SAVEREGS
8216 /* Do whatever the machine needs done in this case. */
8217 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8218 #else
8219 /* The register where the function returns its value
8220 is likely to have something else in it, such as an argument.
8221 So preserve that register around the call. */
8222
8223 if (value_mode != VOIDmode)
8224 {
8225 rtx valreg = hard_libcall_value (value_mode);
8226 rtx saved_valreg = gen_reg_rtx (value_mode);
8227
8228 emit_move_insn (saved_valreg, valreg);
8229 temp = expand_call (exp, target, ignore);
8230 emit_move_insn (valreg, saved_valreg);
8231 }
8232 else
8233 /* Generate the call, putting the value in a pseudo. */
8234 temp = expand_call (exp, target, ignore);
8235 #endif
8236
8237 seq = get_insns ();
8238 end_sequence ();
8239
8240 saveregs_value = temp;
8241
8242 /* Put the sequence after the NOTE that starts the function.
8243 If this is inside a SEQUENCE, make the outer-level insn
8244 chain current, so the code is placed at the start of the
8245 function. */
8246 push_topmost_sequence ();
8247 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8248 pop_topmost_sequence ();
8249 return temp;
8250 }
8251
8252 /* __builtin_args_info (N) returns word N of the arg space info
8253 for the current function. The number and meanings of words
8254 is controlled by the definition of CUMULATIVE_ARGS. */
8255 case BUILT_IN_ARGS_INFO:
8256 {
8257 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8258 int *word_ptr = (int *) &current_function_args_info;
8259 #if 0
8260 /* These are used by the code below that is #if 0'ed away. */
8261 int i;
8262 tree type, elts, result;
8263 #endif
8264
8265 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8266 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8267 __FILE__, __LINE__);
8268
8269 if (arglist != 0)
8270 {
8271 tree arg = TREE_VALUE (arglist);
8272 if (TREE_CODE (arg) != INTEGER_CST)
8273 error ("argument of `__builtin_args_info' must be constant");
8274 else
8275 {
8276 int wordnum = TREE_INT_CST_LOW (arg);
8277
8278 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8279 error ("argument of `__builtin_args_info' out of range");
8280 else
8281 return GEN_INT (word_ptr[wordnum]);
8282 }
8283 }
8284 else
8285 error ("missing argument in `__builtin_args_info'");
8286
8287 return const0_rtx;
8288
8289 #if 0
8290 for (elts = NULL_TREE, i = 0; i < nwords; i++)
8291 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8292
8293 type = build_array_type (integer_type_node,
8294 build_index_type (build_int_2 (nwords, 0)));
8295 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8296 TREE_CONSTANT (result) = 1;
8297 TREE_STATIC (result) = 1;
8298 result = build (INDIRECT_REF, build_pointer_type (type), result);
8299 TREE_CONSTANT (result) = 1;
8300 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8301 #endif
8302 }
8303
8304 /* Return the address of the first anonymous stack arg. */
8305 case BUILT_IN_NEXT_ARG:
8306 {
8307 tree fntype = TREE_TYPE (current_function_decl);
8308
8309 if ((TYPE_ARG_TYPES (fntype) == 0
8310 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8311 == void_type_node))
8312 && ! current_function_varargs)
8313 {
8314 error ("`va_start' used in function with fixed args");
8315 return const0_rtx;
8316 }
8317
8318 if (arglist)
8319 {
8320 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8321 tree arg = TREE_VALUE (arglist);
8322
8323 /* Strip off all nops for the sake of the comparison. This
8324 is not quite the same as STRIP_NOPS. It does more.
8325 We must also strip off INDIRECT_EXPR for C++ reference
8326 parameters. */
8327 while (TREE_CODE (arg) == NOP_EXPR
8328 || TREE_CODE (arg) == CONVERT_EXPR
8329 || TREE_CODE (arg) == NON_LVALUE_EXPR
8330 || TREE_CODE (arg) == INDIRECT_REF)
8331 arg = TREE_OPERAND (arg, 0);
8332 if (arg != last_parm)
8333 warning ("second parameter of `va_start' not last named argument");
8334 }
8335 else if (! current_function_varargs)
8336 /* Evidently an out of date version of <stdarg.h>; can't validate
8337 va_start's second argument, but can still work as intended. */
8338 warning ("`__builtin_next_arg' called without an argument");
8339 }
8340
8341 return expand_binop (Pmode, add_optab,
8342 current_function_internal_arg_pointer,
8343 current_function_arg_offset_rtx,
8344 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8345
8346 case BUILT_IN_CLASSIFY_TYPE:
8347 if (arglist != 0)
8348 {
8349 tree type = TREE_TYPE (TREE_VALUE (arglist));
8350 enum tree_code code = TREE_CODE (type);
8351 if (code == VOID_TYPE)
8352 return GEN_INT (void_type_class);
8353 if (code == INTEGER_TYPE)
8354 return GEN_INT (integer_type_class);
8355 if (code == CHAR_TYPE)
8356 return GEN_INT (char_type_class);
8357 if (code == ENUMERAL_TYPE)
8358 return GEN_INT (enumeral_type_class);
8359 if (code == BOOLEAN_TYPE)
8360 return GEN_INT (boolean_type_class);
8361 if (code == POINTER_TYPE)
8362 return GEN_INT (pointer_type_class);
8363 if (code == REFERENCE_TYPE)
8364 return GEN_INT (reference_type_class);
8365 if (code == OFFSET_TYPE)
8366 return GEN_INT (offset_type_class);
8367 if (code == REAL_TYPE)
8368 return GEN_INT (real_type_class);
8369 if (code == COMPLEX_TYPE)
8370 return GEN_INT (complex_type_class);
8371 if (code == FUNCTION_TYPE)
8372 return GEN_INT (function_type_class);
8373 if (code == METHOD_TYPE)
8374 return GEN_INT (method_type_class);
8375 if (code == RECORD_TYPE)
8376 return GEN_INT (record_type_class);
8377 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8378 return GEN_INT (union_type_class);
8379 if (code == ARRAY_TYPE)
8380 {
8381 if (TYPE_STRING_FLAG (type))
8382 return GEN_INT (string_type_class);
8383 else
8384 return GEN_INT (array_type_class);
8385 }
8386 if (code == SET_TYPE)
8387 return GEN_INT (set_type_class);
8388 if (code == FILE_TYPE)
8389 return GEN_INT (file_type_class);
8390 if (code == LANG_TYPE)
8391 return GEN_INT (lang_type_class);
8392 }
8393 return GEN_INT (no_type_class);
8394
8395 case BUILT_IN_CONSTANT_P:
8396 if (arglist == 0)
8397 return const0_rtx;
8398 else
8399 {
8400 tree arg = TREE_VALUE (arglist);
8401
8402 STRIP_NOPS (arg);
8403 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8404 || (TREE_CODE (arg) == ADDR_EXPR
8405 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8406 ? const1_rtx : const0_rtx);
8407 }
8408
8409 case BUILT_IN_FRAME_ADDRESS:
8410 /* The argument must be a nonnegative integer constant.
8411 It counts the number of frames to scan up the stack.
8412 The value is the address of that frame. */
8413 case BUILT_IN_RETURN_ADDRESS:
8414 /* The argument must be a nonnegative integer constant.
8415 It counts the number of frames to scan up the stack.
8416 The value is the return address saved in that frame. */
8417 if (arglist == 0)
8418 /* Warning about missing arg was already issued. */
8419 return const0_rtx;
8420 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8421 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8422 {
8423 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8424 error ("invalid arg to `__builtin_frame_address'");
8425 else
8426 error ("invalid arg to `__builtin_return_address'");
8427 return const0_rtx;
8428 }
8429 else
8430 {
8431 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8432 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8433 hard_frame_pointer_rtx);
8434
8435 /* Some ports cannot access arbitrary stack frames. */
8436 if (tem == NULL)
8437 {
8438 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8439 warning ("unsupported arg to `__builtin_frame_address'");
8440 else
8441 warning ("unsupported arg to `__builtin_return_address'");
8442 return const0_rtx;
8443 }
8444
8445 /* For __builtin_frame_address, return what we've got. */
8446 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8447 return tem;
8448
8449 if (GET_CODE (tem) != REG)
8450 tem = copy_to_reg (tem);
8451 return tem;
8452 }
8453
8454 /* Returns the address of the area where the structure is returned.
8455 0 otherwise. */
8456 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8457 if (arglist != 0
8458 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8459 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8460 return const0_rtx;
8461 else
8462 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8463
8464 case BUILT_IN_ALLOCA:
8465 if (arglist == 0
8466 /* Arg could be non-integer if user redeclared this fcn wrong. */
8467 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8468 break;
8469
8470 /* Compute the argument. */
8471 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8472
8473 /* Allocate the desired space. */
8474 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8475
8476 case BUILT_IN_FFS:
8477 /* If not optimizing, call the library function. */
8478 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8479 break;
8480
8481 if (arglist == 0
8482 /* Arg could be non-integer if user redeclared this fcn wrong. */
8483 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8484 break;
8485
8486 /* Compute the argument. */
8487 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8488 /* Compute ffs, into TARGET if possible.
8489 Set TARGET to wherever the result comes back. */
8490 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8491 ffs_optab, op0, target, 1);
8492 if (target == 0)
8493 abort ();
8494 return target;
8495
8496 case BUILT_IN_STRLEN:
8497 /* If not optimizing, call the library function. */
8498 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8499 break;
8500
8501 if (arglist == 0
8502 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8503 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8504 break;
8505 else
8506 {
8507 tree src = TREE_VALUE (arglist);
8508 tree len = c_strlen (src);
8509
8510 int align
8511 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8512
8513 rtx result, src_rtx, char_rtx;
8514 enum machine_mode insn_mode = value_mode, char_mode;
8515 enum insn_code icode;
8516
8517 /* If the length is known, just return it. */
8518 if (len != 0)
8519 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8520
8521 /* If SRC is not a pointer type, don't do this operation inline. */
8522 if (align == 0)
8523 break;
8524
8525 /* Call a function if we can't compute strlen in the right mode. */
8526
8527 while (insn_mode != VOIDmode)
8528 {
8529 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8530 if (icode != CODE_FOR_nothing)
8531 break;
8532
8533 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8534 }
8535 if (insn_mode == VOIDmode)
8536 break;
8537
8538 /* Make a place to write the result of the instruction. */
8539 result = target;
8540 if (! (result != 0
8541 && GET_CODE (result) == REG
8542 && GET_MODE (result) == insn_mode
8543 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8544 result = gen_reg_rtx (insn_mode);
8545
8546 /* Make sure the operands are acceptable to the predicates. */
8547
8548 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8549 result = gen_reg_rtx (insn_mode);
8550 src_rtx = memory_address (BLKmode,
8551 expand_expr (src, NULL_RTX, ptr_mode,
8552 EXPAND_NORMAL));
8553
8554 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8555 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8556
8557 /* Check that the string is readable and has an end. */
8558 if (flag_check_memory_usage)
8559 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8560 src_rtx, ptr_mode,
8561 GEN_INT (MEMORY_USE_RO),
8562 TYPE_MODE (integer_type_node));
8563
8564 char_rtx = const0_rtx;
8565 char_mode = insn_operand_mode[(int)icode][2];
8566 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8567 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8568
8569 emit_insn (GEN_FCN (icode) (result,
8570 gen_rtx_MEM (BLKmode, src_rtx),
8571 char_rtx, GEN_INT (align)));
8572
8573 /* Return the value in the proper mode for this function. */
8574 if (GET_MODE (result) == value_mode)
8575 return result;
8576 else if (target != 0)
8577 {
8578 convert_move (target, result, 0);
8579 return target;
8580 }
8581 else
8582 return convert_to_mode (value_mode, result, 0);
8583 }
8584
8585 case BUILT_IN_STRCPY:
8586 /* If not optimizing, call the library function. */
8587 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8588 break;
8589
8590 if (arglist == 0
8591 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8592 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8593 || TREE_CHAIN (arglist) == 0
8594 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8595 break;
8596 else
8597 {
8598 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8599
8600 if (len == 0)
8601 break;
8602
8603 len = size_binop (PLUS_EXPR, len, integer_one_node);
8604
8605 chainon (arglist, build_tree_list (NULL_TREE, len));
8606 }
8607
8608 /* Falls through to the memcpy handler below. */
8609 case BUILT_IN_MEMCPY:
8610 /* If not optimizing, call the library function. */
8611 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8612 break;
8613
8614 if (arglist == 0
8615 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8616 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8617 || TREE_CHAIN (arglist) == 0
8618 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8619 != POINTER_TYPE)
8620 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8621 || (TREE_CODE (TREE_TYPE (TREE_VALUE
8622 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8623 != INTEGER_TYPE))
8624 break;
8625 else
8626 {
8627 tree dest = TREE_VALUE (arglist);
8628 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8629 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8630 tree type;
8631
8632 int src_align
8633 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8634 int dest_align
8635 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8636 rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
8637
8638 /* If either SRC or DEST is not a pointer type, don't do
8639 this operation in-line. */
8640 if (src_align == 0 || dest_align == 0)
8641 {
8642 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8643 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8644 break;
8645 }
8646
8647 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8648 dest_mem = gen_rtx_MEM (BLKmode,
8649 memory_address (BLKmode, dest_rtx));
8650 /* There could be a void* cast on top of the object. */
8651 while (TREE_CODE (dest) == NOP_EXPR)
8652 dest = TREE_OPERAND (dest, 0);
8653 type = TREE_TYPE (TREE_TYPE (dest));
8654 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8655 src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
8656 src_mem = gen_rtx_MEM (BLKmode,
8657 memory_address (BLKmode, src_rtx));
8658 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8659
8660 /* Just copy the rights of SRC to the rights of DEST. */
8661 if (flag_check_memory_usage)
8662 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
8663 dest_rtx, ptr_mode,
8664 src_rtx, ptr_mode,
8665 len_rtx, TYPE_MODE (sizetype));
8666
8667 /* There could be a void* cast on top of the object. */
8668 while (TREE_CODE (src) == NOP_EXPR)
8669 src = TREE_OPERAND (src, 0);
8670 type = TREE_TYPE (TREE_TYPE (src));
8671 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8672
8673 /* Copy word part most expediently. */
8674 dest_addr
8675 = emit_block_move (dest_mem, src_mem, len_rtx,
8676 MIN (src_align, dest_align));
8677
8678 if (dest_addr == 0)
8679 dest_addr = force_operand (dest_rtx, NULL_RTX);
8680
8681 return dest_addr;
8682 }
8683
8684 case BUILT_IN_MEMSET:
8685 /* If not optimizing, call the library function. */
8686 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8687 break;
8688
8689 if (arglist == 0
8690 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8691 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8692 || TREE_CHAIN (arglist) == 0
8693 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8694 != INTEGER_TYPE)
8695 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8696 || (INTEGER_TYPE
8697 != (TREE_CODE (TREE_TYPE
8698 (TREE_VALUE
8699 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8700 break;
8701 else
8702 {
8703 tree dest = TREE_VALUE (arglist);
8704 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8705 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8706 tree type;
8707
8708 int dest_align
8709 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8710 rtx dest_rtx, dest_mem, dest_addr, len_rtx;
8711
8712 /* If DEST is not a pointer type, don't do this
8713 operation in-line. */
8714 if (dest_align == 0)
8715 break;
8716
8717 /* If the arguments have side-effects, then we can only evaluate
8718 them at most once. The following code evaluates them twice if
8719 they are not constants because we break out to expand_call
8720 in that case. They can't be constants if they have side-effects
8721 so we can check for that first. Alternatively, we could call
8722 save_expr to make multiple evaluation safe. */
8723 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
8724 break;
8725
8726 /* If VAL is not 0, don't do this operation in-line. */
8727 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8728 break;
8729
8730 /* If LEN does not expand to a constant, don't do this
8731 operation in-line. */
8732 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8733 if (GET_CODE (len_rtx) != CONST_INT)
8734 break;
8735
8736 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8737 dest_mem = gen_rtx_MEM (BLKmode,
8738 memory_address (BLKmode, dest_rtx));
8739
8740 /* Just check that DEST is writable and mark it as readable. */
8741 if (flag_check_memory_usage)
8742 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8743 dest_rtx, ptr_mode,
8744 len_rtx, TYPE_MODE (sizetype),
8745 GEN_INT (MEMORY_USE_WO),
8746 TYPE_MODE (integer_type_node));
8747
8748 /* There could be a void* cast on top of the object. */
8749 while (TREE_CODE (dest) == NOP_EXPR)
8750 dest = TREE_OPERAND (dest, 0);
8751
8752 if (TREE_CODE (dest) == ADDR_EXPR)
8753 /* If this is the address of an object, check whether the
8754 object is an array. */
8755 type = TREE_TYPE (TREE_OPERAND (dest, 0));
8756 else
8757 type = TREE_TYPE (TREE_TYPE (dest));
8758 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8759
8760 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
8761
8762 if (dest_addr == 0)
8763 dest_addr = force_operand (dest_rtx, NULL_RTX);
8764
8765 return dest_addr;
8766 }
8767
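/* A sketch of the bail-out tests above (hypothetical user code): with
   optimization on,

       char buf[32];
       memset (buf, 0, sizeof buf);

   is expanded in-line through clear_storage, while a non-zero VAL, a
   non-constant LEN, or side effects in either argument cause a break
   out to the ordinary library call. */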
8768 /* These comparison functions need an instruction that returns an actual
8769 index. An ordinary compare that just sets the condition codes
8770 is not enough. */
8771 #ifdef HAVE_cmpstrsi
8772 case BUILT_IN_STRCMP:
8773 /* If not optimizing, call the library function. */
8774 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8775 break;
8776
8777 /* If we need to check memory accesses, call the library function. */
8778 if (flag_check_memory_usage)
8779 break;
8780
8781 if (arglist == 0
8782 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8783 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8784 || TREE_CHAIN (arglist) == 0
8785 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8786 break;
8787 else if (!HAVE_cmpstrsi)
8788 break;
8789 {
8790 tree arg1 = TREE_VALUE (arglist);
8791 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8792 tree len, len2;
8793
8794 len = c_strlen (arg1);
8795 if (len)
8796 len = size_binop (PLUS_EXPR, integer_one_node, len);
8797 len2 = c_strlen (arg2);
8798 if (len2)
8799 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8800
8801 /* If we don't have a constant length for the first, use the length
8802 of the second, if we know it. We don't require a constant for
8803 this case; some cost analysis could be done if both are available
8804 but neither is constant. For now, assume they're equally cheap.
8805
8806 If both strings have constant lengths, use the smaller. This
8807 could arise if optimization results in strcmp being called with
8808 two fixed strings, or if the code was machine-generated. We should
8809 add some code to the `memcmp' handler below to deal with such
8810 situations, someday. */
8811 if (!len || TREE_CODE (len) != INTEGER_CST)
8812 {
8813 if (len2)
8814 len = len2;
8815 else if (len == 0)
8816 break;
8817 }
8818 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8819 {
8820 if (tree_int_cst_lt (len2, len))
8821 len = len2;
8822 }
8823
8824 chainon (arglist, build_tree_list (NULL_TREE, len));
8825 }
8826
8827 /* Falls through into the memcmp case below. */
8828 case BUILT_IN_MEMCMP:
8829 /* If not optimizing, call the library function. */
8830 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8831 break;
8832
8833 /* If we need to check memory accesses, call the library function. */
8834 if (flag_check_memory_usage)
8835 break;
8836
8837 if (arglist == 0
8838 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8839 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8840 || TREE_CHAIN (arglist) == 0
8841 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8842 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8843 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8844 break;
8845 else if (!HAVE_cmpstrsi)
8846 break;
8847 {
8848 tree arg1 = TREE_VALUE (arglist);
8849 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8850 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8851 rtx result;
8852
8853 int arg1_align
8854 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8855 int arg2_align
8856 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8857 enum machine_mode insn_mode
8858 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8859
8860 /* If we don't have POINTER_TYPE, call the function. */
8861 if (arg1_align == 0 || arg2_align == 0)
8862 {
8863 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8864 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8865 break;
8866 }
8867
8868 /* Make a place to write the result of the instruction. */
8869 result = target;
8870 if (! (result != 0
8871 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8872 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8873 result = gen_reg_rtx (insn_mode);
8874
8875 emit_insn (gen_cmpstrsi (result,
8876 gen_rtx_MEM (BLKmode,
8877 expand_expr (arg1, NULL_RTX,
8878 ptr_mode,
8879 EXPAND_NORMAL)),
8880 gen_rtx_MEM (BLKmode,
8881 expand_expr (arg2, NULL_RTX,
8882 ptr_mode,
8883 EXPAND_NORMAL)),
8884 expand_expr (len, NULL_RTX, VOIDmode, 0),
8885 GEN_INT (MIN (arg1_align, arg2_align))));
8886
8887 /* Return the value in the proper mode for this function. */
8888 mode = TYPE_MODE (TREE_TYPE (exp));
8889 if (GET_MODE (result) == mode)
8890 return result;
8891 else if (target != 0)
8892 {
8893 convert_move (target, result, 0);
8894 return target;
8895 }
8896 else
8897 return convert_to_mode (mode, result, 0);
8898 }
8899 #else
8900 case BUILT_IN_STRCMP:
8901 case BUILT_IN_MEMCMP:
8902 break;
8903 #endif
8904
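/* A worked example of the length selection above (user code is
   hypothetical): for strcmp (s, "abc"), c_strlen gives 3 for the
   second argument, so LEN becomes 4 counting the terminator; that
   length is chained onto ARGLIST and control drops into the memcmp
   case, which emits a cmpstrsi comparing 4 bytes at
   MIN (arg1_align, arg2_align) alignment. */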
8905 case BUILT_IN_SETJMP:
8906 if (arglist == 0
8907 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8908 break;
8909 else
8910 {
8911 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8912 VOIDmode, 0);
8913 rtx lab = gen_label_rtx ();
8914 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
8915 emit_label (lab);
8916 return ret;
8917 }
8918
8919 /* __builtin_longjmp is passed a pointer to an array of five words.
8920 It's similar to the C library longjmp function but works with
8921 __builtin_setjmp above. */
8922 case BUILT_IN_LONGJMP:
8923 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8924 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8925 break;
8926 else
8927 {
8928 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8929 VOIDmode, 0);
8930 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8931 const0_rtx, VOIDmode, 0);
8932 expand_builtin_longjmp (buf_addr, value);
8933 return const0_rtx;
8934 }
8935
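/* A minimal usage sketch of the pair above (hypothetical user code;
   the buffer must be five words, per the comment above, and the value
   passed to __builtin_longjmp is conventionally 1):

       void *jmpbuf[5];

       if (__builtin_setjmp (jmpbuf))
         return 1;
       ...
       __builtin_longjmp (jmpbuf, 1);

   __builtin_setjmp returns zero on the initial call and nonzero when
   re-entered via __builtin_longjmp. */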
8936 /* Various hooks for the DWARF 2 __throw routine. */
8937 case BUILT_IN_UNWIND_INIT:
8938 expand_builtin_unwind_init ();
8939 return const0_rtx;
8940 case BUILT_IN_FP:
8941 return frame_pointer_rtx;
8942 case BUILT_IN_SP:
8943 return stack_pointer_rtx;
8944 #ifdef DWARF2_UNWIND_INFO
8945 case BUILT_IN_DWARF_FP_REGNUM:
8946 return expand_builtin_dwarf_fp_regnum ();
8947 case BUILT_IN_DWARF_REG_SIZE:
8948 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
8949 #endif
8950 case BUILT_IN_FROB_RETURN_ADDR:
8951 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
8952 case BUILT_IN_EXTRACT_RETURN_ADDR:
8953 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
8954 case BUILT_IN_SET_RETURN_ADDR_REG:
8955 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
8956 return const0_rtx;
8957 case BUILT_IN_EH_STUB_OLD:
8958 return expand_builtin_eh_stub_old ();
8959 case BUILT_IN_EH_STUB:
8960 return expand_builtin_eh_stub ();
8961 case BUILT_IN_SET_EH_REGS:
8962 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
8963 TREE_VALUE (TREE_CHAIN (arglist)));
8964 return const0_rtx;
8965
8966 default: /* just do library call, if unknown builtin */
8967 error ("built-in function `%s' not currently supported",
8968 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8969 }
8970
8971 /* The switch statement above can drop through to cause the function
8972 to be called normally. */
8973
8974 return expand_call (exp, target, ignore);
8975 }
8976 \f
8977 /* Built-in functions to perform an untyped call and return. */
8978
8979 /* For each register that may be used for calling a function, this
8980 gives a mode used to copy the register's value. VOIDmode indicates
8981 the register is not used for calling a function. If the machine
8982 has register windows, this gives only the outbound registers.
8983 INCOMING_REGNO gives the corresponding inbound register. */
8984 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8985
8986 /* For each register that may be used for returning values, this gives
8987 a mode used to copy the register's value. VOIDmode indicates the
8988 register is not used for returning values. If the machine has
8989 register windows, this gives only the outbound registers.
8990 INCOMING_REGNO gives the corresponding inbound register. */
8991 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8992
8993 /* For each register that may be used for calling a function, this
8994 gives the offset of that register into the block returned by
8995 __builtin_apply_args. 0 indicates that the register is not
8996 used for calling a function. */
8997 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8998
8999 /* Return the offset of register REGNO into the block returned by
9000 __builtin_apply_args. This is not declared static, since it is
9001 needed in objc-act.c. */
9002
9003 int
9004 apply_args_register_offset (regno)
9005 int regno;
9006 {
9007 apply_args_size ();
9008
9009 /* Arguments are always put in outgoing registers (in the argument
9010 block) when that makes sense. */
9011 #ifdef OUTGOING_REGNO
9012 regno = OUTGOING_REGNO (regno);
9013 #endif
9014 return apply_args_reg_offset[regno];
9015 }
9016
9017 /* Return the size required for the block returned by __builtin_apply_args,
9018 and initialize apply_args_mode. */
9019
9020 static int
9021 apply_args_size ()
9022 {
9023 static int size = -1;
9024 int align, regno;
9025 enum machine_mode mode;
9026
9027 /* The values computed by this function never change. */
9028 if (size < 0)
9029 {
9030 /* The first value is the incoming arg-pointer. */
9031 size = GET_MODE_SIZE (Pmode);
9032
9033 /* The second value is the structure value address unless this is
9034 passed as an "invisible" first argument. */
9035 if (struct_value_rtx)
9036 size += GET_MODE_SIZE (Pmode);
9037
9038 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9039 if (FUNCTION_ARG_REGNO_P (regno))
9040 {
9041 /* Search for the proper mode for copying this register's
9042 value. I'm not sure this is right, but it works so far. */
9043 enum machine_mode best_mode = VOIDmode;
9044
9045 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9046 mode != VOIDmode;
9047 mode = GET_MODE_WIDER_MODE (mode))
9048 if (HARD_REGNO_MODE_OK (regno, mode)
9049 && HARD_REGNO_NREGS (regno, mode) == 1)
9050 best_mode = mode;
9051
9052 if (best_mode == VOIDmode)
9053 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9054 mode != VOIDmode;
9055 mode = GET_MODE_WIDER_MODE (mode))
9056 if (HARD_REGNO_MODE_OK (regno, mode)
9057 && (mov_optab->handlers[(int) mode].insn_code
9058 != CODE_FOR_nothing))
9059 best_mode = mode;
9060
9061 mode = best_mode;
9062 if (mode == VOIDmode)
9063 abort ();
9064
9065 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9066 if (size % align != 0)
9067 size = CEIL (size, align) * align;
9068 apply_args_reg_offset[regno] = size;
9069 size += GET_MODE_SIZE (mode);
9070 apply_args_mode[regno] = mode;
9071 }
9072 else
9073 {
9074 apply_args_mode[regno] = VOIDmode;
9075 apply_args_reg_offset[regno] = 0;
9076 }
9077 }
9078 return size;
9079 }
9080
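/* A worked example of the layout computed above, assuming a
   hypothetical 32-bit target with 4-byte Pmode, no struct_value_rtx,
   and two SImode argument registers r0 and r1:

       size = 4 after the incoming arg-pointer slot;
       r0 is placed at offset 4, making size 8;
       r1 is placed at offset 8, making size 12.

   A register whose mode needs 8-byte alignment at size == 12 would
   first round size up with CEIL (12, 8) * 8 == 16 before recording
   its offset. */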
9081 /* Return the size required for the block returned by __builtin_apply,
9082 and initialize apply_result_mode. */
9083
9084 static int
9085 apply_result_size ()
9086 {
9087 static int size = -1;
9088 int align, regno;
9089 enum machine_mode mode;
9090
9091 /* The values computed by this function never change. */
9092 if (size < 0)
9093 {
9094 size = 0;
9095
9096 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9097 if (FUNCTION_VALUE_REGNO_P (regno))
9098 {
9099 /* Search for the proper mode for copying this register's
9100 value. I'm not sure this is right, but it works so far. */
9101 enum machine_mode best_mode = VOIDmode;
9102
9103 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9104 mode != TImode;
9105 mode = GET_MODE_WIDER_MODE (mode))
9106 if (HARD_REGNO_MODE_OK (regno, mode))
9107 best_mode = mode;
9108
9109 if (best_mode == VOIDmode)
9110 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9111 mode != VOIDmode;
9112 mode = GET_MODE_WIDER_MODE (mode))
9113 if (HARD_REGNO_MODE_OK (regno, mode)
9114 && (mov_optab->handlers[(int) mode].insn_code
9115 != CODE_FOR_nothing))
9116 best_mode = mode;
9117
9118 mode = best_mode;
9119 if (mode == VOIDmode)
9120 abort ();
9121
9122 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9123 if (size % align != 0)
9124 size = CEIL (size, align) * align;
9125 size += GET_MODE_SIZE (mode);
9126 apply_result_mode[regno] = mode;
9127 }
9128 else
9129 apply_result_mode[regno] = VOIDmode;
9130
9131 /* Allow targets that use untyped_call and untyped_return to override
9132 the size so that machine-specific information can be stored here. */
9133 #ifdef APPLY_RESULT_SIZE
9134 size = APPLY_RESULT_SIZE;
9135 #endif
9136 }
9137 return size;
9138 }
9139
9140 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9141 /* Create a vector describing the result block RESULT. If SAVEP is true,
9142 the result block is used to save the values; otherwise it is used to
9143 restore the values. */
9144
9145 static rtx
9146 result_vector (savep, result)
9147 int savep;
9148 rtx result;
9149 {
9150 int regno, size, align, nelts;
9151 enum machine_mode mode;
9152 rtx reg, mem;
9153 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9154
9155 size = nelts = 0;
9156 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9157 if ((mode = apply_result_mode[regno]) != VOIDmode)
9158 {
9159 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9160 if (size % align != 0)
9161 size = CEIL (size, align) * align;
9162 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9163 mem = change_address (result, mode,
9164 plus_constant (XEXP (result, 0), size));
9165 savevec[nelts++] = (savep
9166 ? gen_rtx_SET (VOIDmode, mem, reg)
9167 : gen_rtx_SET (VOIDmode, reg, mem));
9168 size += GET_MODE_SIZE (mode);
9169 }
9170 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9171 }
9172 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9173
9174 /* Save the state required to perform an untyped call with the same
9175 arguments as were passed to the current function. */
9176
9177 static rtx
9178 expand_builtin_apply_args ()
9179 {
9180 rtx registers;
9181 int size, align, regno;
9182 enum machine_mode mode;
9183
9184 /* Create a block where the arg-pointer, structure value address,
9185 and argument registers can be saved. */
9186 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9187
9188 /* Walk past the arg-pointer and structure value address. */
9189 size = GET_MODE_SIZE (Pmode);
9190 if (struct_value_rtx)
9191 size += GET_MODE_SIZE (Pmode);
9192
9193 /* Save each register used in calling a function to the block. */
9194 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9195 if ((mode = apply_args_mode[regno]) != VOIDmode)
9196 {
9197 rtx tem;
9198
9199 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9200 if (size % align != 0)
9201 size = CEIL (size, align) * align;
9202
9203 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9204
9205 #ifdef STACK_REGS
9206 /* For reg-stack.c's stack register housekeeping.
9207 Compare with a similar piece of code in function.c. */
9208
9209 emit_insn (gen_rtx_USE (mode, tem));
9210 #endif
9211
9212 emit_move_insn (change_address (registers, mode,
9213 plus_constant (XEXP (registers, 0),
9214 size)),
9215 tem);
9216 size += GET_MODE_SIZE (mode);
9217 }
9218
9219 /* Save the arg pointer to the block. */
9220 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9221 copy_to_reg (virtual_incoming_args_rtx));
9222 size = GET_MODE_SIZE (Pmode);
9223
9224 /* Save the structure value address unless this is passed as an
9225 "invisible" first argument. */
9226 if (struct_value_incoming_rtx)
9227 {
9228 emit_move_insn (change_address (registers, Pmode,
9229 plus_constant (XEXP (registers, 0),
9230 size)),
9231 copy_to_reg (struct_value_incoming_rtx));
9232 size += GET_MODE_SIZE (Pmode);
9233 }
9234
9235 /* Return the address of the block. */
9236 return copy_addr_to_reg (XEXP (registers, 0));
9237 }
9238
9239 /* Perform an untyped call and save the state required to perform an
9240 untyped return of whatever value was returned by the given function. */
9241
9242 static rtx
9243 expand_builtin_apply (function, arguments, argsize)
9244 rtx function, arguments, argsize;
9245 {
9246 int size, align, regno;
9247 enum machine_mode mode;
9248 rtx incoming_args, result, reg, dest, call_insn;
9249 rtx old_stack_level = 0;
9250 rtx call_fusage = 0;
9251
9252 /* Create a block where the return registers can be saved. */
9253 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9254
9255 /* ??? The argsize value should be adjusted here. */
9256
9257 /* Fetch the arg pointer from the ARGUMENTS block. */
9258 incoming_args = gen_reg_rtx (Pmode);
9259 emit_move_insn (incoming_args,
9260 gen_rtx_MEM (Pmode, arguments));
9261 #ifndef STACK_GROWS_DOWNWARD
9262 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9263 incoming_args, 0, OPTAB_LIB_WIDEN);
9264 #endif
9265
9266 /* Perform postincrements before actually calling the function. */
9267 emit_queue ();
9268
9269 /* Push a new argument block and copy the arguments. */
9270 do_pending_stack_adjust ();
9271
9272 /* Save the stack with a nonlocal save, if available. */
9273 #ifdef HAVE_save_stack_nonlocal
9274 if (HAVE_save_stack_nonlocal)
9275 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9276 else
9277 #endif
9278 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9279
9280 /* Push a block of memory onto the stack to store the memory arguments.
9281 Save the address in a register, and copy the memory arguments. ??? I
9282 haven't figured out how the calling convention macros affect this,
9283 but it's likely that the source and/or destination addresses in
9284 the block copy will need updating in machine specific ways. */
9285 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9286 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9287 gen_rtx_MEM (BLKmode, incoming_args),
9288 argsize,
9289 PARM_BOUNDARY / BITS_PER_UNIT);
9290
9291 /* Refer to the argument block. */
9292 apply_args_size ();
9293 arguments = gen_rtx_MEM (BLKmode, arguments);
9294
9295 /* Walk past the arg-pointer and structure value address. */
9296 size = GET_MODE_SIZE (Pmode);
9297 if (struct_value_rtx)
9298 size += GET_MODE_SIZE (Pmode);
9299
9300 /* Restore each of the registers previously saved. Make USE insns
9301 for each of these registers for use in making the call. */
9302 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9303 if ((mode = apply_args_mode[regno]) != VOIDmode)
9304 {
9305 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9306 if (size % align != 0)
9307 size = CEIL (size, align) * align;
9308 reg = gen_rtx_REG (mode, regno);
9309 emit_move_insn (reg,
9310 change_address (arguments, mode,
9311 plus_constant (XEXP (arguments, 0),
9312 size)));
9313
9314 use_reg (&call_fusage, reg);
9315 size += GET_MODE_SIZE (mode);
9316 }
9317
9318 /* Restore the structure value address unless this is passed as an
9319 "invisible" first argument. */
9320 size = GET_MODE_SIZE (Pmode);
9321 if (struct_value_rtx)
9322 {
9323 rtx value = gen_reg_rtx (Pmode);
9324 emit_move_insn (value,
9325 change_address (arguments, Pmode,
9326 plus_constant (XEXP (arguments, 0),
9327 size)));
9328 emit_move_insn (struct_value_rtx, value);
9329 if (GET_CODE (struct_value_rtx) == REG)
9330 use_reg (&call_fusage, struct_value_rtx);
9331 size += GET_MODE_SIZE (Pmode);
9332 }
9333
9334 /* All arguments and registers used for the call are set up by now! */
9335 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9336
9337 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there is no need,
9338 and we don't want to load it into a register as an optimization,
9339 because prepare_call_address already did it if it should be done. */
9340 if (GET_CODE (function) != SYMBOL_REF)
9341 function = memory_address (FUNCTION_MODE, function);
9342
9343 /* Generate the actual call instruction and save the return value. */
9344 #ifdef HAVE_untyped_call
9345 if (HAVE_untyped_call)
9346 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9347 result, result_vector (1, result)));
9348 else
9349 #endif
9350 #ifdef HAVE_call_value
9351 if (HAVE_call_value)
9352 {
9353 rtx valreg = 0;
9354
9355 /* Locate the unique return register. It is not possible to
9356 express a call that sets more than one return register using
9357 call_value; use untyped_call for that. In fact, untyped_call
9358 only needs to save the return registers in the given block. */
9359 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9360 if ((mode = apply_result_mode[regno]) != VOIDmode)
9361 {
9362 if (valreg)
9363 abort (); /* HAVE_untyped_call required. */
9364 valreg = gen_rtx_REG (mode, regno);
9365 }
9366
9367 emit_call_insn (gen_call_value (valreg,
9368 gen_rtx_MEM (FUNCTION_MODE, function),
9369 const0_rtx, NULL_RTX, const0_rtx));
9370
9371 emit_move_insn (change_address (result, GET_MODE (valreg),
9372 XEXP (result, 0)),
9373 valreg);
9374 }
9375 else
9376 #endif
9377 abort ();
9378
9379 /* Find the CALL insn we just emitted. */
9380 for (call_insn = get_last_insn ();
9381 call_insn && GET_CODE (call_insn) != CALL_INSN;
9382 call_insn = PREV_INSN (call_insn))
9383 ;
9384
9385 if (! call_insn)
9386 abort ();
9387
9388 /* Put the register usage information on the CALL. If there is already
9389 some usage information, put ours at the end. */
9390 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9391 {
9392 rtx link;
9393
9394 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9395 link = XEXP (link, 1))
9396 ;
9397
9398 XEXP (link, 1) = call_fusage;
9399 }
9400 else
9401 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9402
9403 /* Restore the stack. */
9404 #ifdef HAVE_save_stack_nonlocal
9405 if (HAVE_save_stack_nonlocal)
9406 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9407 else
9408 #endif
9409 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9410
9411 /* Return the address of the result block. */
9412 return copy_addr_to_reg (XEXP (result, 0));
9413 }
9414
9415 /* Perform an untyped return. */
9416
9417 static void
9418 expand_builtin_return (result)
9419 rtx result;
9420 {
9421 int size, align, regno;
9422 enum machine_mode mode;
9423 rtx reg;
9424 rtx call_fusage = 0;
9425
9426 apply_result_size ();
9427 result = gen_rtx_MEM (BLKmode, result);
9428
9429 #ifdef HAVE_untyped_return
9430 if (HAVE_untyped_return)
9431 {
9432 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9433 emit_barrier ();
9434 return;
9435 }
9436 #endif
9437
9438 /* Restore the return value and note that each value is used. */
9439 size = 0;
9440 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9441 if ((mode = apply_result_mode[regno]) != VOIDmode)
9442 {
9443 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9444 if (size % align != 0)
9445 size = CEIL (size, align) * align;
9446 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9447 emit_move_insn (reg,
9448 change_address (result, mode,
9449 plus_constant (XEXP (result, 0),
9450 size)));
9451
9452 push_to_sequence (call_fusage);
9453 emit_insn (gen_rtx_USE (VOIDmode, reg));
9454 call_fusage = get_insns ();
9455 end_sequence ();
9456 size += GET_MODE_SIZE (mode);
9457 }
9458
9459 /* Put the USE insns before the return. */
9460 emit_insns (call_fusage);
9461
9462 /* Return whatever values were restored by jumping directly to the end
9463 of the function. */
9464 expand_null_return ();
9465 }
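#if 0
/* A sketch of how the three builtins above fit together (hypothetical
   user code; 64 is a caller-chosen upper bound on the size of the
   pushed arguments). __builtin_apply_args saves the incoming
   registers, __builtin_apply replays them as a call to TARGET_FN, and
   __builtin_return performs an untyped return of whatever TARGET_FN
   returned. */
extern void target_fn ();

void
forward ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply (target_fn, args, 64);
  __builtin_return (result);
}
#endif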
9466 \f
9467 /* Expand code for a post- or pre- increment or decrement
9468 and return the RTX for the result.
9469 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9470
9471 static rtx
9472 expand_increment (exp, post, ignore)
9473 register tree exp;
9474 int post, ignore;
9475 {
9476 register rtx op0, op1;
9477 register rtx temp, value;
9478 register tree incremented = TREE_OPERAND (exp, 0);
9479 optab this_optab = add_optab;
9480 int icode;
9481 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9482 int op0_is_copy = 0;
9483 int single_insn = 0;
9484 /* 1 means we can't store into OP0 directly,
9485 because it is a subreg narrower than a word,
9486 and we don't dare clobber the rest of the word. */
9487 int bad_subreg = 0;
9488
9489 /* Stabilize any component ref that might need to be
9490 evaluated more than once below. */
9491 if (!post
9492 || TREE_CODE (incremented) == BIT_FIELD_REF
9493 || (TREE_CODE (incremented) == COMPONENT_REF
9494 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9495 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9496 incremented = stabilize_reference (incremented);
9497 /* Nested *INCREMENT_EXPRs can happen in C++. We must force the innermost
9498 ones into SAVE_EXPRs so that they don't accidentally get evaluated
9499 more than once by the code below. */
9500 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9501 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9502 incremented = save_expr (incremented);
9503
9504 /* Compute the operands as RTX.
9505 Note whether OP0 is the actual lvalue or a copy of it:
9506 I believe it is a copy iff it is a register or subreg
9507 and insns were generated in computing it. */
9508
9509 temp = get_last_insn ();
9510 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9511
9512 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9513 in place but instead must do sign- or zero-extension during assignment,
9514 so we copy it into a new register and let the code below use it as
9515 a copy.
9516
9517 Note that we can safely modify this SUBREG since it is known not to be
9518 shared (it was made by the expand_expr call above). */
9519
9520 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9521 {
9522 if (post)
9523 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9524 else
9525 bad_subreg = 1;
9526 }
9527 else if (GET_CODE (op0) == SUBREG
9528 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9529 {
9530 /* We cannot increment this SUBREG in place. If we are
9531 post-incrementing, get a copy of the old value. Otherwise,
9532 just mark that we cannot increment in place. */
9533 if (post)
9534 op0 = copy_to_reg (op0);
9535 else
9536 bad_subreg = 1;
9537 }
9538
9539 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9540 && temp != get_last_insn ());
9541 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9542 EXPAND_MEMORY_USE_BAD);
9543
9544 /* Decide whether incrementing or decrementing. */
9545 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9546 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9547 this_optab = sub_optab;
9548
9549 /* Convert decrement by a constant into a negative increment. */
9550 if (this_optab == sub_optab
9551 && GET_CODE (op1) == CONST_INT)
9552 {
9553 op1 = GEN_INT (- INTVAL (op1));
9554 this_optab = add_optab;
9555 }
9556
9557 /* For a preincrement, see if we can do this with a single instruction. */
9558 if (!post)
9559 {
9560 icode = (int) this_optab->handlers[(int) mode].insn_code;
9561 if (icode != (int) CODE_FOR_nothing
9562 /* Make sure that OP0 is valid for operands 0 and 1
9563 of the insn we want to queue. */
9564 && (*insn_operand_predicate[icode][0]) (op0, mode)
9565 && (*insn_operand_predicate[icode][1]) (op0, mode)
9566 && (*insn_operand_predicate[icode][2]) (op1, mode))
9567 single_insn = 1;
9568 }
9569
9570 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9571 then we cannot just increment OP0. We must therefore contrive to
9572 increment the original value. Then, for postincrement, we can return
9573 OP0 since it is a copy of the old value. For preincrement, expand here
9574 unless we can do it with a single insn.
9575
9576 Likewise if storing directly into OP0 would clobber high bits
9577 we need to preserve (bad_subreg). */
9578 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9579 {
9580 /* This is the easiest way to increment the value wherever it is.
9581 Problems with multiple evaluation of INCREMENTED are prevented
9582 because either (1) it is a component_ref or preincrement,
9583 in which case it was stabilized above, or (2) it is an array_ref
9584 with constant index in an array in a register, which is
9585 safe to reevaluate. */
9586 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9587 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9588 ? MINUS_EXPR : PLUS_EXPR),
9589 TREE_TYPE (exp),
9590 incremented,
9591 TREE_OPERAND (exp, 1));
9592
9593 while (TREE_CODE (incremented) == NOP_EXPR
9594 || TREE_CODE (incremented) == CONVERT_EXPR)
9595 {
9596 newexp = convert (TREE_TYPE (incremented), newexp);
9597 incremented = TREE_OPERAND (incremented, 0);
9598 }
9599
9600 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9601 return post ? op0 : temp;
9602 }
9603
9604 if (post)
9605 {
9606 /* We have a true reference to the value in OP0.
9607 If there is an insn to add or subtract in this mode, queue it.
9608 Queueing the increment insn avoids the register shuffling
9609 that often results if we must increment now and first save
9610 the old value for subsequent use. */
9611
9612 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9613 op0 = stabilize (op0);
9614 #endif
9615
9616 icode = (int) this_optab->handlers[(int) mode].insn_code;
9617 if (icode != (int) CODE_FOR_nothing
9618 /* Make sure that OP0 is valid for operands 0 and 1
9619 of the insn we want to queue. */
9620 && (*insn_operand_predicate[icode][0]) (op0, mode)
9621 && (*insn_operand_predicate[icode][1]) (op0, mode))
9622 {
9623 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9624 op1 = force_reg (mode, op1);
9625
9626 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9627 }
9628 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9629 {
9630 rtx addr = (general_operand (XEXP (op0, 0), mode)
9631 ? force_reg (Pmode, XEXP (op0, 0))
9632 : copy_to_reg (XEXP (op0, 0)));
9633 rtx temp, result;
9634
9635 op0 = change_address (op0, VOIDmode, addr);
9636 temp = force_reg (GET_MODE (op0), op0);
9637 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9638 op1 = force_reg (mode, op1);
9639
9640 /* The increment queue is LIFO, thus we have to `queue'
9641 the instructions in reverse order. */
9642 enqueue_insn (op0, gen_move_insn (op0, temp));
9643 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9644 return result;
9645 }
9646 }
9647
9648 /* Preincrement, or we can't increment with one simple insn. */
9649 if (post)
9650 /* Save a copy of the value before inc or dec, to return it later. */
9651 temp = value = copy_to_reg (op0);
9652 else
9653 /* Arrange to return the incremented value. */
9654 /* Copy the rtx because expand_binop will protect from the queue,
9655 and the results of that would be invalid for us to return
9656 if our caller does emit_queue before using our result. */
9657 temp = copy_rtx (value = op0);
9658
9659 /* Increment however we can. */
9660 op1 = expand_binop (mode, this_optab, value, op1,
9661 flag_check_memory_usage ? NULL_RTX : op0,
9662 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9663 /* Make sure the value is stored into OP0. */
9664 if (op1 != op0)
9665 emit_move_insn (op0, op1);
9666
9667 return temp;
9668 }
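/* For example (a sketch of the queueing above): for the user code

       *p++ = x;

   the add that bumps P is queued with enqueue_insn rather than
   emitted immediately, and the un-incremented value is returned, so
   the store can use the old P without first saving a copy in a
   scratch register; emit_queue later emits the deferred add. */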
9669 \f
9670 /* Expand all function calls contained within EXP, innermost ones first.
9671 But don't look within expressions that have sequence points.
9672 For each CALL_EXPR, record the rtx for its value
9673 in the CALL_EXPR_RTL field. */
9674
9675 static void
9676 preexpand_calls (exp)
9677 tree exp;
9678 {
9679 register int nops, i;
9680 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9681
9682 if (! do_preexpand_calls)
9683 return;
9684
9685 /* Only expressions and references can contain calls. */
9686
9687 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9688 return;
9689
9690 switch (TREE_CODE (exp))
9691 {
9692 case CALL_EXPR:
9693 /* Do nothing if already expanded. */
9694 if (CALL_EXPR_RTL (exp) != 0
9695 /* Do nothing if the call returns a variable-sized object. */
9696 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9697 /* Do nothing to built-in functions. */
9698 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9699 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9700 == FUNCTION_DECL)
9701 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9702 return;
9703
9704 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9705 return;
9706
9707 case COMPOUND_EXPR:
9708 case COND_EXPR:
9709 case TRUTH_ANDIF_EXPR:
9710 case TRUTH_ORIF_EXPR:
9711 /* If we find one of these, then we can be sure
9712 the adjust will be done for it (since it makes jumps).
9713 Do it now, so that if this is inside an argument
9714 of a function, we don't get the stack adjustment
9715 after some other args have already been pushed. */
9716 do_pending_stack_adjust ();
9717 return;
9718
9719 case BLOCK:
9720 case RTL_EXPR:
9721 case WITH_CLEANUP_EXPR:
9722 case CLEANUP_POINT_EXPR:
9723 case TRY_CATCH_EXPR:
9724 return;
9725
9726 case SAVE_EXPR:
9727 if (SAVE_EXPR_RTL (exp) != 0)
9728 return;
9729
9730 default:
9731 break;
9732 }
9733
9734 nops = tree_code_length[(int) TREE_CODE (exp)];
9735 for (i = 0; i < nops; i++)
9736 if (TREE_OPERAND (exp, i) != 0)
9737 {
9738 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9739 if (type == 'e' || type == '<' || type == '1' || type == '2'
9740 || type == 'r')
9741 preexpand_calls (TREE_OPERAND (exp, i));
9742 }
9743 }
9744 \f
9745 /* At the start of a function, record that we have no previously-pushed
9746 arguments waiting to be popped. */
9747
9748 void
9749 init_pending_stack_adjust ()
9750 {
9751 pending_stack_adjust = 0;
9752 }
9753
9754 /* When exiting from function, if safe, clear out any pending stack adjust
9755 so the adjustment won't get done.
9756
9757 Note, if the current function calls alloca, then it must have a
9758 frame pointer regardless of the value of flag_omit_frame_pointer. */
9759
9760 void
9761 clear_pending_stack_adjust ()
9762 {
9763 #ifdef EXIT_IGNORE_STACK
9764 if (optimize > 0
9765 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9766 && EXIT_IGNORE_STACK
9767 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9768 && ! flag_inline_functions)
9769 pending_stack_adjust = 0;
9770 #endif
9771 }
9772
9773 /* Pop any previously-pushed arguments that have not been popped yet. */
9774
9775 void
9776 do_pending_stack_adjust ()
9777 {
9778 if (inhibit_defer_pop == 0)
9779 {
9780 if (pending_stack_adjust != 0)
9781 adjust_stack (GEN_INT (pending_stack_adjust));
9782 pending_stack_adjust = 0;
9783 }
9784 }
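/* For example (assuming a target with push insns): after expanding

       f (1);
       g (2);

   each call's pushed argument bytes are accumulated in
   pending_stack_adjust instead of being popped separately, so the
   adjust_stack above can pop both with a single stack-pointer
   adjustment once defer is no longer inhibited. */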
9785 \f
9786 /* Expand conditional expressions. */
9787
9788 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9789 LABEL is an rtx of code CODE_LABEL, in this function and all the
9790 functions here. */
9791
9792 void
9793 jumpifnot (exp, label)
9794 tree exp;
9795 rtx label;
9796 {
9797 do_jump (exp, label, NULL_RTX);
9798 }
9799
9800 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9801
9802 void
9803 jumpif (exp, label)
9804 tree exp;
9805 rtx label;
9806 {
9807 do_jump (exp, NULL_RTX, label);
9808 }
9809
9810 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9811 the result is zero, or IF_TRUE_LABEL if the result is one.
9812 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9813 meaning fall through in that case.
9814
9815 do_jump always does any pending stack adjust except when it does not
9816 actually perform a jump. An example where there is no jump
9817 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9818
9819 This function is responsible for optimizing cases such as
9820 &&, || and comparison operators in EXP. */
9821
9822 void
9823 do_jump (exp, if_false_label, if_true_label)
9824 tree exp;
9825 rtx if_false_label, if_true_label;
9826 {
9827 register enum tree_code code = TREE_CODE (exp);
9828 /* Some cases need to create a label to jump to
9829 in order to properly fall through.
9830 These cases set DROP_THROUGH_LABEL nonzero. */
9831 rtx drop_through_label = 0;
9832 rtx temp;
9833 rtx comparison = 0;
9834 int i;
9835 tree type;
9836 enum machine_mode mode;
9837
9838 emit_queue ();
9839
9840 switch (code)
9841 {
9842 case ERROR_MARK:
9843 break;
9844
9845 case INTEGER_CST:
9846 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9847 if (temp)
9848 emit_jump (temp);
9849 break;
9850
9851 #if 0
9852 /* This is not true with #pragma weak. */
9853 case ADDR_EXPR:
9854 /* The address of something can never be zero. */
9855 if (if_true_label)
9856 emit_jump (if_true_label);
9857 break;
9858 #endif
9859
9860 case NOP_EXPR:
9861 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9862 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9863 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9864 goto normal;
9865 case CONVERT_EXPR:
9866 /* If we are narrowing the operand, we have to do the compare in the
9867 narrower mode. */
9868 if ((TYPE_PRECISION (TREE_TYPE (exp))
9869 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9870 goto normal;
9871 case NON_LVALUE_EXPR:
9872 case REFERENCE_EXPR:
9873 case ABS_EXPR:
9874 case NEGATE_EXPR:
9875 case LROTATE_EXPR:
9876 case RROTATE_EXPR:
9877 /* These cannot change zero->non-zero or vice versa. */
9878 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9879 break;
9880
9881 #if 0
9882 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9883 a test, and can be more if the test is eliminated. */
9884 case PLUS_EXPR:
9885 /* Reduce to minus. */
9886 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9887 TREE_OPERAND (exp, 0),
9888 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9889 TREE_OPERAND (exp, 1))));
9890 /* Process as MINUS. */
9891 #endif
9892
9893 case MINUS_EXPR:
9894 /* Non-zero iff operands of minus differ. */
9895 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9896 TREE_OPERAND (exp, 0),
9897 TREE_OPERAND (exp, 1)),
9898 NE, NE);
9899 break;
9900
9901 case BIT_AND_EXPR:
9902 /* If we are AND'ing with a small constant, do this comparison in the
9903 smallest type that fits. If the machine doesn't have comparisons
9904 that small, it will be converted back to the wider comparison.
9905 This helps if we are testing the sign bit of a narrower object.
9906 combine can't do this for us because it can't know whether a
9907 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9908
9909 if (! SLOW_BYTE_ACCESS
9910 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9911 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9912 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9913 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9914 && (type = type_for_mode (mode, 1)) != 0
9915 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9916 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9917 != CODE_FOR_nothing))
9918 {
9919 do_jump (convert (type, exp), if_false_label, if_true_label);
9920 break;
9921 }
9922 goto normal;
9923
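/* For example, with a mask of 0x80, floor_log2 (0x80) is 7, so
   mode_for_size (8, MODE_INT, 0) yields QImode; a test such as

       if (x & 0x80) ...

   with X an int is then done as a QImode comparison (in effect,
   testing the sign bit of the low byte), provided byte access is not
   slow and a QImode compare insn exists. */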
9924 case TRUTH_NOT_EXPR:
9925 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9926 break;
9927
9928 case TRUTH_ANDIF_EXPR:
9929 if (if_false_label == 0)
9930 if_false_label = drop_through_label = gen_label_rtx ();
9931 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9932 start_cleanup_deferral ();
9933 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9934 end_cleanup_deferral ();
9935 break;
9936
9937 case TRUTH_ORIF_EXPR:
9938 if (if_true_label == 0)
9939 if_true_label = drop_through_label = gen_label_rtx ();
9940 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9941 start_cleanup_deferral ();
9942 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9943 end_cleanup_deferral ();
9944 break;
9945
9946 case COMPOUND_EXPR:
9947 push_temp_slots ();
9948 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9949 preserve_temp_slots (NULL_RTX);
9950 free_temp_slots ();
9951 pop_temp_slots ();
9952 emit_queue ();
9953 do_pending_stack_adjust ();
9954 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9955 break;
9956
9957 case COMPONENT_REF:
9958 case BIT_FIELD_REF:
9959 case ARRAY_REF:
9960 {
9961 int bitsize, bitpos, unsignedp;
9962 enum machine_mode mode;
9963 tree type;
9964 tree offset;
9965 int volatilep = 0;
9966 int alignment;
9967
9968 /* Get description of this reference. We don't actually care
9969 about the underlying object here. */
9970 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9971 &mode, &unsignedp, &volatilep,
9972 &alignment);
9973
9974 type = type_for_size (bitsize, unsignedp);
9975 if (! SLOW_BYTE_ACCESS
9976 && type != 0 && bitsize >= 0
9977 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9978 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9979 != CODE_FOR_nothing))
9980 {
9981 do_jump (convert (type, exp), if_false_label, if_true_label);
9982 break;
9983 }
9984 goto normal;
9985 }
9986
9987 case COND_EXPR:
9988 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9989 if (integer_onep (TREE_OPERAND (exp, 1))
9990 && integer_zerop (TREE_OPERAND (exp, 2)))
9991 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9992
9993 else if (integer_zerop (TREE_OPERAND (exp, 1))
9994 && integer_onep (TREE_OPERAND (exp, 2)))
9995 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9996
9997 else
9998 {
9999 register rtx label1 = gen_label_rtx ();
10000 drop_through_label = gen_label_rtx ();
10001
10002 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10003
10004 start_cleanup_deferral ();
10005 /* Now the THEN-expression. */
10006 do_jump (TREE_OPERAND (exp, 1),
10007 if_false_label ? if_false_label : drop_through_label,
10008 if_true_label ? if_true_label : drop_through_label);
10009 /* In case the do_jump just above never jumps. */
10010 do_pending_stack_adjust ();
10011 emit_label (label1);
10012
10013 /* Now the ELSE-expression. */
10014 do_jump (TREE_OPERAND (exp, 2),
10015 if_false_label ? if_false_label : drop_through_label,
10016 if_true_label ? if_true_label : drop_through_label);
10017 end_cleanup_deferral ();
10018 }
10019 break;
10020
10021 case EQ_EXPR:
10022 {
10023 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10024
10025 if (integer_zerop (TREE_OPERAND (exp, 1)))
10026 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10027 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10028 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10029 do_jump
10030 (fold
10031 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10032 fold (build (EQ_EXPR, TREE_TYPE (exp),
10033 fold (build1 (REALPART_EXPR,
10034 TREE_TYPE (inner_type),
10035 TREE_OPERAND (exp, 0))),
10036 fold (build1 (REALPART_EXPR,
10037 TREE_TYPE (inner_type),
10038 TREE_OPERAND (exp, 1))))),
10039 fold (build (EQ_EXPR, TREE_TYPE (exp),
10040 fold (build1 (IMAGPART_EXPR,
10041 TREE_TYPE (inner_type),
10042 TREE_OPERAND (exp, 0))),
10043 fold (build1 (IMAGPART_EXPR,
10044 TREE_TYPE (inner_type),
10045 TREE_OPERAND (exp, 1))))))),
10046 if_false_label, if_true_label);
10047 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10048 && !can_compare_p (TYPE_MODE (inner_type)))
10049 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10050 else
10051 comparison = compare (exp, EQ, EQ);
10052 break;
10053 }
10054
10055 case NE_EXPR:
10056 {
10057 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10058
10059 if (integer_zerop (TREE_OPERAND (exp, 1)))
10060 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10061 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10062 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10063 do_jump
10064 (fold
10065 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10066 fold (build (NE_EXPR, TREE_TYPE (exp),
10067 fold (build1 (REALPART_EXPR,
10068 TREE_TYPE (inner_type),
10069 TREE_OPERAND (exp, 0))),
10070 fold (build1 (REALPART_EXPR,
10071 TREE_TYPE (inner_type),
10072 TREE_OPERAND (exp, 1))))),
10073 fold (build (NE_EXPR, TREE_TYPE (exp),
10074 fold (build1 (IMAGPART_EXPR,
10075 TREE_TYPE (inner_type),
10076 TREE_OPERAND (exp, 0))),
10077 fold (build1 (IMAGPART_EXPR,
10078 TREE_TYPE (inner_type),
10079 TREE_OPERAND (exp, 1))))))),
10080 if_false_label, if_true_label);
10081 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10082 && !can_compare_p (TYPE_MODE (inner_type)))
10083 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10084 else
10085 comparison = compare (exp, NE, NE);
10086 break;
10087 }
10088
10089 case LT_EXPR:
10090 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10091 == MODE_INT)
10092 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10093 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10094 else
10095 comparison = compare (exp, LT, LTU);
10096 break;
10097
10098 case LE_EXPR:
10099 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10100 == MODE_INT)
10101 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10102 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10103 else
10104 comparison = compare (exp, LE, LEU);
10105 break;
10106
10107 case GT_EXPR:
10108 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10109 == MODE_INT)
10110 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10111 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10112 else
10113 comparison = compare (exp, GT, GTU);
10114 break;
10115
10116 case GE_EXPR:
10117 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10118 == MODE_INT)
10119 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10120 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10121 else
10122 comparison = compare (exp, GE, GEU);
10123 break;
10124
10125 default:
10126 normal:
10127 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10128 #if 0
10129 /* This is not needed any more and causes poor code since it causes
10130 comparisons and tests from non-SI objects to have different code
10131 sequences. */
10132 /* Copy to register to avoid generating bad insns by cse
10133 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10134 if (!cse_not_expected && GET_CODE (temp) == MEM)
10135 temp = copy_to_reg (temp);
10136 #endif
10137 do_pending_stack_adjust ();
10138 if (GET_CODE (temp) == CONST_INT)
10139 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10140 else if (GET_CODE (temp) == LABEL_REF)
10141 comparison = const_true_rtx;
10142 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10143 && !can_compare_p (GET_MODE (temp)))
10144 /* Note swapping the labels gives us not-equal. */
10145 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10146 else if (GET_MODE (temp) != VOIDmode)
10147 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10148 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10149 GET_MODE (temp), NULL_RTX, 0);
10150 else
10151 abort ();
10152 }
10153
10154 /* Do any postincrements in the expression that was tested. */
10155 emit_queue ();
10156
10157 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10158 straight into a conditional jump instruction as the jump condition.
10159 Otherwise, all the work has been done already. */
10160
10161 if (comparison == const_true_rtx)
10162 {
10163 if (if_true_label)
10164 emit_jump (if_true_label);
10165 }
10166 else if (comparison == const0_rtx)
10167 {
10168 if (if_false_label)
10169 emit_jump (if_false_label);
10170 }
10171 else if (comparison)
10172 do_jump_for_compare (comparison, if_false_label, if_true_label);
10173
10174 if (drop_through_label)
10175 {
10176 /* If do_jump produces code that might be jumped around,
10177 do any stack adjusts from that code, before the place
10178 where control merges in. */
10179 do_pending_stack_adjust ();
10180 emit_label (drop_through_label);
10181 }
10182 }
10183 \f
10184 /* Given a comparison expression EXP for values too wide to be compared
10185 with one insn, test the comparison and jump to the appropriate label.
10186 The code of EXP is ignored; we always test GT if SWAP is 0,
10187 and LT if SWAP is 1. */
10188
10189 static void
10190 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10191 tree exp;
10192 int swap;
10193 rtx if_false_label, if_true_label;
10194 {
10195 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10196 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10197 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10198 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10199 rtx drop_through_label = 0;
10200 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10201 int i;
10202
10203 if (! if_true_label || ! if_false_label)
10204 drop_through_label = gen_label_rtx ();
10205 if (! if_true_label)
10206 if_true_label = drop_through_label;
10207 if (! if_false_label)
10208 if_false_label = drop_through_label;
10209
10210 /* Compare a word at a time, high order first. */
10211 for (i = 0; i < nwords; i++)
10212 {
10213 rtx comp;
10214 rtx op0_word, op1_word;
10215
10216 if (WORDS_BIG_ENDIAN)
10217 {
10218 op0_word = operand_subword_force (op0, i, mode);
10219 op1_word = operand_subword_force (op1, i, mode);
10220 }
10221 else
10222 {
10223 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10224 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10225 }
10226
10227 /* All but the high-order word must be compared as unsigned. */
10228 comp = compare_from_rtx (op0_word, op1_word,
10229 (unsignedp || i > 0) ? GTU : GT,
10230 unsignedp, word_mode, NULL_RTX, 0);
10231 if (comp == const_true_rtx)
10232 emit_jump (if_true_label);
10233 else if (comp != const0_rtx)
10234 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10235
10236 /* Consider lower words only if these are equal. */
10237 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10238 NULL_RTX, 0);
10239 if (comp == const_true_rtx)
10240 emit_jump (if_false_label);
10241 else if (comp != const0_rtx)
10242 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10243 }
10244
10245 if (if_false_label)
10246 emit_jump (if_false_label);
10247 if (drop_through_label)
10248 emit_label (drop_through_label);
10249 }
10250
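/* A worked example of the loop above: a signed DImode comparison on a
   hypothetical 32-bit target has nwords == 2. The high-order words
   are compared first, with signed GT; if that branch is not taken, an
   NE test on the same words jumps to IF_FALSE_LABEL. Only when the
   high words are equal do we fall through to the low-order words,
   which are compared with GTU, since only the high-order word carries
   the sign. */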
10251 /* Compare OP0 with OP1, word at a time, in mode MODE.
10252 UNSIGNEDP says to do unsigned comparison.
10253 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10254
10255 void
10256 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10257 enum machine_mode mode;
10258 int unsignedp;
10259 rtx op0, op1;
10260 rtx if_false_label, if_true_label;
10261 {
10262 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10263 rtx drop_through_label = 0;
10264 int i;
10265
10266 if (! if_true_label || ! if_false_label)
10267 drop_through_label = gen_label_rtx ();
10268 if (! if_true_label)
10269 if_true_label = drop_through_label;
10270 if (! if_false_label)
10271 if_false_label = drop_through_label;
10272
10273 /* Compare a word at a time, high order first. */
10274 for (i = 0; i < nwords; i++)
10275 {
10276 rtx comp;
10277 rtx op0_word, op1_word;
10278
10279 if (WORDS_BIG_ENDIAN)
10280 {
10281 op0_word = operand_subword_force (op0, i, mode);
10282 op1_word = operand_subword_force (op1, i, mode);
10283 }
10284 else
10285 {
10286 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10287 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10288 }
10289
10290 /* All but the high-order word must be compared as unsigned. */
10291 comp = compare_from_rtx (op0_word, op1_word,
10292 (unsignedp || i > 0) ? GTU : GT,
10293 unsignedp, word_mode, NULL_RTX, 0);
10294 if (comp == const_true_rtx)
10295 emit_jump (if_true_label);
10296 else if (comp != const0_rtx)
10297 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10298
10299 /* Consider lower words only if these are equal. */
10300 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10301 NULL_RTX, 0);
10302 if (comp == const_true_rtx)
10303 emit_jump (if_false_label);
10304 else if (comp != const0_rtx)
10305 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10306 }
10307
10308 if (if_false_label)
10309 emit_jump (if_false_label);
10310 if (drop_through_label)
10311 emit_label (drop_through_label);
10312 }
10313
10314 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10315 with one insn, test the comparison and jump to the appropriate label. */
10316
10317 static void
10318 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10319 tree exp;
10320 rtx if_false_label, if_true_label;
10321 {
10322 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10323 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10324 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10325 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10326 int i;
10327 rtx drop_through_label = 0;
10328
10329 if (! if_false_label)
10330 drop_through_label = if_false_label = gen_label_rtx ();
10331
10332 for (i = 0; i < nwords; i++)
10333 {
10334 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10335 operand_subword_force (op1, i, mode),
10336 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10337 word_mode, NULL_RTX, 0);
10338 if (comp == const_true_rtx)
10339 emit_jump (if_false_label);
10340 else if (comp != const0_rtx)
10341 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10342 }
10343
10344 if (if_true_label)
10345 emit_jump (if_true_label);
10346 if (drop_through_label)
10347 emit_label (drop_through_label);
10348 }
10349 \f
10350 /* Jump according to whether OP0 is 0.
10351 We assume that OP0 has an integer mode that is too wide
10352 for the available compare insns. */
10353
10354 void
10355 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10356 rtx op0;
10357 rtx if_false_label, if_true_label;
10358 {
10359 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10360 rtx part;
10361 int i;
10362 rtx drop_through_label = 0;
10363
10364 /* The fastest way of doing this comparison on almost any machine is to
10365 "or" all the words and compare the result. If all have to be loaded
10366 from memory and this is a very wide item, it's possible this may
10367 be slower, but that's highly unlikely. */
10368
10369 part = gen_reg_rtx (word_mode);
10370 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10371 for (i = 1; i < nwords && part != 0; i++)
10372 part = expand_binop (word_mode, ior_optab, part,
10373 operand_subword_force (op0, i, GET_MODE (op0)),
10374 part, 1, OPTAB_WIDEN);
10375
10376 if (part != 0)
10377 {
10378 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10379 NULL_RTX, 0);
10380
10381 if (comp == const_true_rtx)
10382 emit_jump (if_false_label);
10383 else if (comp == const0_rtx)
10384 emit_jump (if_true_label);
10385 else
10386 do_jump_for_compare (comp, if_false_label, if_true_label);
10387
10388 return;
10389 }
10390
10391 /* If we couldn't do the "or" simply, do this with a series of compares. */
10392 if (! if_false_label)
10393 drop_through_label = if_false_label = gen_label_rtx ();
10394
10395 for (i = 0; i < nwords; i++)
10396 {
10397 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10398 GET_MODE (op0)),
10399 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10400 if (comp == const0_rtx)
10401 emit_jump (if_false_label);
10402 else if (comp != const_true_rtx)
10403 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10404 }
10405
10406 if (if_true_label)
10407 emit_jump (if_true_label);
10408
10409 if (drop_through_label)
10410 emit_label (drop_through_label);
10411 }
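
/* Illustrative sketch only, not from the original source: the "or"
   strategy above folds every word into one accumulator so that a single
   word-sized compare against zero decides the jump.  */
#if 0
static int
wide_is_zero_sketch (w, nwords)
     unsigned int *w;
     int nwords;
{
  unsigned int part = w[0];
  int i;

  for (i = 1; i < nwords; i++)
    part |= w[i];               /* ior each word into the accumulator */
  return part == 0;             /* one compare instead of one per word */
}
#endif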
10412
10413 /* Given a comparison expression in rtl form, output conditional branches to
10414 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10415
10416 static void
10417 do_jump_for_compare (comparison, if_false_label, if_true_label)
10418 rtx comparison, if_false_label, if_true_label;
10419 {
10420 if (if_true_label)
10421 {
10422 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10423 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10424 else
10425 abort ();
10426
10427 if (if_false_label)
10428 emit_jump (if_false_label);
10429 }
10430 else if (if_false_label)
10431 {
10432 rtx insn;
10433 rtx prev = get_last_insn ();
10434 rtx branch = 0;
10435
10436 /* Output the branch with the opposite condition. Then try to invert
10437 what is generated. If more than one insn is a branch, or if the
10438 branch is not the last insn written, abort. If we can't invert
10439 the branch, make a true label, redirect this jump to it,
10440 emit a jump to the false label, and define the true label. */
10441
10442 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10443 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10444 else
10445 abort ();
10446
10447 /* Here we get the first insn that was just emitted. It used to be the
10448 case that, on some machines, emitting the branch would discard
10449 the previous compare insn and emit a replacement. This isn't
10450 done anymore, but abort if we see that PREV is deleted. */
10451
10452 if (prev == 0)
10453 insn = get_insns ();
10454 else if (INSN_DELETED_P (prev))
10455 abort ();
10456 else
10457 insn = NEXT_INSN (prev);
10458
10459 for (; insn; insn = NEXT_INSN (insn))
10460 if (GET_CODE (insn) == JUMP_INSN)
10461 {
10462 if (branch)
10463 abort ();
10464 branch = insn;
10465 }
10466
10467 if (branch != get_last_insn ())
10468 abort ();
10469
10470 JUMP_LABEL (branch) = if_false_label;
10471 if (! invert_jump (branch, if_false_label))
10472 {
10473 if_true_label = gen_label_rtx ();
10474 redirect_jump (branch, if_true_label);
10475 emit_jump (if_false_label);
10476 emit_label (if_true_label);
10477 }
10478 }
10479 }
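
/* Illustrative sketch only, not from the original source: when the
   emitted branch cannot be inverted, the fallback above builds the same
   control flow from the original branch plus an unconditional jump.  */
#if 0
static void
invert_branch_sketch (cond)
     int cond;
{
  /* Wanted: go to the false label when COND fails, fall through
     otherwise, given only a branch that is taken when COND holds.  */
  if (cond)
    goto new_true_label;        /* the redirected original branch */
  goto if_false_label;          /* unconditional jump */
 new_true_label:
  return;                       /* as if the branch had been inverted */
 if_false_label:
  return;                       /* the false label */
}
#endif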
10480 \f
10481 /* Generate code for a comparison expression EXP
10482 (including code to compute the values to be compared)
10483 and set (CC0) according to the result.
10484 SIGNED_CODE should be the rtx operation for this comparison for
10485 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10486
10487 We force a stack adjustment unless there are currently
10488 things pushed on the stack that aren't yet used. */
10489
10490 static rtx
10491 compare (exp, signed_code, unsigned_code)
10492 register tree exp;
10493 enum rtx_code signed_code, unsigned_code;
10494 {
10495 register rtx op0
10496 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10497 register rtx op1
10498 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10499 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10500 register enum machine_mode mode = TYPE_MODE (type);
10501 int unsignedp = TREE_UNSIGNED (type);
10502 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10503
10504 #ifdef HAVE_canonicalize_funcptr_for_compare
10505 /* If function pointers need to be "canonicalized" before they can
10506 be reliably compared, then canonicalize them. */
10507 if (HAVE_canonicalize_funcptr_for_compare
10508 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10509 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10510 == FUNCTION_TYPE))
10511 {
10512 rtx new_op0 = gen_reg_rtx (mode);
10513
10514 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10515 op0 = new_op0;
10516 }
10517
10518 if (HAVE_canonicalize_funcptr_for_compare
10519 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10520 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10521 == FUNCTION_TYPE))
10522 {
10523 rtx new_op1 = gen_reg_rtx (mode);
10524
10525 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10526 op1 = new_op1;
10527 }
10528 #endif
10529
10530 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10531 ((mode == BLKmode)
10532 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10533 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10534 }
10535
10536 /* Like compare but expects the values to compare as two rtx's.
10537 The decision as to signed or unsigned comparison must be made by the caller.
10538
10539 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10540 compared.
10541
10542 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10543 size of MODE should be used. */
10544
10545 rtx
10546 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10547 register rtx op0, op1;
10548 enum rtx_code code;
10549 int unsignedp;
10550 enum machine_mode mode;
10551 rtx size;
10552 int align;
10553 {
10554 rtx tem;
10555
10556 /* If one operand is constant, make it the second one. Only do this
10557 if the other operand is not constant as well. */
10558
10559 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10560 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10561 {
10562 tem = op0;
10563 op0 = op1;
10564 op1 = tem;
10565 code = swap_condition (code);
10566 }
10567
10568 if (flag_force_mem)
10569 {
10570 op0 = force_not_mem (op0);
10571 op1 = force_not_mem (op1);
10572 }
10573
10574 do_pending_stack_adjust ();
10575
10576 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10577 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10578 return tem;
10579
10580 #if 0
10581 /* There's no need to do this now that combine.c can eliminate lots of
10582 sign extensions. This can be less efficient in certain cases on other
10583 machines. */
10584
10585 /* If this is a signed equality comparison, we can do it as an
10586 unsigned comparison since zero-extension is cheaper than sign
10587 extension and comparisons with zero are done as unsigned. This is
10588 the case even on machines that can do fast sign extension, since
10589 zero-extension is easier to combine with other operations than
10590 sign-extension is. If we are comparing against a constant, we must
10591 convert it to what it would look like unsigned. */
10592 if ((code == EQ || code == NE) && ! unsignedp
10593 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10594 {
10595 if (GET_CODE (op1) == CONST_INT
10596 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10597 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10598 unsignedp = 1;
10599 }
10600 #endif
10601
10602 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10603
10604 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10605 }
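
/* Illustrative sketch only, not from the original source: once the
   operands are swapped to put the constant second, the condition must
   be mirrored too, e.g. "5 < x" becomes "x > 5".  A stripped-down
   version of the mapping swap_condition performs:  */
#if 0
static enum rtx_code
swap_condition_sketch (code)
     enum rtx_code code;
{
  switch (code)
    {
    case LT:  return GT;        /* a < b   <=>  b > a   */
    case GT:  return LT;
    case LE:  return GE;        /* a <= b  <=>  b >= a  */
    case GE:  return LE;
    case LTU: return GTU;       /* likewise for the unsigned codes */
    case GTU: return LTU;
    case LEU: return GEU;
    case GEU: return LEU;
    default:  return code;      /* EQ and NE are symmetric */
    }
}
#endif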
10606 \f
10607 /* Generate code to calculate EXP using a store-flag instruction
10608 and return an rtx for the result. EXP is either a comparison
10609 or a TRUTH_NOT_EXPR whose operand is a comparison.
10610
10611 If TARGET is nonzero, store the result there if convenient.
10612
10613 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10614 cheap.
10615
10616 Return zero if there is no suitable set-flag instruction
10617 available on this machine.
10618
10619 Once expand_expr has been called on the arguments of the comparison,
10620 we are committed to doing the store flag, since it is not safe to
10621 re-evaluate the expression. We emit the store-flag insn by calling
10622 emit_store_flag, but only expand the arguments if we have a reason
10623 to believe that emit_store_flag will be successful. If we think that
10624 it will, but it isn't, we have to simulate the store-flag with a
10625 set/jump/set sequence. */
10626
10627 static rtx
10628 do_store_flag (exp, target, mode, only_cheap)
10629 tree exp;
10630 rtx target;
10631 enum machine_mode mode;
10632 int only_cheap;
10633 {
10634 enum rtx_code code;
10635 tree arg0, arg1, type;
10636 tree tem;
10637 enum machine_mode operand_mode;
10638 int invert = 0;
10639 int unsignedp;
10640 rtx op0, op1;
10641 enum insn_code icode;
10642 rtx subtarget = target;
10643 rtx result, label;
10644
10645 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10646 result at the end. We can't simply invert the test since it would
10647 have already been inverted if it were valid. This case occurs for
10648 some floating-point comparisons. */
10649
10650 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10651 invert = 1, exp = TREE_OPERAND (exp, 0);
10652
10653 arg0 = TREE_OPERAND (exp, 0);
10654 arg1 = TREE_OPERAND (exp, 1);
10655 type = TREE_TYPE (arg0);
10656 operand_mode = TYPE_MODE (type);
10657 unsignedp = TREE_UNSIGNED (type);
10658
10659 /* We won't bother with BLKmode store-flag operations because it would mean
10660 passing a lot of information to emit_store_flag. */
10661 if (operand_mode == BLKmode)
10662 return 0;
10663
10664 /* We won't bother with store-flag operations involving function pointers
10665 when function pointers must be canonicalized before comparisons. */
10666 #ifdef HAVE_canonicalize_funcptr_for_compare
10667 if (HAVE_canonicalize_funcptr_for_compare
10668 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10669 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10670 == FUNCTION_TYPE))
10671 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10672 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10673 == FUNCTION_TYPE))))
10674 return 0;
10675 #endif
10676
10677 STRIP_NOPS (arg0);
10678 STRIP_NOPS (arg1);
10679
10680 /* Get the rtx comparison code to use. We know that EXP is a comparison
10681 operation of some type. Some comparisons against 1 and -1 can be
10682 converted to comparisons with zero. Do so here so that the tests
10683 below will be aware that we have a comparison with zero. These
10684 tests will not catch constants in the first operand, but constants
10685 are rarely passed as the first operand. */
10686
10687 switch (TREE_CODE (exp))
10688 {
10689 case EQ_EXPR:
10690 code = EQ;
10691 break;
10692 case NE_EXPR:
10693 code = NE;
10694 break;
10695 case LT_EXPR:
10696 if (integer_onep (arg1))
10697 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10698 else
10699 code = unsignedp ? LTU : LT;
10700 break;
10701 case LE_EXPR:
10702 if (! unsignedp && integer_all_onesp (arg1))
10703 arg1 = integer_zero_node, code = LT;
10704 else
10705 code = unsignedp ? LEU : LE;
10706 break;
10707 case GT_EXPR:
10708 if (! unsignedp && integer_all_onesp (arg1))
10709 arg1 = integer_zero_node, code = GE;
10710 else
10711 code = unsignedp ? GTU : GT;
10712 break;
10713 case GE_EXPR:
10714 if (integer_onep (arg1))
10715 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10716 else
10717 code = unsignedp ? GEU : GE;
10718 break;
10719 default:
10720 abort ();
10721 }
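
/* Illustrative note, not from the original source: the rewrites in the
   switch above rest on the integer identities

   x <  1   <=>  x <= 0   (and, unsigned, x <=u 0, i.e. x == 0)
   x <= -1  <=>  x <  0   (signed only)
   x >  -1  <=>  x >= 0   (signed only)
   x >=  1  <=>  x >  0   (and, unsigned, x >u 0)

   so the tests below only need to recognize comparisons against zero.  */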
10722
10723 /* Put a constant second. */
10724 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10725 {
10726 tem = arg0; arg0 = arg1; arg1 = tem;
10727 code = swap_condition (code);
10728 }
10729
10730 /* If this is an equality or inequality test of a single bit, we can
10731 do this by shifting the bit being tested to the low-order bit and
10732 masking the result with the constant 1. If the condition was EQ,
10733 we xor it with 1. This does not require an scc insn and is faster
10734 than an scc insn even when one is available. */
10735
10736 if ((code == NE || code == EQ)
10737 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10738 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10739 {
10740 tree inner = TREE_OPERAND (arg0, 0);
10741 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10742 int ops_unsignedp;
10743
10744 /* If INNER is a right shift of a constant and it plus BITNUM does
10745 not overflow, adjust BITNUM and INNER. */
10746
10747 if (TREE_CODE (inner) == RSHIFT_EXPR
10748 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10749 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10750 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10751 < TYPE_PRECISION (type)))
10752 {
10753 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10754 inner = TREE_OPERAND (inner, 0);
10755 }
10756
10757 /* If we are going to be able to omit the AND below, we must do our
10758 operations as unsigned. If we must use the AND, we have a choice.
10759 Normally unsigned is faster, but for some machines signed is. */
10760 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10761 #ifdef LOAD_EXTEND_OP
10762 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10763 #else
10764 : 1
10765 #endif
10766 );
10767
10768 if (subtarget == 0 || GET_CODE (subtarget) != REG
10769 || GET_MODE (subtarget) != operand_mode
10770 || ! safe_from_p (subtarget, inner, 1))
10771 subtarget = 0;
10772
10773 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10774
10775 if (bitnum != 0)
10776 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10777 size_int (bitnum), subtarget, ops_unsignedp);
10778
10779 if (GET_MODE (op0) != mode)
10780 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10781
10782 if ((code == EQ && ! invert) || (code == NE && invert))
10783 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10784 ops_unsignedp, OPTAB_LIB_WIDEN);
10785
10786 /* Put the AND last so it can combine with more things. */
10787 if (bitnum != TYPE_PRECISION (type) - 1)
10788 op0 = expand_and (op0, const1_rtx, subtarget);
10789
10790 return op0;
10791 }
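
/* Illustrative sketch only, not from the original source: for a word X,
   bit number N and word size BITS, the single-bit case above computes
   the source-level rewrite of (x & (1 << n)) != 0 into (x >> n) & 1,
   with an extra xor for the == 0 form.  */
#if 0
static unsigned int
test_bit_sketch (x, n, want_eq, bits)
     unsigned int x, n, want_eq, bits;
{
  unsigned int t = x >> n;      /* bring the tested bit down to bit 0 */

  if (want_eq)
    t ^= 1;                     /* the (x & mask) == 0 form */
  if (n != bits - 1)
    t &= 1;                     /* the AND is put last, and is not
                                   needed when N was the sign bit */
  return t;
}
#endif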
10792
10793 /* Now see if we are likely to be able to do this. Return if not. */
10794 if (! can_compare_p (operand_mode))
10795 return 0;
10796 icode = setcc_gen_code[(int) code];
10797 if (icode == CODE_FOR_nothing
10798 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10799 {
10800 /* We can only do this if it is one of the special cases that
10801 can be handled without an scc insn. */
10802 if ((code == LT && integer_zerop (arg1))
10803 || (! only_cheap && code == GE && integer_zerop (arg1)))
10804 ;
10805 else if (BRANCH_COST >= 0
10806 && ! only_cheap && (code == NE || code == EQ)
10807 && TREE_CODE (type) != REAL_TYPE
10808 && ((abs_optab->handlers[(int) operand_mode].insn_code
10809 != CODE_FOR_nothing)
10810 || (ffs_optab->handlers[(int) operand_mode].insn_code
10811 != CODE_FOR_nothing)))
10812 ;
10813 else
10814 return 0;
10815 }
10816
10817 preexpand_calls (exp);
10818 if (subtarget == 0 || GET_CODE (subtarget) != REG
10819 || GET_MODE (subtarget) != operand_mode
10820 || ! safe_from_p (subtarget, arg1, 1))
10821 subtarget = 0;
10822
10823 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10824 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10825
10826 if (target == 0)
10827 target = gen_reg_rtx (mode);
10828
10829 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10830 because, if emit_store_flag does anything, it will succeed and
10831 OP0 and OP1 will not be used subsequently. */
10832
10833 result = emit_store_flag (target, code,
10834 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10835 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10836 operand_mode, unsignedp, 1);
10837
10838 if (result)
10839 {
10840 if (invert)
10841 result = expand_binop (mode, xor_optab, result, const1_rtx,
10842 result, 0, OPTAB_LIB_WIDEN);
10843 return result;
10844 }
10845
10846 /* If this failed, we have to do this with set/compare/jump/set code. */
10847 if (GET_CODE (target) != REG
10848 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10849 target = gen_reg_rtx (GET_MODE (target));
10850
10851 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10852 result = compare_from_rtx (op0, op1, code, unsignedp,
10853 operand_mode, NULL_RTX, 0);
10854 if (GET_CODE (result) == CONST_INT)
10855 return (((result == const0_rtx && ! invert)
10856 || (result != const0_rtx && invert))
10857 ? const0_rtx : const1_rtx);
10858
10859 label = gen_label_rtx ();
10860 if (bcc_gen_fctn[(int) code] == 0)
10861 abort ();
10862
10863 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10864 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10865 emit_label (label);
10866
10867 return target;
10868 }
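
/* Illustrative sketch only, not from the original source: with LT
   standing in for the comparison, the set/compare/jump/set fallback
   above computes the flag like this.  */
#if 0
static int
store_flag_sketch (op0, op1)
     int op0, op1;
{
  int target = 1;               /* assume the condition holds */

  if (op0 < op1)                /* the branch from bcc_gen_fctn */
    goto out;
  target = 0;                   /* it did not: overwrite the flag */
 out:
  return target;
}
#endif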
10869 \f
10870 /* Generate a tablejump instruction (used for switch statements). */
10871
10872 #ifdef HAVE_tablejump
10873
10874 /* INDEX is the value being switched on, with the lowest value
10875 in the table already subtracted.
10876 MODE is its expected mode (needed if INDEX is constant).
10877 RANGE is the length of the jump table.
10878 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10879
10880 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10881 index value is out of range. */
10882
10883 void
10884 do_tablejump (index, mode, range, table_label, default_label)
10885 rtx index, range, table_label, default_label;
10886 enum machine_mode mode;
10887 {
10888 register rtx temp, vector;
10889
10890 /* Do an unsigned comparison (in the proper mode) between the index
10891 expression and the value which represents the length of the range.
10892 Since we just finished subtracting the lower bound of the range
10893 from the index expression, this comparison allows us to simultaneously
10894 check that the original index expression value is both greater than
10895 or equal to the minimum value of the range and less than or equal to
10896 the maximum value of the range. */
10897
10898 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
10899 emit_jump_insn (gen_bgtu (default_label));
10900
10901 /* If index is in range, it must fit in Pmode.
10902 Convert to Pmode so we can index with it. */
10903 if (mode != Pmode)
10904 index = convert_to_mode (Pmode, index, 1);
10905
10906 /* Don't let a MEM slip through, because then the INDEX that comes
10907 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10908 and break_out_memory_refs will go to work on it and mess it up. */
10909 #ifdef PIC_CASE_VECTOR_ADDRESS
10910 if (flag_pic && GET_CODE (index) != REG)
10911 index = copy_to_mode_reg (Pmode, index);
10912 #endif
10913
10914 /* If flag_force_addr were to affect this address
10915 it could interfere with the tricky assumptions made
10916 about addresses that contain label-refs,
10917 which may be valid only very near the tablejump itself. */
10918 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10919 GET_MODE_SIZE, because this indicates how large insns are. The other
10920 uses should all be Pmode, because they are addresses. This code
10921 could fail if addresses and insns are not the same size. */
10922 index = gen_rtx_PLUS (Pmode,
10923 gen_rtx_MULT (Pmode, index,
10924 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10925 gen_rtx_LABEL_REF (Pmode, table_label));
10926 #ifdef PIC_CASE_VECTOR_ADDRESS
10927 if (flag_pic)
10928 index = PIC_CASE_VECTOR_ADDRESS (index);
10929 else
10930 #endif
10931 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10932 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10933 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10934 RTX_UNCHANGING_P (vector) = 1;
10935 convert_move (temp, vector, 0);
10936
10937 emit_jump_insn (gen_tablejump (temp, table_label));
10938
10939 /* If we are generating PIC code or if the table is PC-relative, the
10940 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10941 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10942 emit_barrier ();
10943 }
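
/* Illustrative sketch only, not from the original source: ignoring PIC,
   the sequence above behaves like the C below, with TABLE a hypothetical
   stand-in for the dispatch table emitted in the insn stream.  The
   single unsigned compare checks both bounds, because an INDEX below the
   (already subtracted) low bound wraps around to a huge unsigned value
   and fails INDEX <= RANGE as well.  */
#if 0
static void
tablejump_sketch (index, range, table, default_case)
     unsigned long index, range;
     void (**table) ();
     void (*default_case) ();
{
  if (index > range)
    (*default_case) ();         /* out of range: the default label */
  else
    (*table[index]) ();         /* stands in for the tablejump insn */
}
#endif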
10944
10945 #endif /* HAVE_tablejump */