]> gcc.gnu.org Git - gcc.git/blob - gcc/expr.c
e54f3e47a45004df9f9b3f2eaf10901c27a485d8
[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include <stdio.h>
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "expr.h"
36 #include "insn-config.h"
37 #include "recog.h"
38 #include "output.h"
39 #include "typeclass.h"
40 #include "defaults.h"
41
42 #include "bytecode.h"
43 #include "bc-opcode.h"
44 #include "bc-typecd.h"
45 #include "bc-optab.h"
46 #include "bc-emit.h"
47
48
49 #define CEIL(x,y) (((x) + (y) - 1) / (y))
50
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
53
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
56
57 #ifdef PUSH_ROUNDING
58
59 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
60 #define PUSH_ARGS_REVERSED /* If it's last to first */
61 #endif
62
63 #endif
64
65 #ifndef STACK_PUSH_CODE
66 #ifdef STACK_GROWS_DOWNWARD
67 #define STACK_PUSH_CODE PRE_DEC
68 #else
69 #define STACK_PUSH_CODE PRE_INC
70 #endif
71 #endif
72
73 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
74 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
75
76 /* Assume that case vectors are not pc-relative. */
77 #ifndef CASE_VECTOR_PC_RELATIVE
78 #define CASE_VECTOR_PC_RELATIVE 0
79 #endif
80
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
88
89 /* Nonzero to generate code for all the subroutines within an
90 expression before generating the upper levels of the expression.
91 Nowadays this is never zero. */
92 int do_preexpand_calls = 1;
93
94 /* Number of units that we should eventually pop off the stack.
95 These are the arguments to function calls that have already returned. */
96 int pending_stack_adjust;
97
98 /* Nonzero means stack pops must not be deferred, and deferred stack
99 pops must not be output. It is nonzero inside a function call,
100 inside a conditional expression, inside a statement expression,
101 and in other cases as well. */
102 int inhibit_defer_pop;
103
104 /* When temporaries are created by TARGET_EXPRs, they are created at
105 this level of temp_slot_level, so that they can remain allocated
106 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
107 of TARGET_EXPRs. */
108 int target_temp_slot_level;
109
110 /* Nonzero means __builtin_saveregs has already been done in this function.
111 The value is the pseudoreg containing the value __builtin_saveregs
112 returned. */
113 static rtx saveregs_value;
114
115 /* Similarly for __builtin_apply_args. */
116 static rtx apply_args_value;
117
118 /* Don't check memory usage, since code is being emitted to check a memory
119 usage. Used when flag_check_memory_usage is true, to avoid infinite
120 recursion. */
121 static int in_check_memory_usage;
122
123 /* This structure is used by move_by_pieces to describe the move to
124 be performed. */
125 struct move_by_pieces
126 {
127 rtx to;
128 rtx to_addr;
129 int autinc_to;
130 int explicit_inc_to;
131 int to_struct;
132 rtx from;
133 rtx from_addr;
134 int autinc_from;
135 int explicit_inc_from;
136 int from_struct;
137 int len;
138 int offset;
139 int reverse;
140 };
141
142 /* This structure is used by clear_by_pieces to describe the clear to
143 be performed. */
144
145 struct clear_by_pieces
146 {
147 rtx to;
148 rtx to_addr;
149 int autinc_to;
150 int explicit_inc_to;
151 int to_struct;
152 int len;
153 int offset;
154 int reverse;
155 };
156
157 /* Used to generate bytecodes: keep track of size of local variables,
158 as well as depth of arithmetic stack. (Notice that variables are
159 stored on the machine's stack, not the arithmetic stack.) */
160
161 static rtx get_push_address PROTO ((int));
162 extern int local_vars_size;
163 extern int stack_depth;
164 extern int max_stack_depth;
165 extern struct obstack permanent_obstack;
166 extern rtx arg_pointer_save_area;
167
168 static rtx enqueue_insn PROTO((rtx, rtx));
169 static int queued_subexp_p PROTO((rtx));
170 static void init_queue PROTO((void));
171 static void move_by_pieces PROTO((rtx, rtx, int, int));
172 static int move_by_pieces_ninsns PROTO((unsigned int, int));
173 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
174 struct move_by_pieces *));
175 static void clear_by_pieces PROTO((rtx, int, int));
176 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
177 struct clear_by_pieces *));
178 static int is_zeros_p PROTO((tree));
179 static int mostly_zeros_p PROTO((tree));
180 static void store_constructor PROTO((tree, rtx, int));
181 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
182 enum machine_mode, int, int, int));
183 static tree save_noncopied_parts PROTO((tree, tree));
184 static tree init_noncopied_parts PROTO((tree, tree));
185 static int safe_from_p PROTO((rtx, tree));
186 static int fixed_type_p PROTO((tree));
187 static rtx var_rtx PROTO((tree));
188 static int get_pointer_alignment PROTO((tree, unsigned));
189 static tree string_constant PROTO((tree, tree *));
190 static tree c_strlen PROTO((tree));
191 static rtx expand_builtin PROTO((tree, rtx, rtx,
192 enum machine_mode, int));
193 static int apply_args_size PROTO((void));
194 static int apply_result_size PROTO((void));
195 static rtx result_vector PROTO((int, rtx));
196 static rtx expand_builtin_apply_args PROTO((void));
197 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
198 static void expand_builtin_return PROTO((rtx));
199 static rtx expand_increment PROTO((tree, int, int));
200 void bc_expand_increment PROTO((struct increment_operator *, tree));
201 rtx bc_allocate_local PROTO((int, int));
202 void bc_store_memory PROTO((tree, tree));
203 tree bc_expand_component_address PROTO((tree));
204 tree bc_expand_address PROTO((tree));
205 void bc_expand_constructor PROTO((tree));
206 void bc_adjust_stack PROTO((int));
207 tree bc_canonicalize_array_ref PROTO((tree));
208 void bc_load_memory PROTO((tree, tree));
209 void bc_load_externaddr PROTO((rtx));
210 void bc_load_externaddr_id PROTO((tree, int));
211 void bc_load_localaddr PROTO((rtx));
212 void bc_load_parmaddr PROTO((rtx));
213 static void preexpand_calls PROTO((tree));
214 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
215 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
216 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
217 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
218 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
219 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
220 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
221 extern tree truthvalue_conversion PROTO((tree));
222
223 /* Record for each mode whether we can move a register directly to or
224 from an object of that mode in memory. If we can't, we won't try
225 to use that mode directly when accessing a field of that mode. */
226
227 static char direct_load[NUM_MACHINE_MODES];
228 static char direct_store[NUM_MACHINE_MODES];
229
230 /* MOVE_RATIO is the number of move instructions that is better than
231 a block move. */
232
233 #ifndef MOVE_RATIO
234 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
235 #define MOVE_RATIO 2
236 #else
237 /* A value of around 6 would minimize code size; infinity would minimize
238 execution time. */
239 #define MOVE_RATIO 15
240 #endif
241 #endif
242
243 /* This array records the insn_code of insns to perform block moves. */
244 enum insn_code movstr_optab[NUM_MACHINE_MODES];
245
246 /* This array records the insn_code of insns to perform block clears. */
247 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
248
249 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
250
251 #ifndef SLOW_UNALIGNED_ACCESS
252 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
253 #endif
254
255 /* Register mappings for target machines without register windows. */
256 #ifndef INCOMING_REGNO
257 #define INCOMING_REGNO(OUT) (OUT)
258 #endif
259 #ifndef OUTGOING_REGNO
260 #define OUTGOING_REGNO(IN) (IN)
261 #endif
262 \f
263 /* Maps used to convert modes to const, load, and store bytecodes. */
264 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
265 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
266 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
267
268 /* Initialize maps used to convert modes to const, load, and store
269 bytecodes. */
270
271 void
272 bc_init_mode_to_opcode_maps ()
273 {
274 int mode;
275
276 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
277 mode_to_const_map[mode]
278 = mode_to_load_map[mode]
279 = mode_to_store_map[mode] = neverneverland;
280
281 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
282 mode_to_const_map[(int) SYM] = CONST; \
283 mode_to_load_map[(int) SYM] = LOAD; \
284 mode_to_store_map[(int) SYM] = STORE;
285
286 #include "modemap.def"
287 #undef DEF_MODEMAP
288 }
289 \f
290 /* This is run once per compilation to set up which modes can be used
291 directly in memory and to initialize the block move optab. */
292
293 void
294 init_expr_once ()
295 {
296 rtx insn, pat;
297 enum machine_mode mode;
298 /* Try indexing by frame ptr and try by stack ptr.
299 It is known that on the Convex the stack ptr isn't a valid index.
300 With luck, one or the other is valid on any machine. */
301 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
302 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
303
304 start_sequence ();
305 insn = emit_insn (gen_rtx (SET, 0, NULL_RTX, NULL_RTX));
306 pat = PATTERN (insn);
307
308 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
309 mode = (enum machine_mode) ((int) mode + 1))
310 {
311 int regno;
312 rtx reg;
313 int num_clobbers;
314
315 direct_load[(int) mode] = direct_store[(int) mode] = 0;
316 PUT_MODE (mem, mode);
317 PUT_MODE (mem1, mode);
318
319 /* See if there is some register that can be used in this mode and
320 directly loaded or stored from memory. */
321
322 if (mode != VOIDmode && mode != BLKmode)
323 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
324 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
325 regno++)
326 {
327 if (! HARD_REGNO_MODE_OK (regno, mode))
328 continue;
329
330 reg = gen_rtx (REG, mode, regno);
331
332 SET_SRC (pat) = mem;
333 SET_DEST (pat) = reg;
334 if (recog (pat, insn, &num_clobbers) >= 0)
335 direct_load[(int) mode] = 1;
336
337 SET_SRC (pat) = mem1;
338 SET_DEST (pat) = reg;
339 if (recog (pat, insn, &num_clobbers) >= 0)
340 direct_load[(int) mode] = 1;
341
342 SET_SRC (pat) = reg;
343 SET_DEST (pat) = mem;
344 if (recog (pat, insn, &num_clobbers) >= 0)
345 direct_store[(int) mode] = 1;
346
347 SET_SRC (pat) = reg;
348 SET_DEST (pat) = mem1;
349 if (recog (pat, insn, &num_clobbers) >= 0)
350 direct_store[(int) mode] = 1;
351 }
352 }
353
354 end_sequence ();
355 }
356
357 /* This is run at the start of compiling a function. */
358
359 void
360 init_expr ()
361 {
362 init_queue ();
363
364 pending_stack_adjust = 0;
365 inhibit_defer_pop = 0;
366 saveregs_value = 0;
367 apply_args_value = 0;
368 forced_labels = 0;
369 }
370
371 /* Save all variables describing the current status into the structure *P.
372 This is used before starting a nested function. */
373
374 void
375 save_expr_status (p)
376 struct function *p;
377 {
378 /* Instead of saving the postincrement queue, empty it. */
379 emit_queue ();
380
381 p->pending_stack_adjust = pending_stack_adjust;
382 p->inhibit_defer_pop = inhibit_defer_pop;
383 p->saveregs_value = saveregs_value;
384 p->apply_args_value = apply_args_value;
385 p->forced_labels = forced_labels;
386
387 pending_stack_adjust = 0;
388 inhibit_defer_pop = 0;
389 saveregs_value = 0;
390 apply_args_value = 0;
391 forced_labels = 0;
392 }
393
394 /* Restore all variables describing the current status from the structure *P.
395 This is used after a nested function. */
396
397 void
398 restore_expr_status (p)
399 struct function *p;
400 {
401 pending_stack_adjust = p->pending_stack_adjust;
402 inhibit_defer_pop = p->inhibit_defer_pop;
403 saveregs_value = p->saveregs_value;
404 apply_args_value = p->apply_args_value;
405 forced_labels = p->forced_labels;
406 }
407 \f
408 /* Manage the queue of increment instructions to be output
409 for POSTINCREMENT_EXPR expressions, etc. */
410
411 static rtx pending_chain;
412
413 /* Queue up to increment (or change) VAR later. BODY says how:
414 BODY should be the same thing you would pass to emit_insn
415 to increment right away. It will go to emit_insn later on.
416
417 The value is a QUEUED expression to be used in place of VAR
418 where you want to guarantee the pre-incrementation value of VAR. */
419
420 static rtx
421 enqueue_insn (var, body)
422 rtx var, body;
423 {
424 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
425 var, NULL_RTX, NULL_RTX, body, pending_chain);
426 return pending_chain;
427 }
428
429 /* Use protect_from_queue to convert a QUEUED expression
430 into something that you can put immediately into an instruction.
431 If the queued incrementation has not happened yet,
432 protect_from_queue returns the variable itself.
433 If the incrementation has happened, protect_from_queue returns a temp
434 that contains a copy of the old value of the variable.
435
436 Any time an rtx which might possibly be a QUEUED is to be put
437 into an instruction, it must be passed through protect_from_queue first.
438 QUEUED expressions are not meaningful in instructions.
439
440 Do not pass a value through protect_from_queue and then hold
441 on to it for a while before putting it in an instruction!
442 If the queue is flushed in between, incorrect code will result. */
443
444 rtx
445 protect_from_queue (x, modify)
446 register rtx x;
447 int modify;
448 {
449 register RTX_CODE code = GET_CODE (x);
450
451 #if 0 /* A QUEUED can hang around after the queue is forced out. */
452 /* Shortcut for most common case. */
453 if (pending_chain == 0)
454 return x;
455 #endif
456
457 if (code != QUEUED)
458 {
459 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
460 use of autoincrement. Make a copy of the contents of the memory
461 location rather than a copy of the address, but not if the value is
462 of mode BLKmode. Don't modify X in place since it might be
463 shared. */
464 if (code == MEM && GET_MODE (x) != BLKmode
465 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
466 {
467 register rtx y = XEXP (x, 0);
468 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
469
470 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
471 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
472 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
473
474 if (QUEUED_INSN (y))
475 {
476 register rtx temp = gen_reg_rtx (GET_MODE (new));
477 emit_insn_before (gen_move_insn (temp, new),
478 QUEUED_INSN (y));
479 return temp;
480 }
481 return new;
482 }
483 /* Otherwise, recursively protect the subexpressions of all
484 the kinds of rtx's that can contain a QUEUED. */
485 if (code == MEM)
486 {
487 rtx tem = protect_from_queue (XEXP (x, 0), 0);
488 if (tem != XEXP (x, 0))
489 {
490 x = copy_rtx (x);
491 XEXP (x, 0) = tem;
492 }
493 }
494 else if (code == PLUS || code == MULT)
495 {
496 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
497 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
498 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
499 {
500 x = copy_rtx (x);
501 XEXP (x, 0) = new0;
502 XEXP (x, 1) = new1;
503 }
504 }
505 return x;
506 }
507 /* If the increment has not happened, use the variable itself. */
508 if (QUEUED_INSN (x) == 0)
509 return QUEUED_VAR (x);
510 /* If the increment has happened and a pre-increment copy exists,
511 use that copy. */
512 if (QUEUED_COPY (x) != 0)
513 return QUEUED_COPY (x);
514 /* The increment has happened but we haven't set up a pre-increment copy.
515 Set one up now, and use it. */
516 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
517 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
518 QUEUED_INSN (x));
519 return QUEUED_COPY (x);
520 }
521
522 /* Return nonzero if X contains a QUEUED expression:
523 if it contains anything that will be altered by a queued increment.
524 We handle only combinations of MEM, PLUS, MINUS and MULT operators
525 since memory addresses generally contain only those. */
526
527 static int
528 queued_subexp_p (x)
529 rtx x;
530 {
531 register enum rtx_code code = GET_CODE (x);
532 switch (code)
533 {
534 case QUEUED:
535 return 1;
536 case MEM:
537 return queued_subexp_p (XEXP (x, 0));
538 case MULT:
539 case PLUS:
540 case MINUS:
541 return (queued_subexp_p (XEXP (x, 0))
542 || queued_subexp_p (XEXP (x, 1)));
543 default:
544 return 0;
545 }
546 }
547
548 /* Perform all the pending incrementations. */
549
550 void
551 emit_queue ()
552 {
553 register rtx p;
554 while (p = pending_chain)
555 {
556 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
557 pending_chain = QUEUED_NEXT (p);
558 }
559 }
560
561 static void
562 init_queue ()
563 {
564 if (pending_chain)
565 abort ();
566 }
567 \f
568 /* Copy data from FROM to TO, where the machine modes are not the same.
569 Both modes may be integer, or both may be floating.
570 UNSIGNEDP should be nonzero if FROM is an unsigned type.
571 This causes zero-extension instead of sign-extension. */
572
573 void
574 convert_move (to, from, unsignedp)
575 register rtx to, from;
576 int unsignedp;
577 {
578 enum machine_mode to_mode = GET_MODE (to);
579 enum machine_mode from_mode = GET_MODE (from);
580 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
581 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
582 enum insn_code code;
583 rtx libcall;
584
585 /* rtx code for making an equivalent value. */
586 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
587
588 to = protect_from_queue (to, 1);
589 from = protect_from_queue (from, 0);
590
591 if (to_real != from_real)
592 abort ();
593
594 /* If FROM is a SUBREG that indicates that we have already done at least
595 the required extension, strip it. We don't handle such SUBREGs as
596 TO here. */
597
598 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
599 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
600 >= GET_MODE_SIZE (to_mode))
601 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
602 from = gen_lowpart (to_mode, from), from_mode = to_mode;
603
604 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
605 abort ();
606
607 if (to_mode == from_mode
608 || (from_mode == VOIDmode && CONSTANT_P (from)))
609 {
610 emit_move_insn (to, from);
611 return;
612 }
613
614 if (to_real)
615 {
616 rtx value;
617
618 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
619 {
620 /* Try converting directly if the insn is supported. */
621 if ((code = can_extend_p (to_mode, from_mode, 0))
622 != CODE_FOR_nothing)
623 {
624 emit_unop_insn (code, to, from, UNKNOWN);
625 return;
626 }
627 }
628
629 #ifdef HAVE_trunchfqf2
630 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
631 {
632 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
633 return;
634 }
635 #endif
636 #ifdef HAVE_trunctqfqf2
637 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
638 {
639 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
640 return;
641 }
642 #endif
643 #ifdef HAVE_truncsfqf2
644 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
645 {
646 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
647 return;
648 }
649 #endif
650 #ifdef HAVE_truncdfqf2
651 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
652 {
653 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
654 return;
655 }
656 #endif
657 #ifdef HAVE_truncxfqf2
658 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
659 {
660 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
661 return;
662 }
663 #endif
664 #ifdef HAVE_trunctfqf2
665 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
666 {
667 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
668 return;
669 }
670 #endif
671
672 #ifdef HAVE_trunctqfhf2
673 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
674 {
675 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
676 return;
677 }
678 #endif
679 #ifdef HAVE_truncsfhf2
680 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
681 {
682 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
683 return;
684 }
685 #endif
686 #ifdef HAVE_truncdfhf2
687 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
688 {
689 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
690 return;
691 }
692 #endif
693 #ifdef HAVE_truncxfhf2
694 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
695 {
696 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
697 return;
698 }
699 #endif
700 #ifdef HAVE_trunctfhf2
701 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
702 {
703 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
704 return;
705 }
706 #endif
707
708 #ifdef HAVE_truncsftqf2
709 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
710 {
711 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
712 return;
713 }
714 #endif
715 #ifdef HAVE_truncdftqf2
716 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
717 {
718 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
719 return;
720 }
721 #endif
722 #ifdef HAVE_truncxftqf2
723 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
724 {
725 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
726 return;
727 }
728 #endif
729 #ifdef HAVE_trunctftqf2
730 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
731 {
732 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
733 return;
734 }
735 #endif
736
737 #ifdef HAVE_truncdfsf2
738 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
739 {
740 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
741 return;
742 }
743 #endif
744 #ifdef HAVE_truncxfsf2
745 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
746 {
747 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
748 return;
749 }
750 #endif
751 #ifdef HAVE_trunctfsf2
752 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
753 {
754 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
755 return;
756 }
757 #endif
758 #ifdef HAVE_truncxfdf2
759 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
760 {
761 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
762 return;
763 }
764 #endif
765 #ifdef HAVE_trunctfdf2
766 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
767 {
768 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
769 return;
770 }
771 #endif
772
773 libcall = (rtx) 0;
774 switch (from_mode)
775 {
776 case SFmode:
777 switch (to_mode)
778 {
779 case DFmode:
780 libcall = extendsfdf2_libfunc;
781 break;
782
783 case XFmode:
784 libcall = extendsfxf2_libfunc;
785 break;
786
787 case TFmode:
788 libcall = extendsftf2_libfunc;
789 break;
790
791 default:
792 break;
793 }
794 break;
795
796 case DFmode:
797 switch (to_mode)
798 {
799 case SFmode:
800 libcall = truncdfsf2_libfunc;
801 break;
802
803 case XFmode:
804 libcall = extenddfxf2_libfunc;
805 break;
806
807 case TFmode:
808 libcall = extenddftf2_libfunc;
809 break;
810
811 default:
812 break;
813 }
814 break;
815
816 case XFmode:
817 switch (to_mode)
818 {
819 case SFmode:
820 libcall = truncxfsf2_libfunc;
821 break;
822
823 case DFmode:
824 libcall = truncxfdf2_libfunc;
825 break;
826
827 default:
828 break;
829 }
830 break;
831
832 case TFmode:
833 switch (to_mode)
834 {
835 case SFmode:
836 libcall = trunctfsf2_libfunc;
837 break;
838
839 case DFmode:
840 libcall = trunctfdf2_libfunc;
841 break;
842
843 default:
844 break;
845 }
846 break;
847
848 default:
849 break;
850 }
851
852 if (libcall == (rtx) 0)
853 /* This conversion is not implemented yet. */
854 abort ();
855
856 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
857 1, from, from_mode);
858 emit_move_insn (to, value);
859 return;
860 }
861
862 /* Now both modes are integers. */
863
864 /* Handle expanding beyond a word. */
865 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
866 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
867 {
868 rtx insns;
869 rtx lowpart;
870 rtx fill_value;
871 rtx lowfrom;
872 int i;
873 enum machine_mode lowpart_mode;
874 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
875
876 /* Try converting directly if the insn is supported. */
877 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
878 != CODE_FOR_nothing)
879 {
880 /* If FROM is a SUBREG, put it into a register. Do this
881 so that we always generate the same set of insns for
882 better cse'ing; if an intermediate assignment occurred,
883 we won't be doing the operation directly on the SUBREG. */
884 if (optimize > 0 && GET_CODE (from) == SUBREG)
885 from = force_reg (from_mode, from);
886 emit_unop_insn (code, to, from, equiv_code);
887 return;
888 }
889 /* Next, try converting via full word. */
890 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
891 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
892 != CODE_FOR_nothing))
893 {
894 if (GET_CODE (to) == REG)
895 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
896 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
897 emit_unop_insn (code, to,
898 gen_lowpart (word_mode, to), equiv_code);
899 return;
900 }
901
902 /* No special multiword conversion insn; do it by hand. */
903 start_sequence ();
904
905 /* Since we will turn this into a no conflict block, we must ensure
906 that the source does not overlap the target. */
907
908 if (reg_overlap_mentioned_p (to, from))
909 from = force_reg (from_mode, from);
910
911 /* Get a copy of FROM widened to a word, if necessary. */
912 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
913 lowpart_mode = word_mode;
914 else
915 lowpart_mode = from_mode;
916
917 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
918
919 lowpart = gen_lowpart (lowpart_mode, to);
920 emit_move_insn (lowpart, lowfrom);
921
922 /* Compute the value to put in each remaining word. */
923 if (unsignedp)
924 fill_value = const0_rtx;
925 else
926 {
927 #ifdef HAVE_slt
928 if (HAVE_slt
929 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
930 && STORE_FLAG_VALUE == -1)
931 {
932 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
933 lowpart_mode, 0, 0);
934 fill_value = gen_reg_rtx (word_mode);
935 emit_insn (gen_slt (fill_value));
936 }
937 else
938 #endif
939 {
940 fill_value
941 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
942 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
943 NULL_RTX, 0);
944 fill_value = convert_to_mode (word_mode, fill_value, 1);
945 }
946 }
947
948 /* Fill the remaining words. */
949 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
950 {
951 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
952 rtx subword = operand_subword (to, index, 1, to_mode);
953
954 if (subword == 0)
955 abort ();
956
957 if (fill_value != subword)
958 emit_move_insn (subword, fill_value);
959 }
960
961 insns = get_insns ();
962 end_sequence ();
963
964 emit_no_conflict_block (insns, to, from, NULL_RTX,
965 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
966 return;
967 }
968
969 /* Truncating multi-word to a word or less. */
970 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
971 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
972 {
973 if (!((GET_CODE (from) == MEM
974 && ! MEM_VOLATILE_P (from)
975 && direct_load[(int) to_mode]
976 && ! mode_dependent_address_p (XEXP (from, 0)))
977 || GET_CODE (from) == REG
978 || GET_CODE (from) == SUBREG))
979 from = force_reg (from_mode, from);
980 convert_move (to, gen_lowpart (word_mode, from), 0);
981 return;
982 }
983
984 /* Handle pointer conversion */ /* SPEE 900220 */
985 if (to_mode == PSImode)
986 {
987 if (from_mode != SImode)
988 from = convert_to_mode (SImode, from, unsignedp);
989
990 #ifdef HAVE_truncsipsi2
991 if (HAVE_truncsipsi2)
992 {
993 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
994 return;
995 }
996 #endif /* HAVE_truncsipsi2 */
997 abort ();
998 }
999
1000 if (from_mode == PSImode)
1001 {
1002 if (to_mode != SImode)
1003 {
1004 from = convert_to_mode (SImode, from, unsignedp);
1005 from_mode = SImode;
1006 }
1007 else
1008 {
1009 #ifdef HAVE_extendpsisi2
1010 if (HAVE_extendpsisi2)
1011 {
1012 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1013 return;
1014 }
1015 #endif /* HAVE_extendpsisi2 */
1016 abort ();
1017 }
1018 }
1019
1020 if (to_mode == PDImode)
1021 {
1022 if (from_mode != DImode)
1023 from = convert_to_mode (DImode, from, unsignedp);
1024
1025 #ifdef HAVE_truncdipdi2
1026 if (HAVE_truncdipdi2)
1027 {
1028 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1029 return;
1030 }
1031 #endif /* HAVE_truncdipdi2 */
1032 abort ();
1033 }
1034
1035 if (from_mode == PDImode)
1036 {
1037 if (to_mode != DImode)
1038 {
1039 from = convert_to_mode (DImode, from, unsignedp);
1040 from_mode = DImode;
1041 }
1042 else
1043 {
1044 #ifdef HAVE_extendpdidi2
1045 if (HAVE_extendpdidi2)
1046 {
1047 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1048 return;
1049 }
1050 #endif /* HAVE_extendpdidi2 */
1051 abort ();
1052 }
1053 }
1054
1055 /* Now follow all the conversions between integers
1056 no more than a word long. */
1057
1058 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1059 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1060 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1061 GET_MODE_BITSIZE (from_mode)))
1062 {
1063 if (!((GET_CODE (from) == MEM
1064 && ! MEM_VOLATILE_P (from)
1065 && direct_load[(int) to_mode]
1066 && ! mode_dependent_address_p (XEXP (from, 0)))
1067 || GET_CODE (from) == REG
1068 || GET_CODE (from) == SUBREG))
1069 from = force_reg (from_mode, from);
1070 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1071 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1072 from = copy_to_reg (from);
1073 emit_move_insn (to, gen_lowpart (to_mode, from));
1074 return;
1075 }
1076
1077 /* Handle extension. */
1078 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1079 {
1080 /* Convert directly if that works. */
1081 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1082 != CODE_FOR_nothing)
1083 {
1084 emit_unop_insn (code, to, from, equiv_code);
1085 return;
1086 }
1087 else
1088 {
1089 enum machine_mode intermediate;
1090
1091 /* Search for a mode to convert via. */
1092 for (intermediate = from_mode; intermediate != VOIDmode;
1093 intermediate = GET_MODE_WIDER_MODE (intermediate))
1094 if (((can_extend_p (to_mode, intermediate, unsignedp)
1095 != CODE_FOR_nothing)
1096 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1097 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1098 && (can_extend_p (intermediate, from_mode, unsignedp)
1099 != CODE_FOR_nothing))
1100 {
1101 convert_move (to, convert_to_mode (intermediate, from,
1102 unsignedp), unsignedp);
1103 return;
1104 }
1105
1106 /* No suitable intermediate mode. */
1107 abort ();
1108 }
1109 }
1110
1111 /* Support special truncate insns for certain modes. */
1112
1113 if (from_mode == DImode && to_mode == SImode)
1114 {
1115 #ifdef HAVE_truncdisi2
1116 if (HAVE_truncdisi2)
1117 {
1118 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1119 return;
1120 }
1121 #endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1124 }
1125
1126 if (from_mode == DImode && to_mode == HImode)
1127 {
1128 #ifdef HAVE_truncdihi2
1129 if (HAVE_truncdihi2)
1130 {
1131 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1132 return;
1133 }
1134 #endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1137 }
1138
1139 if (from_mode == DImode && to_mode == QImode)
1140 {
1141 #ifdef HAVE_truncdiqi2
1142 if (HAVE_truncdiqi2)
1143 {
1144 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1145 return;
1146 }
1147 #endif
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 return;
1150 }
1151
1152 if (from_mode == SImode && to_mode == HImode)
1153 {
1154 #ifdef HAVE_truncsihi2
1155 if (HAVE_truncsihi2)
1156 {
1157 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1158 return;
1159 }
1160 #endif
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 return;
1163 }
1164
1165 if (from_mode == SImode && to_mode == QImode)
1166 {
1167 #ifdef HAVE_truncsiqi2
1168 if (HAVE_truncsiqi2)
1169 {
1170 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1171 return;
1172 }
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1176 }
1177
1178 if (from_mode == HImode && to_mode == QImode)
1179 {
1180 #ifdef HAVE_trunchiqi2
1181 if (HAVE_trunchiqi2)
1182 {
1183 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1184 return;
1185 }
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1189 }
1190
1191 if (from_mode == TImode && to_mode == DImode)
1192 {
1193 #ifdef HAVE_trunctidi2
1194 if (HAVE_trunctidi2)
1195 {
1196 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1197 return;
1198 }
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1202 }
1203
1204 if (from_mode == TImode && to_mode == SImode)
1205 {
1206 #ifdef HAVE_trunctisi2
1207 if (HAVE_trunctisi2)
1208 {
1209 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1210 return;
1211 }
1212 #endif
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1214 return;
1215 }
1216
1217 if (from_mode == TImode && to_mode == HImode)
1218 {
1219 #ifdef HAVE_trunctihi2
1220 if (HAVE_trunctihi2)
1221 {
1222 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1223 return;
1224 }
1225 #endif
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1227 return;
1228 }
1229
1230 if (from_mode == TImode && to_mode == QImode)
1231 {
1232 #ifdef HAVE_trunctiqi2
1233 if (HAVE_trunctiqi2)
1234 {
1235 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1236 return;
1237 }
1238 #endif
1239 convert_move (to, force_reg (from_mode, from), unsignedp);
1240 return;
1241 }
1242
1243 /* Handle truncation of volatile memrefs, and so on;
1244 the things that couldn't be truncated directly,
1245 and for which there was no special instruction. */
1246 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1247 {
1248 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1249 emit_move_insn (to, temp);
1250 return;
1251 }
1252
1253 /* Mode combination is not recognized. */
1254 abort ();
1255 }
1256
1257 /* Return an rtx for a value that would result
1258 from converting X to mode MODE.
1259 Both X and MODE may be floating, or both integer.
1260 UNSIGNEDP is nonzero if X is an unsigned value.
1261 This can be done by referring to a part of X in place
1262 or by copying to a new temporary with conversion.
1263
1264 This function *must not* call protect_from_queue
1265 except when putting X into an insn (in which case convert_move does it). */
1266
1267 rtx
1268 convert_to_mode (mode, x, unsignedp)
1269 enum machine_mode mode;
1270 rtx x;
1271 int unsignedp;
1272 {
1273 return convert_modes (mode, VOIDmode, x, unsignedp);
1274 }
1275
1276 /* Return an rtx for a value that would result
1277 from converting X from mode OLDMODE to mode MODE.
1278 Both modes may be floating, or both integer.
1279 UNSIGNEDP is nonzero if X is an unsigned value.
1280
1281 This can be done by referring to a part of X in place
1282 or by copying to a new temporary with conversion.
1283
1284 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1285
1286 This function *must not* call protect_from_queue
1287 except when putting X into an insn (in which case convert_move does it). */
1288
1289 rtx
1290 convert_modes (mode, oldmode, x, unsignedp)
1291 enum machine_mode mode, oldmode;
1292 rtx x;
1293 int unsignedp;
1294 {
1295 register rtx temp;
1296
1297 /* If FROM is a SUBREG that indicates that we have already done at least
1298 the required extension, strip it. */
1299
1300 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1301 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1302 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1303 x = gen_lowpart (mode, x);
1304
1305 if (GET_MODE (x) != VOIDmode)
1306 oldmode = GET_MODE (x);
1307
1308 if (mode == oldmode)
1309 return x;
1310
1311 /* There is one case that we must handle specially: If we are converting
1312 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1313 we are to interpret the constant as unsigned, gen_lowpart will do
1314 the wrong if the constant appears negative. What we want to do is
1315 make the high-order word of the constant zero, not all ones. */
1316
1317 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1318 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1319 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1320 {
1321 HOST_WIDE_INT val = INTVAL (x);
1322
1323 if (oldmode != VOIDmode
1324 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1325 {
1326 int width = GET_MODE_BITSIZE (oldmode);
1327
1328 /* We need to zero extend VAL. */
1329 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1330 }
1331
1332 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1333 }
1334
1335 /* We can do this with a gen_lowpart if both desired and current modes
1336 are integer, and this is either a constant integer, a register, or a
1337 non-volatile MEM. Except for the constant case where MODE is no
1338 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1339
1340 if ((GET_CODE (x) == CONST_INT
1341 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1342 || (GET_MODE_CLASS (mode) == MODE_INT
1343 && GET_MODE_CLASS (oldmode) == MODE_INT
1344 && (GET_CODE (x) == CONST_DOUBLE
1345 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1346 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1347 && direct_load[(int) mode])
1348 || (GET_CODE (x) == REG
1349 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1350 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1351 {
1352 /* ?? If we don't know OLDMODE, we have to assume here that
1353 X does not need sign- or zero-extension. This may not be
1354 the case, but it's the best we can do. */
1355 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1356 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1357 {
1358 HOST_WIDE_INT val = INTVAL (x);
1359 int width = GET_MODE_BITSIZE (oldmode);
1360
1361 /* We must sign or zero-extend in this case. Start by
1362 zero-extending, then sign extend if we need to. */
1363 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1364 if (! unsignedp
1365 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1366 val |= (HOST_WIDE_INT) (-1) << width;
1367
1368 return GEN_INT (val);
1369 }
1370
1371 return gen_lowpart (mode, x);
1372 }
1373
1374 temp = gen_reg_rtx (mode);
1375 convert_move (temp, x, unsignedp);
1376 return temp;
1377 }
1378 \f
1379 /* Generate several move instructions to copy LEN bytes
1380 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1381 The caller must pass FROM and TO
1382 through protect_from_queue before calling.
1383 ALIGN (in bytes) is maximum alignment we can assume. */
1384
1385 static void
1386 move_by_pieces (to, from, len, align)
1387 rtx to, from;
1388 int len, align;
1389 {
1390 struct move_by_pieces data;
1391 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1392 int max_size = MOVE_MAX + 1;
1393
1394 data.offset = 0;
1395 data.to_addr = to_addr;
1396 data.from_addr = from_addr;
1397 data.to = to;
1398 data.from = from;
1399 data.autinc_to
1400 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1401 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1402 data.autinc_from
1403 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1404 || GET_CODE (from_addr) == POST_INC
1405 || GET_CODE (from_addr) == POST_DEC);
1406
1407 data.explicit_inc_from = 0;
1408 data.explicit_inc_to = 0;
1409 data.reverse
1410 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1411 if (data.reverse) data.offset = len;
1412 data.len = len;
1413
1414 data.to_struct = MEM_IN_STRUCT_P (to);
1415 data.from_struct = MEM_IN_STRUCT_P (from);
1416
1417 /* If copying requires more than two move insns,
1418 copy addresses to registers (to make displacements shorter)
1419 and use post-increment if available. */
1420 if (!(data.autinc_from && data.autinc_to)
1421 && move_by_pieces_ninsns (len, align) > 2)
1422 {
1423 #ifdef HAVE_PRE_DECREMENT
1424 if (data.reverse && ! data.autinc_from)
1425 {
1426 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1427 data.autinc_from = 1;
1428 data.explicit_inc_from = -1;
1429 }
1430 #endif
1431 #ifdef HAVE_POST_INCREMENT
1432 if (! data.autinc_from)
1433 {
1434 data.from_addr = copy_addr_to_reg (from_addr);
1435 data.autinc_from = 1;
1436 data.explicit_inc_from = 1;
1437 }
1438 #endif
1439 if (!data.autinc_from && CONSTANT_P (from_addr))
1440 data.from_addr = copy_addr_to_reg (from_addr);
1441 #ifdef HAVE_PRE_DECREMENT
1442 if (data.reverse && ! data.autinc_to)
1443 {
1444 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1445 data.autinc_to = 1;
1446 data.explicit_inc_to = -1;
1447 }
1448 #endif
1449 #ifdef HAVE_POST_INCREMENT
1450 if (! data.reverse && ! data.autinc_to)
1451 {
1452 data.to_addr = copy_addr_to_reg (to_addr);
1453 data.autinc_to = 1;
1454 data.explicit_inc_to = 1;
1455 }
1456 #endif
1457 if (!data.autinc_to && CONSTANT_P (to_addr))
1458 data.to_addr = copy_addr_to_reg (to_addr);
1459 }
1460
1461 if (! SLOW_UNALIGNED_ACCESS
1462 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1463 align = MOVE_MAX;
1464
1465 /* First move what we can in the largest integer mode, then go to
1466 successively smaller modes. */
1467
1468 while (max_size > 1)
1469 {
1470 enum machine_mode mode = VOIDmode, tmode;
1471 enum insn_code icode;
1472
1473 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1474 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1475 if (GET_MODE_SIZE (tmode) < max_size)
1476 mode = tmode;
1477
1478 if (mode == VOIDmode)
1479 break;
1480
1481 icode = mov_optab->handlers[(int) mode].insn_code;
1482 if (icode != CODE_FOR_nothing
1483 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1484 GET_MODE_SIZE (mode)))
1485 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1486
1487 max_size = GET_MODE_SIZE (mode);
1488 }
1489
1490 /* The code above should have handled everything. */
1491 if (data.len > 0)
1492 abort ();
1493 }
1494
1495 /* Return number of insns required to move L bytes by pieces.
1496 ALIGN (in bytes) is maximum alignment we can assume. */
1497
1498 static int
1499 move_by_pieces_ninsns (l, align)
1500 unsigned int l;
1501 int align;
1502 {
1503 register int n_insns = 0;
1504 int max_size = MOVE_MAX + 1;
1505
1506 if (! SLOW_UNALIGNED_ACCESS
1507 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1508 align = MOVE_MAX;
1509
1510 while (max_size > 1)
1511 {
1512 enum machine_mode mode = VOIDmode, tmode;
1513 enum insn_code icode;
1514
1515 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1516 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1517 if (GET_MODE_SIZE (tmode) < max_size)
1518 mode = tmode;
1519
1520 if (mode == VOIDmode)
1521 break;
1522
1523 icode = mov_optab->handlers[(int) mode].insn_code;
1524 if (icode != CODE_FOR_nothing
1525 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1526 GET_MODE_SIZE (mode)))
1527 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1528
1529 max_size = GET_MODE_SIZE (mode);
1530 }
1531
1532 return n_insns;
1533 }
1534
1535 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1536 with move instructions for mode MODE. GENFUN is the gen_... function
1537 to make a move insn for that mode. DATA has all the other info. */
1538
1539 static void
1540 move_by_pieces_1 (genfun, mode, data)
1541 rtx (*genfun) PROTO ((rtx, ...));
1542 enum machine_mode mode;
1543 struct move_by_pieces *data;
1544 {
1545 register int size = GET_MODE_SIZE (mode);
1546 register rtx to1, from1;
1547
1548 while (data->len >= size)
1549 {
1550 if (data->reverse) data->offset -= size;
1551
1552 to1 = (data->autinc_to
1553 ? gen_rtx (MEM, mode, data->to_addr)
1554 : copy_rtx (change_address (data->to, mode,
1555 plus_constant (data->to_addr,
1556 data->offset))));
1557 MEM_IN_STRUCT_P (to1) = data->to_struct;
1558
1559 from1
1560 = (data->autinc_from
1561 ? gen_rtx (MEM, mode, data->from_addr)
1562 : copy_rtx (change_address (data->from, mode,
1563 plus_constant (data->from_addr,
1564 data->offset))));
1565 MEM_IN_STRUCT_P (from1) = data->from_struct;
1566
1567 #ifdef HAVE_PRE_DECREMENT
1568 if (data->explicit_inc_to < 0)
1569 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1570 if (data->explicit_inc_from < 0)
1571 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1572 #endif
1573
1574 emit_insn ((*genfun) (to1, from1));
1575 #ifdef HAVE_POST_INCREMENT
1576 if (data->explicit_inc_to > 0)
1577 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1578 if (data->explicit_inc_from > 0)
1579 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1580 #endif
1581
1582 if (! data->reverse) data->offset += size;
1583
1584 data->len -= size;
1585 }
1586 }
1587 \f
1588 /* Emit code to move a block Y to a block X.
1589 This may be done with string-move instructions,
1590 with multiple scalar move instructions, or with a library call.
1591
1592 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1593 with mode BLKmode.
1594 SIZE is an rtx that says how long they are.
1595 ALIGN is the maximum alignment we can assume they have,
1596 measured in bytes.
1597
1598 Return the address of the new block, if memcpy is called and returns it,
1599 0 otherwise. */
1600
1601 rtx
1602 emit_block_move (x, y, size, align)
1603 rtx x, y;
1604 rtx size;
1605 int align;
1606 {
1607 rtx retval = 0;
1608
1609 if (GET_MODE (x) != BLKmode)
1610 abort ();
1611
1612 if (GET_MODE (y) != BLKmode)
1613 abort ();
1614
1615 x = protect_from_queue (x, 1);
1616 y = protect_from_queue (y, 0);
1617 size = protect_from_queue (size, 0);
1618
1619 if (GET_CODE (x) != MEM)
1620 abort ();
1621 if (GET_CODE (y) != MEM)
1622 abort ();
1623 if (size == 0)
1624 abort ();
1625
1626 if (GET_CODE (size) == CONST_INT
1627 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1628 move_by_pieces (x, y, INTVAL (size), align);
1629 else
1630 {
1631 /* Try the most limited insn first, because there's no point
1632 including more than one in the machine description unless
1633 the more limited one has some advantage. */
1634
1635 rtx opalign = GEN_INT (align);
1636 enum machine_mode mode;
1637
1638 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1639 mode = GET_MODE_WIDER_MODE (mode))
1640 {
1641 enum insn_code code = movstr_optab[(int) mode];
1642
1643 if (code != CODE_FOR_nothing
1644 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1645 here because if SIZE is less than the mode mask, as it is
1646 returned by the macro, it will definitely be less than the
1647 actual mode mask. */
1648 && ((GET_CODE (size) == CONST_INT
1649 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1650 <= GET_MODE_MASK (mode)))
1651 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1652 && (insn_operand_predicate[(int) code][0] == 0
1653 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1654 && (insn_operand_predicate[(int) code][1] == 0
1655 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1656 && (insn_operand_predicate[(int) code][3] == 0
1657 || (*insn_operand_predicate[(int) code][3]) (opalign,
1658 VOIDmode)))
1659 {
1660 rtx op2;
1661 rtx last = get_last_insn ();
1662 rtx pat;
1663
1664 op2 = convert_to_mode (mode, size, 1);
1665 if (insn_operand_predicate[(int) code][2] != 0
1666 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1667 op2 = copy_to_mode_reg (mode, op2);
1668
1669 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1670 if (pat)
1671 {
1672 emit_insn (pat);
1673 return 0;
1674 }
1675 else
1676 delete_insns_since (last);
1677 }
1678 }
1679
1680 #ifdef TARGET_MEM_FUNCTIONS
1681 retval
1682 = emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
1683 ptr_mode, 3, XEXP (x, 0), Pmode,
1684 XEXP (y, 0), Pmode,
1685 convert_to_mode (TYPE_MODE (sizetype), size,
1686 TREE_UNSIGNED (sizetype)),
1687 TYPE_MODE (sizetype));
1688 #else
1689 emit_library_call (bcopy_libfunc, 0,
1690 VOIDmode, 3, XEXP (y, 0), Pmode,
1691 XEXP (x, 0), Pmode,
1692 convert_to_mode (TYPE_MODE (integer_type_node), size,
1693 TREE_UNSIGNED (integer_type_node)),
1694 TYPE_MODE (integer_type_node));
1695 #endif
1696 }
1697
1698 return retval;
1699 }
1700 \f
1701 /* Copy all or part of a value X into registers starting at REGNO.
1702 The number of registers to be filled is NREGS. */
1703
1704 void
1705 move_block_to_reg (regno, x, nregs, mode)
1706 int regno;
1707 rtx x;
1708 int nregs;
1709 enum machine_mode mode;
1710 {
1711 int i;
1712 rtx pat, last;
1713
1714 if (nregs == 0)
1715 return;
1716
1717 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1718 x = validize_mem (force_const_mem (mode, x));
1719
1720 /* See if the machine can do this with a load multiple insn. */
1721 #ifdef HAVE_load_multiple
1722 if (HAVE_load_multiple)
1723 {
1724 last = get_last_insn ();
1725 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1726 GEN_INT (nregs));
1727 if (pat)
1728 {
1729 emit_insn (pat);
1730 return;
1731 }
1732 else
1733 delete_insns_since (last);
1734 }
1735 #endif
1736
1737 for (i = 0; i < nregs; i++)
1738 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1739 operand_subword_force (x, i, mode));
1740 }
1741
1742 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1743 The number of registers to be filled is NREGS. SIZE indicates the number
1744 of bytes in the object X. */
1745
1746
1747 void
1748 move_block_from_reg (regno, x, nregs, size)
1749 int regno;
1750 rtx x;
1751 int nregs;
1752 int size;
1753 {
1754 int i;
1755 rtx pat, last;
1756 enum machine_mode mode;
1757
1758 /* If SIZE is that of a mode no bigger than a word, just use that
1759 mode's store operation. */
1760 if (size <= UNITS_PER_WORD
1761 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1762 {
1763 emit_move_insn (change_address (x, mode, NULL),
1764 gen_rtx (REG, mode, regno));
1765 return;
1766 }
1767
1768 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1769 to the left before storing to memory. Note that the previous test
1770 doesn't handle all cases (e.g. SIZE == 3). */
1771 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1772 {
1773 rtx tem = operand_subword (x, 0, 1, BLKmode);
1774 rtx shift;
1775
1776 if (tem == 0)
1777 abort ();
1778
1779 shift = expand_shift (LSHIFT_EXPR, word_mode,
1780 gen_rtx (REG, word_mode, regno),
1781 build_int_2 ((UNITS_PER_WORD - size)
1782 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1783 emit_move_insn (tem, shift);
1784 return;
1785 }
1786
1787 /* See if the machine can do this with a store multiple insn. */
1788 #ifdef HAVE_store_multiple
1789 if (HAVE_store_multiple)
1790 {
1791 last = get_last_insn ();
1792 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1793 GEN_INT (nregs));
1794 if (pat)
1795 {
1796 emit_insn (pat);
1797 return;
1798 }
1799 else
1800 delete_insns_since (last);
1801 }
1802 #endif
1803
1804 for (i = 0; i < nregs; i++)
1805 {
1806 rtx tem = operand_subword (x, i, 1, BLKmode);
1807
1808 if (tem == 0)
1809 abort ();
1810
1811 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1812 }
1813 }
1814
1815 /* Emit code to move a block Y to a block X, where X is non-consecutive
1816 registers represented by a PARALLEL. */
1817
1818 void
1819 emit_group_load (x, y)
1820 rtx x, y;
1821 {
1822 rtx target_reg, source;
1823 int i;
1824
1825 if (GET_CODE (x) != PARALLEL)
1826 abort ();
1827
1828 /* Check for a NULL entry, used to indicate that the parameter goes
1829 both on the stack and in registers. */
1830 if (XEXP (XVECEXP (x, 0, 0), 0))
1831 i = 0;
1832 else
1833 i = 1;
1834
1835 for (; i < XVECLEN (x, 0); i++)
1836 {
1837 rtx element = XVECEXP (x, 0, i);
1838
1839 target_reg = XEXP (element, 0);
1840
1841 if (GET_CODE (y) == MEM)
1842 source = change_address (y, GET_MODE (target_reg),
1843 plus_constant (XEXP (y, 0),
1844 INTVAL (XEXP (element, 1))));
1845 else if (XEXP (element, 1) == const0_rtx)
1846 {
1847 if (GET_MODE (target_reg) == GET_MODE (y))
1848 source = y;
1849 /* Allow for the target_reg to be smaller than the input register
1850 to allow for AIX with 4 DF arguments after a single SI arg. The
1851 last DF argument will only load 1 word into the integer registers,
1852 but load a DF value into the float registers. */
1853 else if ((GET_MODE_SIZE (GET_MODE (target_reg))
1854 <= GET_MODE_SIZE (GET_MODE (y)))
1855 && GET_MODE (target_reg) == word_mode)
1856 /* This might be a const_double, so we can't just use SUBREG. */
1857 source = operand_subword (y, 0, 0, VOIDmode);
1858 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1859 == GET_MODE_SIZE (GET_MODE (y)))
1860 source = gen_lowpart (GET_MODE (target_reg), y);
1861 else
1862 abort ();
1863 }
1864 else
1865 abort ();
1866
1867 emit_move_insn (target_reg, source);
1868 }
1869 }
1870
1871 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1872 registers represented by a PARALLEL. */
1873
1874 void
1875 emit_group_store (x, y)
1876 rtx x, y;
1877 {
1878 rtx source_reg, target;
1879 int i;
1880
1881 if (GET_CODE (y) != PARALLEL)
1882 abort ();
1883
1884 /* Check for a NULL entry, used to indicate that the parameter goes
1885 both on the stack and in registers. */
1886 if (XEXP (XVECEXP (y, 0, 0), 0))
1887 i = 0;
1888 else
1889 i = 1;
1890
1891 for (; i < XVECLEN (y, 0); i++)
1892 {
1893 rtx element = XVECEXP (y, 0, i);
1894
1895 source_reg = XEXP (element, 0);
1896
1897 if (GET_CODE (x) == MEM)
1898 target = change_address (x, GET_MODE (source_reg),
1899 plus_constant (XEXP (x, 0),
1900 INTVAL (XEXP (element, 1))));
1901 else if (XEXP (element, 1) == const0_rtx)
1902 {
1903 target = x;
1904 if (GET_MODE (target) != GET_MODE (source_reg))
1905 target = gen_lowpart (GET_MODE (source_reg), target);
1906 }
1907 else
1908 abort ();
1909
1910 emit_move_insn (target, source_reg);
1911 }
1912 }
1913
1914 /* Add a USE expression for REG to the (possibly empty) list pointed
1915 to by CALL_FUSAGE. REG must denote a hard register. */
1916
1917 void
1918 use_reg (call_fusage, reg)
1919 rtx *call_fusage, reg;
1920 {
1921 if (GET_CODE (reg) != REG
1922 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1923 abort();
1924
1925 *call_fusage
1926 = gen_rtx (EXPR_LIST, VOIDmode,
1927 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1928 }
1929
1930 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1931 starting at REGNO. All of these registers must be hard registers. */
1932
1933 void
1934 use_regs (call_fusage, regno, nregs)
1935 rtx *call_fusage;
1936 int regno;
1937 int nregs;
1938 {
1939 int i;
1940
1941 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1942 abort ();
1943
1944 for (i = 0; i < nregs; i++)
1945 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1946 }
1947
1948 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1949 PARALLEL REGS. This is for calls that pass values in multiple
1950 non-contiguous locations. The Irix 6 ABI has examples of this. */
1951
1952 void
1953 use_group_regs (call_fusage, regs)
1954 rtx *call_fusage;
1955 rtx regs;
1956 {
1957 int i;
1958
1959 for (i = 0; i < XVECLEN (regs, 0); i++)
1960 {
1961 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
1962
1963 /* A NULL entry means the parameter goes both on the stack and in
1964 registers. This can also be a MEM for targets that pass values
1965 partially on the stack and partially in registers. */
1966 if (reg != 0 && GET_CODE (reg) == REG)
1967 use_reg (call_fusage, reg);
1968 }
1969 }
1970 \f
1971 /* Generate several move instructions to clear LEN bytes of block TO.
1972 (A MEM rtx with BLKmode). The caller must pass TO through
1973 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1974 we can assume. */
1975
1976 static void
1977 clear_by_pieces (to, len, align)
1978 rtx to;
1979 int len, align;
1980 {
1981 struct clear_by_pieces data;
1982 rtx to_addr = XEXP (to, 0);
1983 int max_size = MOVE_MAX + 1;
1984
1985 data.offset = 0;
1986 data.to_addr = to_addr;
1987 data.to = to;
1988 data.autinc_to
1989 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1990 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1991
1992 data.explicit_inc_to = 0;
1993 data.reverse
1994 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1995 if (data.reverse) data.offset = len;
1996 data.len = len;
1997
1998 data.to_struct = MEM_IN_STRUCT_P (to);
1999
2000 /* If copying requires more than two move insns,
2001 copy addresses to registers (to make displacements shorter)
2002 and use post-increment if available. */
2003 if (!data.autinc_to
2004 && move_by_pieces_ninsns (len, align) > 2)
2005 {
2006 #ifdef HAVE_PRE_DECREMENT
2007 if (data.reverse && ! data.autinc_to)
2008 {
2009 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2010 data.autinc_to = 1;
2011 data.explicit_inc_to = -1;
2012 }
2013 #endif
2014 #ifdef HAVE_POST_INCREMENT
2015 if (! data.reverse && ! data.autinc_to)
2016 {
2017 data.to_addr = copy_addr_to_reg (to_addr);
2018 data.autinc_to = 1;
2019 data.explicit_inc_to = 1;
2020 }
2021 #endif
2022 if (!data.autinc_to && CONSTANT_P (to_addr))
2023 data.to_addr = copy_addr_to_reg (to_addr);
2024 }
2025
2026 if (! SLOW_UNALIGNED_ACCESS
2027 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2028 align = MOVE_MAX;
2029
2030 /* First move what we can in the largest integer mode, then go to
2031 successively smaller modes. */
2032
2033 while (max_size > 1)
2034 {
2035 enum machine_mode mode = VOIDmode, tmode;
2036 enum insn_code icode;
2037
2038 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2039 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2040 if (GET_MODE_SIZE (tmode) < max_size)
2041 mode = tmode;
2042
2043 if (mode == VOIDmode)
2044 break;
2045
2046 icode = mov_optab->handlers[(int) mode].insn_code;
2047 if (icode != CODE_FOR_nothing
2048 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2049 GET_MODE_SIZE (mode)))
2050 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2051
2052 max_size = GET_MODE_SIZE (mode);
2053 }
2054
2055 /* The code above should have handled everything. */
2056 if (data.len != 0)
2057 abort ();
2058 }
2059
2060 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2061 with move instructions for mode MODE. GENFUN is the gen_... function
2062 to make a move insn for that mode. DATA has all the other info. */
2063
2064 static void
2065 clear_by_pieces_1 (genfun, mode, data)
2066 rtx (*genfun) PROTO ((rtx, ...));
2067 enum machine_mode mode;
2068 struct clear_by_pieces *data;
2069 {
2070 register int size = GET_MODE_SIZE (mode);
2071 register rtx to1;
2072
2073 while (data->len >= size)
2074 {
2075 if (data->reverse) data->offset -= size;
2076
2077 to1 = (data->autinc_to
2078 ? gen_rtx (MEM, mode, data->to_addr)
2079 : copy_rtx (change_address (data->to, mode,
2080 plus_constant (data->to_addr,
2081 data->offset))));
2082 MEM_IN_STRUCT_P (to1) = data->to_struct;
2083
2084 #ifdef HAVE_PRE_DECREMENT
2085 if (data->explicit_inc_to < 0)
2086 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2087 #endif
2088
2089 emit_insn ((*genfun) (to1, const0_rtx));
2090 #ifdef HAVE_POST_INCREMENT
2091 if (data->explicit_inc_to > 0)
2092 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2093 #endif
2094
2095 if (! data->reverse) data->offset += size;
2096
2097 data->len -= size;
2098 }
2099 }
2100 \f
2101 /* Write zeros through the storage of OBJECT.
2102 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2103 the maximum alignment we can is has, measured in bytes.
2104
2105 If we call a function that returns the length of the block, return it. */
2106
2107 rtx
2108 clear_storage (object, size, align)
2109 rtx object;
2110 rtx size;
2111 int align;
2112 {
2113 rtx retval = 0;
2114
2115 if (GET_MODE (object) == BLKmode)
2116 {
2117 object = protect_from_queue (object, 1);
2118 size = protect_from_queue (size, 0);
2119
2120 if (GET_CODE (size) == CONST_INT
2121 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2122 clear_by_pieces (object, INTVAL (size), align);
2123
2124 else
2125 {
2126 /* Try the most limited insn first, because there's no point
2127 including more than one in the machine description unless
2128 the more limited one has some advantage. */
2129
2130 rtx opalign = GEN_INT (align);
2131 enum machine_mode mode;
2132
2133 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2134 mode = GET_MODE_WIDER_MODE (mode))
2135 {
2136 enum insn_code code = clrstr_optab[(int) mode];
2137
2138 if (code != CODE_FOR_nothing
2139 /* We don't need MODE to be narrower than
2140 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2141 the mode mask, as it is returned by the macro, it will
2142 definitely be less than the actual mode mask. */
2143 && ((GET_CODE (size) == CONST_INT
2144 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2145 <= GET_MODE_MASK (mode)))
2146 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2147 && (insn_operand_predicate[(int) code][0] == 0
2148 || (*insn_operand_predicate[(int) code][0]) (object,
2149 BLKmode))
2150 && (insn_operand_predicate[(int) code][2] == 0
2151 || (*insn_operand_predicate[(int) code][2]) (opalign,
2152 VOIDmode)))
2153 {
2154 rtx op1;
2155 rtx last = get_last_insn ();
2156 rtx pat;
2157
2158 op1 = convert_to_mode (mode, size, 1);
2159 if (insn_operand_predicate[(int) code][1] != 0
2160 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2161 mode))
2162 op1 = copy_to_mode_reg (mode, op1);
2163
2164 pat = GEN_FCN ((int) code) (object, op1, opalign);
2165 if (pat)
2166 {
2167 emit_insn (pat);
2168 return 0;
2169 }
2170 else
2171 delete_insns_since (last);
2172 }
2173 }
2174
2175
2176 #ifdef TARGET_MEM_FUNCTIONS
2177 retval
2178 = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
2179 ptr_mode, 3,
2180 XEXP (object, 0), Pmode,
2181 const0_rtx,
2182 TYPE_MODE (integer_type_node),
2183 convert_to_mode
2184 (TYPE_MODE (sizetype), size,
2185 TREE_UNSIGNED (sizetype)),
2186 TYPE_MODE (sizetype));
2187 #else
2188 emit_library_call (bzero_libfunc, 0,
2189 VOIDmode, 2,
2190 XEXP (object, 0), Pmode,
2191 convert_to_mode
2192 (TYPE_MODE (integer_type_node), size,
2193 TREE_UNSIGNED (integer_type_node)),
2194 TYPE_MODE (integer_type_node));
2195 #endif
2196 }
2197 }
2198 else
2199 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2200
2201 return retval;
2202 }
2203
2204 /* Generate code to copy Y into X.
2205 Both Y and X must have the same mode, except that
2206 Y can be a constant with VOIDmode.
2207 This mode cannot be BLKmode; use emit_block_move for that.
2208
2209 Return the last instruction emitted. */
2210
2211 rtx
2212 emit_move_insn (x, y)
2213 rtx x, y;
2214 {
2215 enum machine_mode mode = GET_MODE (x);
2216
2217 x = protect_from_queue (x, 1);
2218 y = protect_from_queue (y, 0);
2219
2220 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2221 abort ();
2222
2223 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2224 y = force_const_mem (mode, y);
2225
2226 /* If X or Y are memory references, verify that their addresses are valid
2227 for the machine. */
2228 if (GET_CODE (x) == MEM
2229 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2230 && ! push_operand (x, GET_MODE (x)))
2231 || (flag_force_addr
2232 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2233 x = change_address (x, VOIDmode, XEXP (x, 0));
2234
2235 if (GET_CODE (y) == MEM
2236 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2237 || (flag_force_addr
2238 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2239 y = change_address (y, VOIDmode, XEXP (y, 0));
2240
2241 if (mode == BLKmode)
2242 abort ();
2243
2244 return emit_move_insn_1 (x, y);
2245 }
2246
2247 /* Low level part of emit_move_insn.
2248 Called just like emit_move_insn, but assumes X and Y
2249 are basically valid. */
2250
2251 rtx
2252 emit_move_insn_1 (x, y)
2253 rtx x, y;
2254 {
2255 enum machine_mode mode = GET_MODE (x);
2256 enum machine_mode submode;
2257 enum mode_class class = GET_MODE_CLASS (mode);
2258 int i;
2259
2260 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2261 return
2262 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2263
2264 /* Expand complex moves by moving real part and imag part, if possible. */
2265 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2266 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2267 * BITS_PER_UNIT),
2268 (class == MODE_COMPLEX_INT
2269 ? MODE_INT : MODE_FLOAT),
2270 0))
2271 && (mov_optab->handlers[(int) submode].insn_code
2272 != CODE_FOR_nothing))
2273 {
2274 /* Don't split destination if it is a stack push. */
2275 int stack = push_operand (x, GET_MODE (x));
2276 rtx insns;
2277
2278 /* If this is a stack, push the highpart first, so it
2279 will be in the argument order.
2280
2281 In that case, change_address is used only to convert
2282 the mode, not to change the address. */
2283 if (stack)
2284 {
2285 /* Note that the real part always precedes the imag part in memory
2286 regardless of machine's endianness. */
2287 #ifdef STACK_GROWS_DOWNWARD
2288 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2289 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2290 gen_imagpart (submode, y)));
2291 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2292 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2293 gen_realpart (submode, y)));
2294 #else
2295 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2296 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2297 gen_realpart (submode, y)));
2298 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2299 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2300 gen_imagpart (submode, y)));
2301 #endif
2302 }
2303 else
2304 {
2305 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2306 (gen_realpart (submode, x), gen_realpart (submode, y)));
2307 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2308 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2309 }
2310
2311 return get_last_insn ();
2312 }
2313
2314 /* This will handle any multi-word mode that lacks a move_insn pattern.
2315 However, you will get better code if you define such patterns,
2316 even if they must turn into multiple assembler instructions. */
2317 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2318 {
2319 rtx last_insn = 0;
2320 rtx insns;
2321
2322 #ifdef PUSH_ROUNDING
2323
2324 /* If X is a push on the stack, do the push now and replace
2325 X with a reference to the stack pointer. */
2326 if (push_operand (x, GET_MODE (x)))
2327 {
2328 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2329 x = change_address (x, VOIDmode, stack_pointer_rtx);
2330 }
2331 #endif
2332
2333 /* Show the output dies here. */
2334 if (x != y)
2335 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2336
2337 for (i = 0;
2338 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2339 i++)
2340 {
2341 rtx xpart = operand_subword (x, i, 1, mode);
2342 rtx ypart = operand_subword (y, i, 1, mode);
2343
2344 /* If we can't get a part of Y, put Y into memory if it is a
2345 constant. Otherwise, force it into a register. If we still
2346 can't get a part of Y, abort. */
2347 if (ypart == 0 && CONSTANT_P (y))
2348 {
2349 y = force_const_mem (mode, y);
2350 ypart = operand_subword (y, i, 1, mode);
2351 }
2352 else if (ypart == 0)
2353 ypart = operand_subword_force (y, i, mode);
2354
2355 if (xpart == 0 || ypart == 0)
2356 abort ();
2357
2358 last_insn = emit_move_insn (xpart, ypart);
2359 }
2360
2361 return last_insn;
2362 }
2363 else
2364 abort ();
2365 }
2366 \f
2367 /* Pushing data onto the stack. */
2368
2369 /* Push a block of length SIZE (perhaps variable)
2370 and return an rtx to address the beginning of the block.
2371 Note that it is not possible for the value returned to be a QUEUED.
2372 The value may be virtual_outgoing_args_rtx.
2373
2374 EXTRA is the number of bytes of padding to push in addition to SIZE.
2375 BELOW nonzero means this padding comes at low addresses;
2376 otherwise, the padding comes at high addresses. */
2377
2378 rtx
2379 push_block (size, extra, below)
2380 rtx size;
2381 int extra, below;
2382 {
2383 register rtx temp;
2384
2385 size = convert_modes (Pmode, ptr_mode, size, 1);
2386 if (CONSTANT_P (size))
2387 anti_adjust_stack (plus_constant (size, extra));
2388 else if (GET_CODE (size) == REG && extra == 0)
2389 anti_adjust_stack (size);
2390 else
2391 {
2392 rtx temp = copy_to_mode_reg (Pmode, size);
2393 if (extra != 0)
2394 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2395 temp, 0, OPTAB_LIB_WIDEN);
2396 anti_adjust_stack (temp);
2397 }
2398
2399 #ifdef STACK_GROWS_DOWNWARD
2400 temp = virtual_outgoing_args_rtx;
2401 if (extra != 0 && below)
2402 temp = plus_constant (temp, extra);
2403 #else
2404 if (GET_CODE (size) == CONST_INT)
2405 temp = plus_constant (virtual_outgoing_args_rtx,
2406 - INTVAL (size) - (below ? 0 : extra));
2407 else if (extra != 0 && !below)
2408 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2409 negate_rtx (Pmode, plus_constant (size, extra)));
2410 else
2411 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2412 negate_rtx (Pmode, size));
2413 #endif
2414
2415 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2416 }
2417
2418 rtx
2419 gen_push_operand ()
2420 {
2421 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2422 }
2423
2424 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2425 block of SIZE bytes. */
2426
2427 static rtx
2428 get_push_address (size)
2429 int size;
2430 {
2431 register rtx temp;
2432
2433 if (STACK_PUSH_CODE == POST_DEC)
2434 temp = gen_rtx (PLUS, Pmode, stack_pointer_rtx, GEN_INT (size));
2435 else if (STACK_PUSH_CODE == POST_INC)
2436 temp = gen_rtx (MINUS, Pmode, stack_pointer_rtx, GEN_INT (size));
2437 else
2438 temp = stack_pointer_rtx;
2439
2440 return force_operand (temp, NULL_RTX);
2441 }
2442
2443 /* Generate code to push X onto the stack, assuming it has mode MODE and
2444 type TYPE.
2445 MODE is redundant except when X is a CONST_INT (since they don't
2446 carry mode info).
2447 SIZE is an rtx for the size of data to be copied (in bytes),
2448 needed only if X is BLKmode.
2449
2450 ALIGN (in bytes) is maximum alignment we can assume.
2451
2452 If PARTIAL and REG are both nonzero, then copy that many of the first
2453 words of X into registers starting with REG, and push the rest of X.
2454 The amount of space pushed is decreased by PARTIAL words,
2455 rounded *down* to a multiple of PARM_BOUNDARY.
2456 REG must be a hard register in this case.
2457 If REG is zero but PARTIAL is not, take any all others actions for an
2458 argument partially in registers, but do not actually load any
2459 registers.
2460
2461 EXTRA is the amount in bytes of extra space to leave next to this arg.
2462 This is ignored if an argument block has already been allocated.
2463
2464 On a machine that lacks real push insns, ARGS_ADDR is the address of
2465 the bottom of the argument block for this call. We use indexing off there
2466 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2467 argument block has not been preallocated.
2468
2469 ARGS_SO_FAR is the size of args previously pushed for this call. */
2470
2471 void
2472 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2473 args_addr, args_so_far)
2474 register rtx x;
2475 enum machine_mode mode;
2476 tree type;
2477 rtx size;
2478 int align;
2479 int partial;
2480 rtx reg;
2481 int extra;
2482 rtx args_addr;
2483 rtx args_so_far;
2484 {
2485 rtx xinner;
2486 enum direction stack_direction
2487 #ifdef STACK_GROWS_DOWNWARD
2488 = downward;
2489 #else
2490 = upward;
2491 #endif
2492
2493 /* Decide where to pad the argument: `downward' for below,
2494 `upward' for above, or `none' for don't pad it.
2495 Default is below for small data on big-endian machines; else above. */
2496 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2497
2498 /* Invert direction if stack is post-update. */
2499 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2500 if (where_pad != none)
2501 where_pad = (where_pad == downward ? upward : downward);
2502
2503 xinner = x = protect_from_queue (x, 0);
2504
2505 if (mode == BLKmode)
2506 {
2507 /* Copy a block into the stack, entirely or partially. */
2508
2509 register rtx temp;
2510 int used = partial * UNITS_PER_WORD;
2511 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2512 int skip;
2513
2514 if (size == 0)
2515 abort ();
2516
2517 used -= offset;
2518
2519 /* USED is now the # of bytes we need not copy to the stack
2520 because registers will take care of them. */
2521
2522 if (partial != 0)
2523 xinner = change_address (xinner, BLKmode,
2524 plus_constant (XEXP (xinner, 0), used));
2525
2526 /* If the partial register-part of the arg counts in its stack size,
2527 skip the part of stack space corresponding to the registers.
2528 Otherwise, start copying to the beginning of the stack space,
2529 by setting SKIP to 0. */
2530 #ifndef REG_PARM_STACK_SPACE
2531 skip = 0;
2532 #else
2533 skip = used;
2534 #endif
2535
2536 #ifdef PUSH_ROUNDING
2537 /* Do it with several push insns if that doesn't take lots of insns
2538 and if there is no difficulty with push insns that skip bytes
2539 on the stack for alignment purposes. */
2540 if (args_addr == 0
2541 && GET_CODE (size) == CONST_INT
2542 && skip == 0
2543 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2544 < MOVE_RATIO)
2545 /* Here we avoid the case of a structure whose weak alignment
2546 forces many pushes of a small amount of data,
2547 and such small pushes do rounding that causes trouble. */
2548 && ((! SLOW_UNALIGNED_ACCESS)
2549 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2550 || PUSH_ROUNDING (align) == align)
2551 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2552 {
2553 /* Push padding now if padding above and stack grows down,
2554 or if padding below and stack grows up.
2555 But if space already allocated, this has already been done. */
2556 if (extra && args_addr == 0
2557 && where_pad != none && where_pad != stack_direction)
2558 anti_adjust_stack (GEN_INT (extra));
2559
2560 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2561 INTVAL (size) - used, align);
2562
2563 if (flag_check_memory_usage && ! in_check_memory_usage)
2564 {
2565 rtx temp;
2566
2567 in_check_memory_usage = 1;
2568 temp = get_push_address (INTVAL(size) - used);
2569 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2570 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2571 temp, ptr_mode,
2572 XEXP (xinner, 0), ptr_mode,
2573 GEN_INT (INTVAL(size) - used),
2574 TYPE_MODE (sizetype));
2575 else
2576 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2577 temp, ptr_mode,
2578 GEN_INT (INTVAL(size) - used),
2579 TYPE_MODE (sizetype),
2580 GEN_INT (MEMORY_USE_RW),
2581 TYPE_MODE (integer_type_node));
2582 in_check_memory_usage = 0;
2583 }
2584 }
2585 else
2586 #endif /* PUSH_ROUNDING */
2587 {
2588 /* Otherwise make space on the stack and copy the data
2589 to the address of that space. */
2590
2591 /* Deduct words put into registers from the size we must copy. */
2592 if (partial != 0)
2593 {
2594 if (GET_CODE (size) == CONST_INT)
2595 size = GEN_INT (INTVAL (size) - used);
2596 else
2597 size = expand_binop (GET_MODE (size), sub_optab, size,
2598 GEN_INT (used), NULL_RTX, 0,
2599 OPTAB_LIB_WIDEN);
2600 }
2601
2602 /* Get the address of the stack space.
2603 In this case, we do not deal with EXTRA separately.
2604 A single stack adjust will do. */
2605 if (! args_addr)
2606 {
2607 temp = push_block (size, extra, where_pad == downward);
2608 extra = 0;
2609 }
2610 else if (GET_CODE (args_so_far) == CONST_INT)
2611 temp = memory_address (BLKmode,
2612 plus_constant (args_addr,
2613 skip + INTVAL (args_so_far)));
2614 else
2615 temp = memory_address (BLKmode,
2616 plus_constant (gen_rtx (PLUS, Pmode,
2617 args_addr, args_so_far),
2618 skip));
2619 if (flag_check_memory_usage && ! in_check_memory_usage)
2620 {
2621 rtx target;
2622
2623 in_check_memory_usage = 1;
2624 target = copy_to_reg (temp);
2625 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2626 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2627 target, ptr_mode,
2628 XEXP (xinner, 0), ptr_mode,
2629 size, TYPE_MODE (sizetype));
2630 else
2631 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2632 target, ptr_mode,
2633 size, TYPE_MODE (sizetype),
2634 GEN_INT (MEMORY_USE_RW),
2635 TYPE_MODE (integer_type_node));
2636 in_check_memory_usage = 0;
2637 }
2638
2639 /* TEMP is the address of the block. Copy the data there. */
2640 if (GET_CODE (size) == CONST_INT
2641 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2642 < MOVE_RATIO))
2643 {
2644 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2645 INTVAL (size), align);
2646 goto ret;
2647 }
2648 /* Try the most limited insn first, because there's no point
2649 including more than one in the machine description unless
2650 the more limited one has some advantage. */
2651 #ifdef HAVE_movstrqi
2652 if (HAVE_movstrqi
2653 && GET_CODE (size) == CONST_INT
2654 && ((unsigned) INTVAL (size)
2655 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2656 {
2657 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2658 xinner, size, GEN_INT (align));
2659 if (pat != 0)
2660 {
2661 emit_insn (pat);
2662 goto ret;
2663 }
2664 }
2665 #endif
2666 #ifdef HAVE_movstrhi
2667 if (HAVE_movstrhi
2668 && GET_CODE (size) == CONST_INT
2669 && ((unsigned) INTVAL (size)
2670 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2671 {
2672 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2673 xinner, size, GEN_INT (align));
2674 if (pat != 0)
2675 {
2676 emit_insn (pat);
2677 goto ret;
2678 }
2679 }
2680 #endif
2681 #ifdef HAVE_movstrsi
2682 if (HAVE_movstrsi)
2683 {
2684 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2685 xinner, size, GEN_INT (align));
2686 if (pat != 0)
2687 {
2688 emit_insn (pat);
2689 goto ret;
2690 }
2691 }
2692 #endif
2693 #ifdef HAVE_movstrdi
2694 if (HAVE_movstrdi)
2695 {
2696 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2697 xinner, size, GEN_INT (align));
2698 if (pat != 0)
2699 {
2700 emit_insn (pat);
2701 goto ret;
2702 }
2703 }
2704 #endif
2705
2706 #ifndef ACCUMULATE_OUTGOING_ARGS
2707 /* If the source is referenced relative to the stack pointer,
2708 copy it to another register to stabilize it. We do not need
2709 to do this if we know that we won't be changing sp. */
2710
2711 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2712 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2713 temp = copy_to_reg (temp);
2714 #endif
2715
2716 /* Make inhibit_defer_pop nonzero around the library call
2717 to force it to pop the bcopy-arguments right away. */
2718 NO_DEFER_POP;
2719 #ifdef TARGET_MEM_FUNCTIONS
2720 emit_library_call (memcpy_libfunc, 0,
2721 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2722 convert_to_mode (TYPE_MODE (sizetype),
2723 size, TREE_UNSIGNED (sizetype)),
2724 TYPE_MODE (sizetype));
2725 #else
2726 emit_library_call (bcopy_libfunc, 0,
2727 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2728 convert_to_mode (TYPE_MODE (integer_type_node),
2729 size,
2730 TREE_UNSIGNED (integer_type_node)),
2731 TYPE_MODE (integer_type_node));
2732 #endif
2733 OK_DEFER_POP;
2734 }
2735 }
2736 else if (partial > 0)
2737 {
2738 /* Scalar partly in registers. */
2739
2740 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2741 int i;
2742 int not_stack;
2743 /* # words of start of argument
2744 that we must make space for but need not store. */
2745 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2746 int args_offset = INTVAL (args_so_far);
2747 int skip;
2748
2749 /* Push padding now if padding above and stack grows down,
2750 or if padding below and stack grows up.
2751 But if space already allocated, this has already been done. */
2752 if (extra && args_addr == 0
2753 && where_pad != none && where_pad != stack_direction)
2754 anti_adjust_stack (GEN_INT (extra));
2755
2756 /* If we make space by pushing it, we might as well push
2757 the real data. Otherwise, we can leave OFFSET nonzero
2758 and leave the space uninitialized. */
2759 if (args_addr == 0)
2760 offset = 0;
2761
2762 /* Now NOT_STACK gets the number of words that we don't need to
2763 allocate on the stack. */
2764 not_stack = partial - offset;
2765
2766 /* If the partial register-part of the arg counts in its stack size,
2767 skip the part of stack space corresponding to the registers.
2768 Otherwise, start copying to the beginning of the stack space,
2769 by setting SKIP to 0. */
2770 #ifndef REG_PARM_STACK_SPACE
2771 skip = 0;
2772 #else
2773 skip = not_stack;
2774 #endif
2775
2776 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2777 x = validize_mem (force_const_mem (mode, x));
2778
2779 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2780 SUBREGs of such registers are not allowed. */
2781 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2782 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2783 x = copy_to_reg (x);
2784
2785 /* Loop over all the words allocated on the stack for this arg. */
2786 /* We can do it by words, because any scalar bigger than a word
2787 has a size a multiple of a word. */
2788 #ifndef PUSH_ARGS_REVERSED
2789 for (i = not_stack; i < size; i++)
2790 #else
2791 for (i = size - 1; i >= not_stack; i--)
2792 #endif
2793 if (i >= not_stack + offset)
2794 emit_push_insn (operand_subword_force (x, i, mode),
2795 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2796 0, args_addr,
2797 GEN_INT (args_offset + ((i - not_stack + skip)
2798 * UNITS_PER_WORD)));
2799 }
2800 else
2801 {
2802 rtx addr;
2803 rtx target = NULL_RTX;
2804
2805 /* Push padding now if padding above and stack grows down,
2806 or if padding below and stack grows up.
2807 But if space already allocated, this has already been done. */
2808 if (extra && args_addr == 0
2809 && where_pad != none && where_pad != stack_direction)
2810 anti_adjust_stack (GEN_INT (extra));
2811
2812 #ifdef PUSH_ROUNDING
2813 if (args_addr == 0)
2814 addr = gen_push_operand ();
2815 else
2816 #endif
2817 {
2818 if (GET_CODE (args_so_far) == CONST_INT)
2819 addr
2820 = memory_address (mode,
2821 plus_constant (args_addr,
2822 INTVAL (args_so_far)));
2823 else
2824 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2825 args_so_far));
2826 target = addr;
2827 }
2828
2829 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2830
2831 if (flag_check_memory_usage && ! in_check_memory_usage)
2832 {
2833 in_check_memory_usage = 1;
2834 if (target == 0)
2835 target = get_push_address (GET_MODE_SIZE (mode));
2836
2837 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2838 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2839 target, ptr_mode,
2840 XEXP (x, 0), ptr_mode,
2841 GEN_INT (GET_MODE_SIZE (mode)),
2842 TYPE_MODE (sizetype));
2843 else
2844 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2845 target, ptr_mode,
2846 GEN_INT (GET_MODE_SIZE (mode)),
2847 TYPE_MODE (sizetype),
2848 GEN_INT (MEMORY_USE_RW),
2849 TYPE_MODE (integer_type_node));
2850 in_check_memory_usage = 0;
2851 }
2852 }
2853
2854 ret:
2855 /* If part should go in registers, copy that part
2856 into the appropriate registers. Do this now, at the end,
2857 since mem-to-mem copies above may do function calls. */
2858 if (partial > 0 && reg != 0)
2859 {
2860 /* Handle calls that pass values in multiple non-contiguous locations.
2861 The Irix 6 ABI has examples of this. */
2862 if (GET_CODE (reg) == PARALLEL)
2863 emit_group_load (reg, x);
2864 else
2865 move_block_to_reg (REGNO (reg), x, partial, mode);
2866 }
2867
2868 if (extra && args_addr == 0 && where_pad == stack_direction)
2869 anti_adjust_stack (GEN_INT (extra));
2870 }
2871 \f
2872 /* Expand an assignment that stores the value of FROM into TO.
2873 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2874 (This may contain a QUEUED rtx;
2875 if the value is constant, this rtx is a constant.)
2876 Otherwise, the returned value is NULL_RTX.
2877
2878 SUGGEST_REG is no longer actually used.
2879 It used to mean, copy the value through a register
2880 and return that register, if that is possible.
2881 We now use WANT_VALUE to decide whether to do this. */
2882
2883 rtx
2884 expand_assignment (to, from, want_value, suggest_reg)
2885 tree to, from;
2886 int want_value;
2887 int suggest_reg;
2888 {
2889 register rtx to_rtx = 0;
2890 rtx result;
2891
2892 /* Don't crash if the lhs of the assignment was erroneous. */
2893
2894 if (TREE_CODE (to) == ERROR_MARK)
2895 {
2896 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2897 return want_value ? result : NULL_RTX;
2898 }
2899
2900 if (output_bytecode)
2901 {
2902 tree dest_innermost;
2903
2904 bc_expand_expr (from);
2905 bc_emit_instruction (duplicate);
2906
2907 dest_innermost = bc_expand_address (to);
2908
2909 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2910 take care of it here. */
2911
2912 bc_store_memory (TREE_TYPE (to), dest_innermost);
2913 return NULL;
2914 }
2915
2916 /* Assignment of a structure component needs special treatment
2917 if the structure component's rtx is not simply a MEM.
2918 Assignment of an array element at a constant index, and assignment of
2919 an array element in an unaligned packed structure field, has the same
2920 problem. */
2921
2922 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2923 || TREE_CODE (to) == ARRAY_REF)
2924 {
2925 enum machine_mode mode1;
2926 int bitsize;
2927 int bitpos;
2928 tree offset;
2929 int unsignedp;
2930 int volatilep = 0;
2931 tree tem;
2932 int alignment;
2933
2934 push_temp_slots ();
2935 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2936 &unsignedp, &volatilep, &alignment);
2937
2938 /* If we are going to use store_bit_field and extract_bit_field,
2939 make sure to_rtx will be safe for multiple use. */
2940
2941 if (mode1 == VOIDmode && want_value)
2942 tem = stabilize_reference (tem);
2943
2944 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
2945 if (offset != 0)
2946 {
2947 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2948
2949 if (GET_CODE (to_rtx) != MEM)
2950 abort ();
2951 to_rtx = change_address (to_rtx, VOIDmode,
2952 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2953 force_reg (ptr_mode, offset_rtx)));
2954 }
2955 if (volatilep)
2956 {
2957 if (GET_CODE (to_rtx) == MEM)
2958 {
2959 /* When the offset is zero, to_rtx is the address of the
2960 structure we are storing into, and hence may be shared.
2961 We must make a new MEM before setting the volatile bit. */
2962 if (offset == 0)
2963 to_rtx = copy_rtx (to_rtx);
2964
2965 MEM_VOLATILE_P (to_rtx) = 1;
2966 }
2967 #if 0 /* This was turned off because, when a field is volatile
2968 in an object which is not volatile, the object may be in a register,
2969 and then we would abort over here. */
2970 else
2971 abort ();
2972 #endif
2973 }
2974
2975 if (TREE_CODE (to) == COMPONENT_REF
2976 && TREE_READONLY (TREE_OPERAND (to, 1)))
2977 {
2978 if (offset = 0)
2979 to_rtx = copy_rtx (to_rtx);
2980
2981 RTX_UNCHANGING_P (to_rtx) = 1;
2982 }
2983
2984 /* Check the access. */
2985 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2986 {
2987 rtx to_addr;
2988 int size;
2989 int best_mode_size;
2990 enum machine_mode best_mode;
2991
2992 best_mode = get_best_mode (bitsize, bitpos,
2993 TYPE_ALIGN (TREE_TYPE (tem)),
2994 mode1, volatilep);
2995 if (best_mode == VOIDmode)
2996 best_mode = QImode;
2997
2998 best_mode_size = GET_MODE_BITSIZE (best_mode);
2999 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3000 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3001 size *= GET_MODE_SIZE (best_mode);
3002
3003 /* Check the access right of the pointer. */
3004 if (size)
3005 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3006 to_addr, ptr_mode,
3007 GEN_INT (size), TYPE_MODE (sizetype),
3008 GEN_INT (MEMORY_USE_WO),
3009 TYPE_MODE (integer_type_node));
3010 }
3011
3012 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3013 (want_value
3014 /* Spurious cast makes HPUX compiler happy. */
3015 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3016 : VOIDmode),
3017 unsignedp,
3018 /* Required alignment of containing datum. */
3019 alignment,
3020 int_size_in_bytes (TREE_TYPE (tem)));
3021 preserve_temp_slots (result);
3022 free_temp_slots ();
3023 pop_temp_slots ();
3024
3025 /* If the value is meaningful, convert RESULT to the proper mode.
3026 Otherwise, return nothing. */
3027 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3028 TYPE_MODE (TREE_TYPE (from)),
3029 result,
3030 TREE_UNSIGNED (TREE_TYPE (to)))
3031 : NULL_RTX);
3032 }
3033
3034 /* If the rhs is a function call and its value is not an aggregate,
3035 call the function before we start to compute the lhs.
3036 This is needed for correct code for cases such as
3037 val = setjmp (buf) on machines where reference to val
3038 requires loading up part of an address in a separate insn.
3039
3040 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3041 a promoted variable where the zero- or sign- extension needs to be done.
3042 Handling this in the normal way is safe because no computation is done
3043 before the call. */
3044 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3045 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3046 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3047 {
3048 rtx value;
3049
3050 push_temp_slots ();
3051 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3052 if (to_rtx == 0)
3053 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3054
3055 /* Handle calls that return values in multiple non-contiguous locations.
3056 The Irix 6 ABI has examples of this. */
3057 if (GET_CODE (to_rtx) == PARALLEL)
3058 emit_group_load (to_rtx, value);
3059 else if (GET_MODE (to_rtx) == BLKmode)
3060 emit_block_move (to_rtx, value, expr_size (from),
3061 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3062 else
3063 emit_move_insn (to_rtx, value);
3064 preserve_temp_slots (to_rtx);
3065 free_temp_slots ();
3066 pop_temp_slots ();
3067 return want_value ? to_rtx : NULL_RTX;
3068 }
3069
3070 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3071 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3072
3073 if (to_rtx == 0)
3074 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3075
3076 /* Don't move directly into a return register. */
3077 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3078 {
3079 rtx temp;
3080
3081 push_temp_slots ();
3082 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3083 emit_move_insn (to_rtx, temp);
3084 preserve_temp_slots (to_rtx);
3085 free_temp_slots ();
3086 pop_temp_slots ();
3087 return want_value ? to_rtx : NULL_RTX;
3088 }
3089
3090 /* In case we are returning the contents of an object which overlaps
3091 the place the value is being stored, use a safe function when copying
3092 a value through a pointer into a structure value return block. */
3093 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3094 && current_function_returns_struct
3095 && !current_function_returns_pcc_struct)
3096 {
3097 rtx from_rtx, size;
3098
3099 push_temp_slots ();
3100 size = expr_size (from);
3101 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3102 EXPAND_MEMORY_USE_DONT);
3103
3104 /* Copy the rights of the bitmap. */
3105 if (flag_check_memory_usage)
3106 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3107 XEXP (to_rtx, 0), ptr_mode,
3108 XEXP (from_rtx, 0), ptr_mode,
3109 convert_to_mode (TYPE_MODE (sizetype),
3110 size, TREE_UNSIGNED (sizetype)),
3111 TYPE_MODE (sizetype));
3112
3113 #ifdef TARGET_MEM_FUNCTIONS
3114 emit_library_call (memcpy_libfunc, 0,
3115 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3116 XEXP (from_rtx, 0), Pmode,
3117 convert_to_mode (TYPE_MODE (sizetype),
3118 size, TREE_UNSIGNED (sizetype)),
3119 TYPE_MODE (sizetype));
3120 #else
3121 emit_library_call (bcopy_libfunc, 0,
3122 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3123 XEXP (to_rtx, 0), Pmode,
3124 convert_to_mode (TYPE_MODE (integer_type_node),
3125 size, TREE_UNSIGNED (integer_type_node)),
3126 TYPE_MODE (integer_type_node));
3127 #endif
3128
3129 preserve_temp_slots (to_rtx);
3130 free_temp_slots ();
3131 pop_temp_slots ();
3132 return want_value ? to_rtx : NULL_RTX;
3133 }
3134
3135 /* Compute FROM and store the value in the rtx we got. */
3136
3137 push_temp_slots ();
3138 result = store_expr (from, to_rtx, want_value);
3139 preserve_temp_slots (result);
3140 free_temp_slots ();
3141 pop_temp_slots ();
3142 return want_value ? result : NULL_RTX;
3143 }
3144
3145 /* Generate code for computing expression EXP,
3146 and storing the value into TARGET.
3147 TARGET may contain a QUEUED rtx.
3148
3149 If WANT_VALUE is nonzero, return a copy of the value
3150 not in TARGET, so that we can be sure to use the proper
3151 value in a containing expression even if TARGET has something
3152 else stored in it. If possible, we copy the value through a pseudo
3153 and return that pseudo. Or, if the value is constant, we try to
3154 return the constant. In some cases, we return a pseudo
3155 copied *from* TARGET.
3156
3157 If the mode is BLKmode then we may return TARGET itself.
3158 It turns out that in BLKmode it doesn't cause a problem.
3159 because C has no operators that could combine two different
3160 assignments into the same BLKmode object with different values
3161 with no sequence point. Will other languages need this to
3162 be more thorough?
3163
3164 If WANT_VALUE is 0, we return NULL, to make sure
3165 to catch quickly any cases where the caller uses the value
3166 and fails to set WANT_VALUE. */
3167
3168 rtx
3169 store_expr (exp, target, want_value)
3170 register tree exp;
3171 register rtx target;
3172 int want_value;
3173 {
3174 register rtx temp;
3175 int dont_return_target = 0;
3176
3177 if (TREE_CODE (exp) == COMPOUND_EXPR)
3178 {
3179 /* Perform first part of compound expression, then assign from second
3180 part. */
3181 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3182 emit_queue ();
3183 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3184 }
3185 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3186 {
3187 /* For conditional expression, get safe form of the target. Then
3188 test the condition, doing the appropriate assignment on either
3189 side. This avoids the creation of unnecessary temporaries.
3190 For non-BLKmode, it is more efficient not to do this. */
3191
3192 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3193
3194 emit_queue ();
3195 target = protect_from_queue (target, 1);
3196
3197 do_pending_stack_adjust ();
3198 NO_DEFER_POP;
3199 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3200 start_cleanup_deferral ();
3201 store_expr (TREE_OPERAND (exp, 1), target, 0);
3202 end_cleanup_deferral ();
3203 emit_queue ();
3204 emit_jump_insn (gen_jump (lab2));
3205 emit_barrier ();
3206 emit_label (lab1);
3207 start_cleanup_deferral ();
3208 store_expr (TREE_OPERAND (exp, 2), target, 0);
3209 end_cleanup_deferral ();
3210 emit_queue ();
3211 emit_label (lab2);
3212 OK_DEFER_POP;
3213
3214 return want_value ? target : NULL_RTX;
3215 }
3216 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3217 && GET_MODE (target) != BLKmode)
3218 /* If target is in memory and caller wants value in a register instead,
3219 arrange that. Pass TARGET as target for expand_expr so that,
3220 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3221 We know expand_expr will not use the target in that case.
3222 Don't do this if TARGET is volatile because we are supposed
3223 to write it and then read it. */
3224 {
3225 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3226 GET_MODE (target), 0);
3227 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3228 temp = copy_to_reg (temp);
3229 dont_return_target = 1;
3230 }
3231 else if (queued_subexp_p (target))
3232 /* If target contains a postincrement, let's not risk
3233 using it as the place to generate the rhs. */
3234 {
3235 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3236 {
3237 /* Expand EXP into a new pseudo. */
3238 temp = gen_reg_rtx (GET_MODE (target));
3239 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3240 }
3241 else
3242 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3243
3244 /* If target is volatile, ANSI requires accessing the value
3245 *from* the target, if it is accessed. So make that happen.
3246 In no case return the target itself. */
3247 if (! MEM_VOLATILE_P (target) && want_value)
3248 dont_return_target = 1;
3249 }
3250 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3251 /* If this is an scalar in a register that is stored in a wider mode
3252 than the declared mode, compute the result into its declared mode
3253 and then convert to the wider mode. Our value is the computed
3254 expression. */
3255 {
3256 /* If we don't want a value, we can do the conversion inside EXP,
3257 which will often result in some optimizations. Do the conversion
3258 in two steps: first change the signedness, if needed, then
3259 the extend. But don't do this if the type of EXP is a subtype
3260 of something else since then the conversion might involve
3261 more than just converting modes. */
3262 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3263 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3264 {
3265 if (TREE_UNSIGNED (TREE_TYPE (exp))
3266 != SUBREG_PROMOTED_UNSIGNED_P (target))
3267 exp
3268 = convert
3269 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3270 TREE_TYPE (exp)),
3271 exp);
3272
3273 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3274 SUBREG_PROMOTED_UNSIGNED_P (target)),
3275 exp);
3276 }
3277
3278 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3279
3280 /* If TEMP is a volatile MEM and we want a result value, make
3281 the access now so it gets done only once. Likewise if
3282 it contains TARGET. */
3283 if (GET_CODE (temp) == MEM && want_value
3284 && (MEM_VOLATILE_P (temp)
3285 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3286 temp = copy_to_reg (temp);
3287
3288 /* If TEMP is a VOIDmode constant, use convert_modes to make
3289 sure that we properly convert it. */
3290 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3291 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3292 TYPE_MODE (TREE_TYPE (exp)), temp,
3293 SUBREG_PROMOTED_UNSIGNED_P (target));
3294
3295 convert_move (SUBREG_REG (target), temp,
3296 SUBREG_PROMOTED_UNSIGNED_P (target));
3297 return want_value ? temp : NULL_RTX;
3298 }
3299 else
3300 {
3301 temp = expand_expr (exp, target, GET_MODE (target), 0);
3302 /* Return TARGET if it's a specified hardware register.
3303 If TARGET is a volatile mem ref, either return TARGET
3304 or return a reg copied *from* TARGET; ANSI requires this.
3305
3306 Otherwise, if TEMP is not TARGET, return TEMP
3307 if it is constant (for efficiency),
3308 or if we really want the correct value. */
3309 if (!(target && GET_CODE (target) == REG
3310 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3311 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3312 && ! rtx_equal_p (temp, target)
3313 && (CONSTANT_P (temp) || want_value))
3314 dont_return_target = 1;
3315 }
3316
3317 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3318 the same as that of TARGET, adjust the constant. This is needed, for
3319 example, in case it is a CONST_DOUBLE and we want only a word-sized
3320 value. */
3321 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3322 && TREE_CODE (exp) != ERROR_MARK
3323 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3324 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3325 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3326
3327 if (flag_check_memory_usage
3328 && GET_CODE (target) == MEM
3329 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3330 {
3331 if (GET_CODE (temp) == MEM)
3332 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3333 XEXP (target, 0), ptr_mode,
3334 XEXP (temp, 0), ptr_mode,
3335 expr_size (exp), TYPE_MODE (sizetype));
3336 else
3337 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3338 XEXP (target, 0), ptr_mode,
3339 expr_size (exp), TYPE_MODE (sizetype),
3340 GEN_INT (MEMORY_USE_WO),
3341 TYPE_MODE (integer_type_node));
3342 }
3343
3344 /* If value was not generated in the target, store it there.
3345 Convert the value to TARGET's type first if nec. */
3346
3347 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3348 {
3349 target = protect_from_queue (target, 1);
3350 if (GET_MODE (temp) != GET_MODE (target)
3351 && GET_MODE (temp) != VOIDmode)
3352 {
3353 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3354 if (dont_return_target)
3355 {
3356 /* In this case, we will return TEMP,
3357 so make sure it has the proper mode.
3358 But don't forget to store the value into TARGET. */
3359 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3360 emit_move_insn (target, temp);
3361 }
3362 else
3363 convert_move (target, temp, unsignedp);
3364 }
3365
3366 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3367 {
3368 /* Handle copying a string constant into an array.
3369 The string constant may be shorter than the array.
3370 So copy just the string's actual length, and clear the rest. */
3371 rtx size;
3372 rtx addr;
3373
3374 /* Get the size of the data type of the string,
3375 which is actually the size of the target. */
3376 size = expr_size (exp);
3377 if (GET_CODE (size) == CONST_INT
3378 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3379 emit_block_move (target, temp, size,
3380 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3381 else
3382 {
3383 /* Compute the size of the data to copy from the string. */
3384 tree copy_size
3385 = size_binop (MIN_EXPR,
3386 make_tree (sizetype, size),
3387 convert (sizetype,
3388 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3389 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3390 VOIDmode, 0);
3391 rtx label = 0;
3392
3393 /* Copy that much. */
3394 emit_block_move (target, temp, copy_size_rtx,
3395 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3396
3397 /* Figure out how much is left in TARGET that we have to clear.
3398 Do all calculations in ptr_mode. */
3399
3400 addr = XEXP (target, 0);
3401 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3402
3403 if (GET_CODE (copy_size_rtx) == CONST_INT)
3404 {
3405 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3406 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3407 }
3408 else
3409 {
3410 addr = force_reg (ptr_mode, addr);
3411 addr = expand_binop (ptr_mode, add_optab, addr,
3412 copy_size_rtx, NULL_RTX, 0,
3413 OPTAB_LIB_WIDEN);
3414
3415 size = expand_binop (ptr_mode, sub_optab, size,
3416 copy_size_rtx, NULL_RTX, 0,
3417 OPTAB_LIB_WIDEN);
3418
3419 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3420 GET_MODE (size), 0, 0);
3421 label = gen_label_rtx ();
3422 emit_jump_insn (gen_blt (label));
3423 }
3424
3425 if (size != const0_rtx)
3426 {
3427 /* Be sure we can write on ADDR. */
3428 if (flag_check_memory_usage)
3429 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3430 addr, ptr_mode,
3431 size, TYPE_MODE (sizetype),
3432 GEN_INT (MEMORY_USE_WO),
3433 TYPE_MODE (integer_type_node));
3434 #ifdef TARGET_MEM_FUNCTIONS
3435 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3436 addr, ptr_mode,
3437 const0_rtx, TYPE_MODE (integer_type_node),
3438 convert_to_mode (TYPE_MODE (sizetype),
3439 size,
3440 TREE_UNSIGNED (sizetype)),
3441 TYPE_MODE (sizetype));
3442 #else
3443 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3444 addr, ptr_mode,
3445 convert_to_mode (TYPE_MODE (integer_type_node),
3446 size,
3447 TREE_UNSIGNED (integer_type_node)),
3448 TYPE_MODE (integer_type_node));
3449 #endif
3450 }
3451
3452 if (label)
3453 emit_label (label);
3454 }
3455 }
3456 /* Handle calls that return values in multiple non-contiguous locations.
3457 The Irix 6 ABI has examples of this. */
3458 else if (GET_CODE (target) == PARALLEL)
3459 emit_group_load (target, temp);
3460 else if (GET_MODE (temp) == BLKmode)
3461 emit_block_move (target, temp, expr_size (exp),
3462 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3463 else
3464 emit_move_insn (target, temp);
3465 }
3466
3467 /* If we don't want a value, return NULL_RTX. */
3468 if (! want_value)
3469 return NULL_RTX;
3470
3471 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3472 ??? The latter test doesn't seem to make sense. */
3473 else if (dont_return_target && GET_CODE (temp) != MEM)
3474 return temp;
3475
3476 /* Return TARGET itself if it is a hard register. */
3477 else if (want_value && GET_MODE (target) != BLKmode
3478 && ! (GET_CODE (target) == REG
3479 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3480 return copy_to_reg (target);
3481
3482 else
3483 return target;
3484 }
3485 \f
3486 /* Return 1 if EXP just contains zeros. */
3487
3488 static int
3489 is_zeros_p (exp)
3490 tree exp;
3491 {
3492 tree elt;
3493
3494 switch (TREE_CODE (exp))
3495 {
3496 case CONVERT_EXPR:
3497 case NOP_EXPR:
3498 case NON_LVALUE_EXPR:
3499 return is_zeros_p (TREE_OPERAND (exp, 0));
3500
3501 case INTEGER_CST:
3502 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3503
3504 case COMPLEX_CST:
3505 return
3506 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3507
3508 case REAL_CST:
3509 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3510
3511 case CONSTRUCTOR:
3512 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3513 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3514 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3515 if (! is_zeros_p (TREE_VALUE (elt)))
3516 return 0;
3517
3518 return 1;
3519
3520 default:
3521 return 0;
3522 }
3523 }
3524
3525 /* Return 1 if EXP contains mostly (3/4) zeros. */
3526
3527 static int
3528 mostly_zeros_p (exp)
3529 tree exp;
3530 {
3531 if (TREE_CODE (exp) == CONSTRUCTOR)
3532 {
3533 int elts = 0, zeros = 0;
3534 tree elt = CONSTRUCTOR_ELTS (exp);
3535 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3536 {
3537 /* If there are no ranges of true bits, it is all zero. */
3538 return elt == NULL_TREE;
3539 }
3540 for (; elt; elt = TREE_CHAIN (elt))
3541 {
3542 /* We do not handle the case where the index is a RANGE_EXPR,
3543 so the statistic will be somewhat inaccurate.
3544 We do make a more accurate count in store_constructor itself,
3545 so since this function is only used for nested array elements,
3546 this should be close enough. */
3547 if (mostly_zeros_p (TREE_VALUE (elt)))
3548 zeros++;
3549 elts++;
3550 }
3551
3552 return 4 * zeros >= 3 * elts;
3553 }
3554
3555 return is_zeros_p (exp);
3556 }
3557 \f
3558 /* Helper function for store_constructor.
3559 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3560 TYPE is the type of the CONSTRUCTOR, not the element type.
3561 CLEARED is as for store_constructor.
3562
3563 This provides a recursive shortcut back to store_constructor when it isn't
3564 necessary to go through store_field. This is so that we can pass through
3565 the cleared field to let store_constructor know that we may not have to
3566 clear a substructure if the outer structure has already been cleared. */
3567
3568 static void
3569 store_constructor_field (target, bitsize, bitpos,
3570 mode, exp, type, cleared)
3571 rtx target;
3572 int bitsize, bitpos;
3573 enum machine_mode mode;
3574 tree exp, type;
3575 int cleared;
3576 {
3577 if (TREE_CODE (exp) == CONSTRUCTOR
3578 && bitpos % BITS_PER_UNIT == 0
3579 /* If we have a non-zero bitpos for a register target, then we just
3580 let store_field do the bitfield handling. This is unlikely to
3581 generate unnecessary clear instructions anyways. */
3582 && (bitpos == 0 || GET_CODE (target) == MEM))
3583 {
3584 if (bitpos != 0)
3585 target = change_address (target, VOIDmode,
3586 plus_constant (XEXP (target, 0),
3587 bitpos / BITS_PER_UNIT));
3588 store_constructor (exp, target, cleared);
3589 }
3590 else
3591 store_field (target, bitsize, bitpos, mode, exp,
3592 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3593 int_size_in_bytes (type));
3594 }
3595
3596 /* Store the value of constructor EXP into the rtx TARGET.
3597 TARGET is either a REG or a MEM.
3598 CLEARED is true if TARGET is known to have been zero'd. */
3599
3600 static void
3601 store_constructor (exp, target, cleared)
3602 tree exp;
3603 rtx target;
3604 int cleared;
3605 {
3606 tree type = TREE_TYPE (exp);
3607
3608 /* We know our target cannot conflict, since safe_from_p has been called. */
3609 #if 0
3610 /* Don't try copying piece by piece into a hard register
3611 since that is vulnerable to being clobbered by EXP.
3612 Instead, construct in a pseudo register and then copy it all. */
3613 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3614 {
3615 rtx temp = gen_reg_rtx (GET_MODE (target));
3616 store_constructor (exp, temp, 0);
3617 emit_move_insn (target, temp);
3618 return;
3619 }
3620 #endif
3621
3622 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3623 || TREE_CODE (type) == QUAL_UNION_TYPE)
3624 {
3625 register tree elt;
3626
3627 /* Inform later passes that the whole union value is dead. */
3628 if (TREE_CODE (type) == UNION_TYPE
3629 || TREE_CODE (type) == QUAL_UNION_TYPE)
3630 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3631
3632 /* If we are building a static constructor into a register,
3633 set the initial value as zero so we can fold the value into
3634 a constant. But if more than one register is involved,
3635 this probably loses. */
3636 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3637 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3638 {
3639 if (! cleared)
3640 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3641
3642 cleared = 1;
3643 }
3644
3645 /* If the constructor has fewer fields than the structure
3646 or if we are initializing the structure to mostly zeros,
3647 clear the whole structure first. */
3648 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3649 != list_length (TYPE_FIELDS (type)))
3650 || mostly_zeros_p (exp))
3651 {
3652 if (! cleared)
3653 clear_storage (target, expr_size (exp),
3654 TYPE_ALIGN (type) / BITS_PER_UNIT);
3655
3656 cleared = 1;
3657 }
3658 else
3659 /* Inform later passes that the old value is dead. */
3660 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3661
3662 /* Store each element of the constructor into
3663 the corresponding field of TARGET. */
3664
3665 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3666 {
3667 register tree field = TREE_PURPOSE (elt);
3668 register enum machine_mode mode;
3669 int bitsize;
3670 int bitpos = 0;
3671 int unsignedp;
3672 tree pos, constant = 0, offset = 0;
3673 rtx to_rtx = target;
3674
3675 /* Just ignore missing fields.
3676 We cleared the whole structure, above,
3677 if any fields are missing. */
3678 if (field == 0)
3679 continue;
3680
3681 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3682 continue;
3683
3684 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3685 unsignedp = TREE_UNSIGNED (field);
3686 mode = DECL_MODE (field);
3687 if (DECL_BIT_FIELD (field))
3688 mode = VOIDmode;
3689
3690 pos = DECL_FIELD_BITPOS (field);
3691 if (TREE_CODE (pos) == INTEGER_CST)
3692 constant = pos;
3693 else if (TREE_CODE (pos) == PLUS_EXPR
3694 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3695 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3696 else
3697 offset = pos;
3698
3699 if (constant)
3700 bitpos = TREE_INT_CST_LOW (constant);
3701
3702 if (offset)
3703 {
3704 rtx offset_rtx;
3705
3706 if (contains_placeholder_p (offset))
3707 offset = build (WITH_RECORD_EXPR, sizetype,
3708 offset, make_tree (TREE_TYPE (exp), target));
3709
3710 offset = size_binop (FLOOR_DIV_EXPR, offset,
3711 size_int (BITS_PER_UNIT));
3712
3713 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3714 if (GET_CODE (to_rtx) != MEM)
3715 abort ();
3716
3717 to_rtx
3718 = change_address (to_rtx, VOIDmode,
3719 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3720 force_reg (ptr_mode, offset_rtx)));
3721 }
3722 if (TREE_READONLY (field))
3723 {
3724 if (GET_CODE (to_rtx) == MEM)
3725 to_rtx = copy_rtx (to_rtx);
3726
3727 RTX_UNCHANGING_P (to_rtx) = 1;
3728 }
3729
3730 store_constructor_field (to_rtx, bitsize, bitpos,
3731 mode, TREE_VALUE (elt), type, cleared);
3732 }
3733 }
3734 else if (TREE_CODE (type) == ARRAY_TYPE)
3735 {
3736 register tree elt;
3737 register int i;
3738 int need_to_clear;
3739 tree domain = TYPE_DOMAIN (type);
3740 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3741 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3742 tree elttype = TREE_TYPE (type);
3743
3744 /* If the constructor has fewer elements than the array,
3745 clear the whole array first. Similarly if this this is
3746 static constructor of a non-BLKmode object. */
3747 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3748 need_to_clear = 1;
3749 else
3750 {
3751 HOST_WIDE_INT count = 0, zero_count = 0;
3752 need_to_clear = 0;
3753 /* This loop is a more accurate version of the loop in
3754 mostly_zeros_p (it handles RANGE_EXPR in an index).
3755 It is also needed to check for missing elements. */
3756 for (elt = CONSTRUCTOR_ELTS (exp);
3757 elt != NULL_TREE;
3758 elt = TREE_CHAIN (elt))
3759 {
3760 tree index = TREE_PURPOSE (elt);
3761 HOST_WIDE_INT this_node_count;
3762 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3763 {
3764 tree lo_index = TREE_OPERAND (index, 0);
3765 tree hi_index = TREE_OPERAND (index, 1);
3766 if (TREE_CODE (lo_index) != INTEGER_CST
3767 || TREE_CODE (hi_index) != INTEGER_CST)
3768 {
3769 need_to_clear = 1;
3770 break;
3771 }
3772 this_node_count = TREE_INT_CST_LOW (hi_index)
3773 - TREE_INT_CST_LOW (lo_index) + 1;
3774 }
3775 else
3776 this_node_count = 1;
3777 count += this_node_count;
3778 if (mostly_zeros_p (TREE_VALUE (elt)))
3779 zero_count += this_node_count;
3780 }
3781 /* Clear the entire array first if there are any missing elements,
3782 or if the incidence of zero elements is >= 75%. */
3783 if (count < maxelt - minelt + 1
3784 || 4 * zero_count >= 3 * count)
3785 need_to_clear = 1;
3786 }
3787 if (need_to_clear)
3788 {
3789 if (! cleared)
3790 clear_storage (target, expr_size (exp),
3791 TYPE_ALIGN (type) / BITS_PER_UNIT);
3792 cleared = 1;
3793 }
3794 else
3795 /* Inform later passes that the old value is dead. */
3796 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3797
3798 /* Store each element of the constructor into
3799 the corresponding element of TARGET, determined
3800 by counting the elements. */
3801 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3802 elt;
3803 elt = TREE_CHAIN (elt), i++)
3804 {
3805 register enum machine_mode mode;
3806 int bitsize;
3807 int bitpos;
3808 int unsignedp;
3809 tree value = TREE_VALUE (elt);
3810 tree index = TREE_PURPOSE (elt);
3811 rtx xtarget = target;
3812
3813 if (cleared && is_zeros_p (value))
3814 continue;
3815
3816 mode = TYPE_MODE (elttype);
3817 bitsize = GET_MODE_BITSIZE (mode);
3818 unsignedp = TREE_UNSIGNED (elttype);
3819
3820 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3821 {
3822 tree lo_index = TREE_OPERAND (index, 0);
3823 tree hi_index = TREE_OPERAND (index, 1);
3824 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3825 struct nesting *loop;
3826 HOST_WIDE_INT lo, hi, count;
3827 tree position;
3828
3829 /* If the range is constant and "small", unroll the loop. */
3830 if (TREE_CODE (lo_index) == INTEGER_CST
3831 && TREE_CODE (hi_index) == INTEGER_CST
3832 && (lo = TREE_INT_CST_LOW (lo_index),
3833 hi = TREE_INT_CST_LOW (hi_index),
3834 count = hi - lo + 1,
3835 (GET_CODE (target) != MEM
3836 || count <= 2
3837 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3838 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3839 <= 40 * 8))))
3840 {
3841 lo -= minelt; hi -= minelt;
3842 for (; lo <= hi; lo++)
3843 {
3844 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3845 store_constructor_field (target, bitsize, bitpos,
3846 mode, value, type, cleared);
3847 }
3848 }
3849 else
3850 {
3851 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3852 loop_top = gen_label_rtx ();
3853 loop_end = gen_label_rtx ();
3854
3855 unsignedp = TREE_UNSIGNED (domain);
3856
3857 index = build_decl (VAR_DECL, NULL_TREE, domain);
3858
3859 DECL_RTL (index) = index_r
3860 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3861 &unsignedp, 0));
3862
3863 if (TREE_CODE (value) == SAVE_EXPR
3864 && SAVE_EXPR_RTL (value) == 0)
3865 {
3866 /* Make sure value gets expanded once before the
3867 loop. */
3868 expand_expr (value, const0_rtx, VOIDmode, 0);
3869 emit_queue ();
3870 }
3871 store_expr (lo_index, index_r, 0);
3872 loop = expand_start_loop (0);
3873
3874 /* Assign value to element index. */
3875 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3876 size_int (BITS_PER_UNIT));
3877 position = size_binop (MULT_EXPR,
3878 size_binop (MINUS_EXPR, index,
3879 TYPE_MIN_VALUE (domain)),
3880 position);
3881 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3882 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3883 xtarget = change_address (target, mode, addr);
3884 if (TREE_CODE (value) == CONSTRUCTOR)
3885 store_constructor (value, xtarget, cleared);
3886 else
3887 store_expr (value, xtarget, 0);
3888
3889 expand_exit_loop_if_false (loop,
3890 build (LT_EXPR, integer_type_node,
3891 index, hi_index));
3892
3893 expand_increment (build (PREINCREMENT_EXPR,
3894 TREE_TYPE (index),
3895 index, integer_one_node), 0, 0);
3896 expand_end_loop ();
3897 emit_label (loop_end);
3898
3899 /* Needed by stupid register allocation. to extend the
3900 lifetime of pseudo-regs used by target past the end
3901 of the loop. */
3902 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3903 }
3904 }
3905 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3906 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3907 {
3908 rtx pos_rtx, addr;
3909 tree position;
3910
3911 if (index == 0)
3912 index = size_int (i);
3913
3914 if (minelt)
3915 index = size_binop (MINUS_EXPR, index,
3916 TYPE_MIN_VALUE (domain));
3917 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3918 size_int (BITS_PER_UNIT));
3919 position = size_binop (MULT_EXPR, index, position);
3920 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3921 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3922 xtarget = change_address (target, mode, addr);
3923 store_expr (value, xtarget, 0);
3924 }
3925 else
3926 {
3927 if (index != 0)
3928 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3929 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3930 else
3931 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3932 store_constructor_field (target, bitsize, bitpos,
3933 mode, value, type, cleared);
3934 }
3935 }
3936 }
3937 /* set constructor assignments */
3938 else if (TREE_CODE (type) == SET_TYPE)
3939 {
3940 tree elt = CONSTRUCTOR_ELTS (exp);
3941 rtx xtarget = XEXP (target, 0);
3942 int set_word_size = TYPE_ALIGN (type);
3943 int nbytes = int_size_in_bytes (type), nbits;
3944 tree domain = TYPE_DOMAIN (type);
3945 tree domain_min, domain_max, bitlength;
3946
3947 /* The default implementation strategy is to extract the constant
3948 parts of the constructor, use that to initialize the target,
3949 and then "or" in whatever non-constant ranges we need in addition.
3950
3951 If a large set is all zero or all ones, it is
3952 probably better to set it using memset (if available) or bzero.
3953 Also, if a large set has just a single range, it may also be
3954 better to first clear all the first clear the set (using
3955 bzero/memset), and set the bits we want. */
3956
3957 /* Check for all zeros. */
3958 if (elt == NULL_TREE)
3959 {
3960 if (!cleared)
3961 clear_storage (target, expr_size (exp),
3962 TYPE_ALIGN (type) / BITS_PER_UNIT);
3963 return;
3964 }
3965
3966 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3967 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3968 bitlength = size_binop (PLUS_EXPR,
3969 size_binop (MINUS_EXPR, domain_max, domain_min),
3970 size_one_node);
3971
3972 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3973 abort ();
3974 nbits = TREE_INT_CST_LOW (bitlength);
3975
3976 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3977 are "complicated" (more than one range), initialize (the
3978 constant parts) by copying from a constant. */
3979 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3980 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3981 {
3982 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3983 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3984 char *bit_buffer = (char *) alloca (nbits);
3985 HOST_WIDE_INT word = 0;
3986 int bit_pos = 0;
3987 int ibit = 0;
3988 int offset = 0; /* In bytes from beginning of set. */
3989 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3990 for (;;)
3991 {
3992 if (bit_buffer[ibit])
3993 {
3994 if (BYTES_BIG_ENDIAN)
3995 word |= (1 << (set_word_size - 1 - bit_pos));
3996 else
3997 word |= 1 << bit_pos;
3998 }
3999 bit_pos++; ibit++;
4000 if (bit_pos >= set_word_size || ibit == nbits)
4001 {
4002 if (word != 0 || ! cleared)
4003 {
4004 rtx datum = GEN_INT (word);
4005 rtx to_rtx;
4006 /* The assumption here is that it is safe to use
4007 XEXP if the set is multi-word, but not if
4008 it's single-word. */
4009 if (GET_CODE (target) == MEM)
4010 {
4011 to_rtx = plus_constant (XEXP (target, 0), offset);
4012 to_rtx = change_address (target, mode, to_rtx);
4013 }
4014 else if (offset == 0)
4015 to_rtx = target;
4016 else
4017 abort ();
4018 emit_move_insn (to_rtx, datum);
4019 }
4020 if (ibit == nbits)
4021 break;
4022 word = 0;
4023 bit_pos = 0;
4024 offset += set_word_size / BITS_PER_UNIT;
4025 }
4026 }
4027 }
4028 else if (!cleared)
4029 {
4030 /* Don't bother clearing storage if the set is all ones. */
4031 if (TREE_CHAIN (elt) != NULL_TREE
4032 || (TREE_PURPOSE (elt) == NULL_TREE
4033 ? nbits != 1
4034 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4035 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4036 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4037 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4038 != nbits))))
4039 clear_storage (target, expr_size (exp),
4040 TYPE_ALIGN (type) / BITS_PER_UNIT);
4041 }
4042
4043 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4044 {
4045 /* start of range of element or NULL */
4046 tree startbit = TREE_PURPOSE (elt);
4047 /* end of range of element, or element value */
4048 tree endbit = TREE_VALUE (elt);
4049 HOST_WIDE_INT startb, endb;
4050 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4051
4052 bitlength_rtx = expand_expr (bitlength,
4053 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4054
4055 /* handle non-range tuple element like [ expr ] */
4056 if (startbit == NULL_TREE)
4057 {
4058 startbit = save_expr (endbit);
4059 endbit = startbit;
4060 }
4061 startbit = convert (sizetype, startbit);
4062 endbit = convert (sizetype, endbit);
4063 if (! integer_zerop (domain_min))
4064 {
4065 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4066 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4067 }
4068 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4069 EXPAND_CONST_ADDRESS);
4070 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4071 EXPAND_CONST_ADDRESS);
4072
4073 if (REG_P (target))
4074 {
4075 targetx = assign_stack_temp (GET_MODE (target),
4076 GET_MODE_SIZE (GET_MODE (target)),
4077 0);
4078 emit_move_insn (targetx, target);
4079 }
4080 else if (GET_CODE (target) == MEM)
4081 targetx = target;
4082 else
4083 abort ();
4084
4085 #ifdef TARGET_MEM_FUNCTIONS
4086 /* Optimization: If startbit and endbit are
4087 constants divisible by BITS_PER_UNIT,
4088 call memset instead. */
4089 if (TREE_CODE (startbit) == INTEGER_CST
4090 && TREE_CODE (endbit) == INTEGER_CST
4091 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4092 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4093 {
4094 emit_library_call (memset_libfunc, 0,
4095 VOIDmode, 3,
4096 plus_constant (XEXP (targetx, 0),
4097 startb / BITS_PER_UNIT),
4098 Pmode,
4099 constm1_rtx, TYPE_MODE (integer_type_node),
4100 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4101 TYPE_MODE (sizetype));
4102 }
4103 else
4104 #endif
4105 {
4106 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
4107 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4108 bitlength_rtx, TYPE_MODE (sizetype),
4109 startbit_rtx, TYPE_MODE (sizetype),
4110 endbit_rtx, TYPE_MODE (sizetype));
4111 }
4112 if (REG_P (target))
4113 emit_move_insn (target, targetx);
4114 }
4115 }
4116
4117 else
4118 abort ();
4119 }
4120
4121 /* Store the value of EXP (an expression tree)
4122 into a subfield of TARGET which has mode MODE and occupies
4123 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4124 If MODE is VOIDmode, it means that we are storing into a bit-field.
4125
4126 If VALUE_MODE is VOIDmode, return nothing in particular.
4127 UNSIGNEDP is not used in this case.
4128
4129 Otherwise, return an rtx for the value stored. This rtx
4130 has mode VALUE_MODE if that is convenient to do.
4131 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4132
4133 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4134 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4135
4136 static rtx
4137 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4138 unsignedp, align, total_size)
4139 rtx target;
4140 int bitsize, bitpos;
4141 enum machine_mode mode;
4142 tree exp;
4143 enum machine_mode value_mode;
4144 int unsignedp;
4145 int align;
4146 int total_size;
4147 {
4148 HOST_WIDE_INT width_mask = 0;
4149
4150 if (TREE_CODE (exp) == ERROR_MARK)
4151 return const0_rtx;
4152
4153 if (bitsize < HOST_BITS_PER_WIDE_INT)
4154 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4155
4156 /* If we are storing into an unaligned field of an aligned union that is
4157 in a register, we may have the mode of TARGET being an integer mode but
4158 MODE == BLKmode. In that case, get an aligned object whose size and
4159 alignment are the same as TARGET and store TARGET into it (we can avoid
4160 the store if the field being stored is the entire width of TARGET). Then
4161 call ourselves recursively to store the field into a BLKmode version of
4162 that object. Finally, load from the object into TARGET. This is not
4163 very efficient in general, but should only be slightly more expensive
4164 than the otherwise-required unaligned accesses. Perhaps this can be
4165 cleaned up later. */
4166
4167 if (mode == BLKmode
4168 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4169 {
4170 rtx object = assign_stack_temp (GET_MODE (target),
4171 GET_MODE_SIZE (GET_MODE (target)), 0);
4172 rtx blk_object = copy_rtx (object);
4173
4174 MEM_IN_STRUCT_P (object) = 1;
4175 MEM_IN_STRUCT_P (blk_object) = 1;
4176 PUT_MODE (blk_object, BLKmode);
4177
4178 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4179 emit_move_insn (object, target);
4180
4181 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4182 align, total_size);
4183
4184 /* Even though we aren't returning target, we need to
4185 give it the updated value. */
4186 emit_move_insn (target, object);
4187
4188 return blk_object;
4189 }
4190
4191 /* If the structure is in a register or if the component
4192 is a bit field, we cannot use addressing to access it.
4193 Use bit-field techniques or SUBREG to store in it. */
4194
4195 if (mode == VOIDmode
4196 || (mode != BLKmode && ! direct_store[(int) mode])
4197 || GET_CODE (target) == REG
4198 || GET_CODE (target) == SUBREG
4199 /* If the field isn't aligned enough to store as an ordinary memref,
4200 store it as a bit field. */
4201 || (SLOW_UNALIGNED_ACCESS
4202 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4203 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4204 {
4205 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4206
4207 /* If BITSIZE is narrower than the size of the type of EXP
4208 we will be narrowing TEMP. Normally, what's wanted are the
4209 low-order bits. However, if EXP's type is a record and this is
4210 big-endian machine, we want the upper BITSIZE bits. */
4211 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4212 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4213 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4214 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4215 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4216 - bitsize),
4217 temp, 1);
4218
4219 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4220 MODE. */
4221 if (mode != VOIDmode && mode != BLKmode
4222 && mode != TYPE_MODE (TREE_TYPE (exp)))
4223 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4224
4225 /* If the modes of TARGET and TEMP are both BLKmode, both
4226 must be in memory and BITPOS must be aligned on a byte
4227 boundary. If so, we simply do a block copy. */
4228 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4229 {
4230 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4231 || bitpos % BITS_PER_UNIT != 0)
4232 abort ();
4233
4234 target = change_address (target, VOIDmode,
4235 plus_constant (XEXP (target, 0),
4236 bitpos / BITS_PER_UNIT));
4237
4238 emit_block_move (target, temp,
4239 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4240 / BITS_PER_UNIT),
4241 1);
4242
4243 return value_mode == VOIDmode ? const0_rtx : target;
4244 }
4245
4246 /* Store the value in the bitfield. */
4247 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4248 if (value_mode != VOIDmode)
4249 {
4250 /* The caller wants an rtx for the value. */
4251 /* If possible, avoid refetching from the bitfield itself. */
4252 if (width_mask != 0
4253 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4254 {
4255 tree count;
4256 enum machine_mode tmode;
4257
4258 if (unsignedp)
4259 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4260 tmode = GET_MODE (temp);
4261 if (tmode == VOIDmode)
4262 tmode = value_mode;
4263 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4264 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4265 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4266 }
4267 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4268 NULL_RTX, value_mode, 0, align,
4269 total_size);
4270 }
4271 return const0_rtx;
4272 }
4273 else
4274 {
4275 rtx addr = XEXP (target, 0);
4276 rtx to_rtx;
4277
4278 /* If a value is wanted, it must be the lhs;
4279 so make the address stable for multiple use. */
4280
4281 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4282 && ! CONSTANT_ADDRESS_P (addr)
4283 /* A frame-pointer reference is already stable. */
4284 && ! (GET_CODE (addr) == PLUS
4285 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4286 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4287 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4288 addr = copy_to_reg (addr);
4289
4290 /* Now build a reference to just the desired component. */
4291
4292 to_rtx = copy_rtx (change_address (target, mode,
4293 plus_constant (addr,
4294 (bitpos
4295 / BITS_PER_UNIT))));
4296 MEM_IN_STRUCT_P (to_rtx) = 1;
4297
4298 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4299 }
4300 }
4301 \f
4302 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4303 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4304 ARRAY_REFs and find the ultimate containing object, which we return.
4305
4306 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4307 bit position, and *PUNSIGNEDP to the signedness of the field.
4308 If the position of the field is variable, we store a tree
4309 giving the variable offset (in units) in *POFFSET.
4310 This offset is in addition to the bit position.
4311 If the position is not variable, we store 0 in *POFFSET.
4312 We set *PALIGNMENT to the alignment in bytes of the address that will be
4313 computed. This is the alignment of the thing we return if *POFFSET
4314 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4315
4316 If any of the extraction expressions is volatile,
4317 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4318
4319 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4320 is a mode that can be used to access the field. In that case, *PBITSIZE
4321 is redundant.
4322
4323 If the field describes a variable-sized object, *PMODE is set to
4324 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4325 this case, but the address of the object can be found. */
4326
4327 tree
4328 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4329 punsignedp, pvolatilep, palignment)
4330 tree exp;
4331 int *pbitsize;
4332 int *pbitpos;
4333 tree *poffset;
4334 enum machine_mode *pmode;
4335 int *punsignedp;
4336 int *pvolatilep;
4337 int *palignment;
4338 {
4339 tree orig_exp = exp;
4340 tree size_tree = 0;
4341 enum machine_mode mode = VOIDmode;
4342 tree offset = integer_zero_node;
4343 int alignment = BIGGEST_ALIGNMENT;
4344
4345 if (TREE_CODE (exp) == COMPONENT_REF)
4346 {
4347 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4348 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4349 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4350 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4351 }
4352 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4353 {
4354 size_tree = TREE_OPERAND (exp, 1);
4355 *punsignedp = TREE_UNSIGNED (exp);
4356 }
4357 else
4358 {
4359 mode = TYPE_MODE (TREE_TYPE (exp));
4360 *pbitsize = GET_MODE_BITSIZE (mode);
4361 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4362 }
4363
4364 if (size_tree)
4365 {
4366 if (TREE_CODE (size_tree) != INTEGER_CST)
4367 mode = BLKmode, *pbitsize = -1;
4368 else
4369 *pbitsize = TREE_INT_CST_LOW (size_tree);
4370 }
4371
4372 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4373 and find the ultimate containing object. */
4374
4375 *pbitpos = 0;
4376
4377 while (1)
4378 {
4379 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4380 {
4381 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4382 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4383 : TREE_OPERAND (exp, 2));
4384 tree constant = integer_zero_node, var = pos;
4385
4386 /* If this field hasn't been filled in yet, don't go
4387 past it. This should only happen when folding expressions
4388 made during type construction. */
4389 if (pos == 0)
4390 break;
4391
4392 /* Assume here that the offset is a multiple of a unit.
4393 If not, there should be an explicitly added constant. */
4394 if (TREE_CODE (pos) == PLUS_EXPR
4395 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4396 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4397 else if (TREE_CODE (pos) == INTEGER_CST)
4398 constant = pos, var = integer_zero_node;
4399
4400 *pbitpos += TREE_INT_CST_LOW (constant);
4401 offset = size_binop (PLUS_EXPR, offset,
4402 size_binop (EXACT_DIV_EXPR, var,
4403 size_int (BITS_PER_UNIT)));
4404 }
4405
4406 else if (TREE_CODE (exp) == ARRAY_REF)
4407 {
4408 /* This code is based on the code in case ARRAY_REF in expand_expr
4409 below. We assume here that the size of an array element is
4410 always an integral multiple of BITS_PER_UNIT. */
4411
4412 tree index = TREE_OPERAND (exp, 1);
4413 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4414 tree low_bound
4415 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4416 tree index_type = TREE_TYPE (index);
4417
4418 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4419 {
4420 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4421 index);
4422 index_type = TREE_TYPE (index);
4423 }
4424
4425 if (! integer_zerop (low_bound))
4426 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4427
4428 index = fold (build (MULT_EXPR, index_type, index,
4429 convert (index_type,
4430 TYPE_SIZE (TREE_TYPE (exp)))));
4431
4432 if (TREE_CODE (index) == INTEGER_CST
4433 && TREE_INT_CST_HIGH (index) == 0)
4434 *pbitpos += TREE_INT_CST_LOW (index);
4435 else
4436 {
4437 offset = size_binop (PLUS_EXPR, offset,
4438 size_binop (FLOOR_DIV_EXPR, index,
4439 size_int (BITS_PER_UNIT)));
4440
4441 if (contains_placeholder_p (offset))
4442 offset = build (WITH_RECORD_EXPR, sizetype, offset, exp);
4443 }
4444 }
4445 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4446 && ! ((TREE_CODE (exp) == NOP_EXPR
4447 || TREE_CODE (exp) == CONVERT_EXPR)
4448 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4449 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4450 != UNION_TYPE))
4451 && (TYPE_MODE (TREE_TYPE (exp))
4452 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4453 break;
4454
4455 /* If any reference in the chain is volatile, the effect is volatile. */
4456 if (TREE_THIS_VOLATILE (exp))
4457 *pvolatilep = 1;
4458
4459 /* If the offset is non-constant already, then we can't assume any
4460 alignment more than the alignment here. */
4461 if (! integer_zerop (offset))
4462 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4463
4464 exp = TREE_OPERAND (exp, 0);
4465 }
4466
4467 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4468 alignment = MIN (alignment, DECL_ALIGN (exp));
4469 else if (TREE_TYPE (exp) != 0)
4470 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4471
4472 if (integer_zerop (offset))
4473 offset = 0;
4474
4475 if (offset != 0 && contains_placeholder_p (offset))
4476 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4477
4478 *pmode = mode;
4479 *poffset = offset;
4480 *palignment = alignment / BITS_PER_UNIT;
4481 return exp;
4482 }
4483
4484 /* Subroutine of expand_exp: compute memory_usage from modifier. */
4485 static enum memory_use_mode
4486 get_memory_usage_from_modifier (modifier)
4487 enum expand_modifier modifier;
4488 {
4489 switch (modifier)
4490 {
4491 case EXPAND_NORMAL:
4492 return MEMORY_USE_RO;
4493 break;
4494 case EXPAND_MEMORY_USE_WO:
4495 return MEMORY_USE_WO;
4496 break;
4497 case EXPAND_MEMORY_USE_RW:
4498 return MEMORY_USE_RW;
4499 break;
4500 case EXPAND_INITIALIZER:
4501 case EXPAND_MEMORY_USE_DONT:
4502 case EXPAND_SUM:
4503 case EXPAND_CONST_ADDRESS:
4504 return MEMORY_USE_DONT;
4505 case EXPAND_MEMORY_USE_BAD:
4506 default:
4507 abort ();
4508 }
4509 }
4510 \f
4511 /* Given an rtx VALUE that may contain additions and multiplications,
4512 return an equivalent value that just refers to a register or memory.
4513 This is done by generating instructions to perform the arithmetic
4514 and returning a pseudo-register containing the value.
4515
4516 The returned value may be a REG, SUBREG, MEM or constant. */
4517
4518 rtx
4519 force_operand (value, target)
4520 rtx value, target;
4521 {
4522 register optab binoptab = 0;
4523 /* Use a temporary to force order of execution of calls to
4524 `force_operand'. */
4525 rtx tmp;
4526 register rtx op2;
4527 /* Use subtarget as the target for operand 0 of a binary operation. */
4528 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4529
4530 if (GET_CODE (value) == PLUS)
4531 binoptab = add_optab;
4532 else if (GET_CODE (value) == MINUS)
4533 binoptab = sub_optab;
4534 else if (GET_CODE (value) == MULT)
4535 {
4536 op2 = XEXP (value, 1);
4537 if (!CONSTANT_P (op2)
4538 && !(GET_CODE (op2) == REG && op2 != subtarget))
4539 subtarget = 0;
4540 tmp = force_operand (XEXP (value, 0), subtarget);
4541 return expand_mult (GET_MODE (value), tmp,
4542 force_operand (op2, NULL_RTX),
4543 target, 0);
4544 }
4545
4546 if (binoptab)
4547 {
4548 op2 = XEXP (value, 1);
4549 if (!CONSTANT_P (op2)
4550 && !(GET_CODE (op2) == REG && op2 != subtarget))
4551 subtarget = 0;
4552 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4553 {
4554 binoptab = add_optab;
4555 op2 = negate_rtx (GET_MODE (value), op2);
4556 }
4557
4558 /* Check for an addition with OP2 a constant integer and our first
4559 operand a PLUS of a virtual register and something else. In that
4560 case, we want to emit the sum of the virtual register and the
4561 constant first and then add the other value. This allows virtual
4562 register instantiation to simply modify the constant rather than
4563 creating another one around this addition. */
4564 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4565 && GET_CODE (XEXP (value, 0)) == PLUS
4566 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4567 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4568 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4569 {
4570 rtx temp = expand_binop (GET_MODE (value), binoptab,
4571 XEXP (XEXP (value, 0), 0), op2,
4572 subtarget, 0, OPTAB_LIB_WIDEN);
4573 return expand_binop (GET_MODE (value), binoptab, temp,
4574 force_operand (XEXP (XEXP (value, 0), 1), 0),
4575 target, 0, OPTAB_LIB_WIDEN);
4576 }
4577
4578 tmp = force_operand (XEXP (value, 0), subtarget);
4579 return expand_binop (GET_MODE (value), binoptab, tmp,
4580 force_operand (op2, NULL_RTX),
4581 target, 0, OPTAB_LIB_WIDEN);
4582 /* We give UNSIGNEDP = 0 to expand_binop
4583 because the only operations we are expanding here are signed ones. */
4584 }
4585 return value;
4586 }
4587 \f
4588 /* Subroutine of expand_expr:
4589 save the non-copied parts (LIST) of an expr (LHS), and return a list
4590 which can restore these values to their previous values,
4591 should something modify their storage. */
4592
4593 static tree
4594 save_noncopied_parts (lhs, list)
4595 tree lhs;
4596 tree list;
4597 {
4598 tree tail;
4599 tree parts = 0;
4600
4601 for (tail = list; tail; tail = TREE_CHAIN (tail))
4602 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4603 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4604 else
4605 {
4606 tree part = TREE_VALUE (tail);
4607 tree part_type = TREE_TYPE (part);
4608 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4609 rtx target = assign_temp (part_type, 0, 1, 1);
4610 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4611 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4612 parts = tree_cons (to_be_saved,
4613 build (RTL_EXPR, part_type, NULL_TREE,
4614 (tree) target),
4615 parts);
4616 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4617 }
4618 return parts;
4619 }
4620
4621 /* Subroutine of expand_expr:
4622 record the non-copied parts (LIST) of an expr (LHS), and return a list
4623 which specifies the initial values of these parts. */
4624
4625 static tree
4626 init_noncopied_parts (lhs, list)
4627 tree lhs;
4628 tree list;
4629 {
4630 tree tail;
4631 tree parts = 0;
4632
4633 for (tail = list; tail; tail = TREE_CHAIN (tail))
4634 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4635 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4636 else
4637 {
4638 tree part = TREE_VALUE (tail);
4639 tree part_type = TREE_TYPE (part);
4640 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4641 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4642 }
4643 return parts;
4644 }
4645
4646 /* Subroutine of expand_expr: return nonzero iff there is no way that
4647 EXP can reference X, which is being modified. */
4648
4649 static int
4650 safe_from_p (x, exp)
4651 rtx x;
4652 tree exp;
4653 {
4654 rtx exp_rtl = 0;
4655 int i, nops;
4656
4657 if (x == 0
4658 /* If EXP has varying size, we MUST use a target since we currently
4659 have no way of allocating temporaries of variable size
4660 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4661 So we assume here that something at a higher level has prevented a
4662 clash. This is somewhat bogus, but the best we can do. Only
4663 do this when X is BLKmode. */
4664 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4665 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4666 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4667 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4668 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4669 != INTEGER_CST)
4670 && GET_MODE (x) == BLKmode))
4671 return 1;
4672
4673 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4674 find the underlying pseudo. */
4675 if (GET_CODE (x) == SUBREG)
4676 {
4677 x = SUBREG_REG (x);
4678 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4679 return 0;
4680 }
4681
4682 /* If X is a location in the outgoing argument area, it is always safe. */
4683 if (GET_CODE (x) == MEM
4684 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4685 || (GET_CODE (XEXP (x, 0)) == PLUS
4686 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4687 return 1;
4688
4689 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4690 {
4691 case 'd':
4692 exp_rtl = DECL_RTL (exp);
4693 break;
4694
4695 case 'c':
4696 return 1;
4697
4698 case 'x':
4699 if (TREE_CODE (exp) == TREE_LIST)
4700 return ((TREE_VALUE (exp) == 0
4701 || safe_from_p (x, TREE_VALUE (exp)))
4702 && (TREE_CHAIN (exp) == 0
4703 || safe_from_p (x, TREE_CHAIN (exp))));
4704 else
4705 return 0;
4706
4707 case '1':
4708 return safe_from_p (x, TREE_OPERAND (exp, 0));
4709
4710 case '2':
4711 case '<':
4712 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4713 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4714
4715 case 'e':
4716 case 'r':
4717 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4718 the expression. If it is set, we conflict iff we are that rtx or
4719 both are in memory. Otherwise, we check all operands of the
4720 expression recursively. */
4721
4722 switch (TREE_CODE (exp))
4723 {
4724 case ADDR_EXPR:
4725 return (staticp (TREE_OPERAND (exp, 0))
4726 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4727
4728 case INDIRECT_REF:
4729 if (GET_CODE (x) == MEM)
4730 return 0;
4731 break;
4732
4733 case CALL_EXPR:
4734 exp_rtl = CALL_EXPR_RTL (exp);
4735 if (exp_rtl == 0)
4736 {
4737 /* Assume that the call will clobber all hard registers and
4738 all of memory. */
4739 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4740 || GET_CODE (x) == MEM)
4741 return 0;
4742 }
4743
4744 break;
4745
4746 case RTL_EXPR:
4747 /* If a sequence exists, we would have to scan every instruction
4748 in the sequence to see if it was safe. This is probably not
4749 worthwhile. */
4750 if (RTL_EXPR_SEQUENCE (exp))
4751 return 0;
4752
4753 exp_rtl = RTL_EXPR_RTL (exp);
4754 break;
4755
4756 case WITH_CLEANUP_EXPR:
4757 exp_rtl = RTL_EXPR_RTL (exp);
4758 break;
4759
4760 case CLEANUP_POINT_EXPR:
4761 return safe_from_p (x, TREE_OPERAND (exp, 0));
4762
4763 case SAVE_EXPR:
4764 exp_rtl = SAVE_EXPR_RTL (exp);
4765 break;
4766
4767 case BIND_EXPR:
4768 /* The only operand we look at is operand 1. The rest aren't
4769 part of the expression. */
4770 return safe_from_p (x, TREE_OPERAND (exp, 1));
4771
4772 case METHOD_CALL_EXPR:
4773 /* This takes a rtx argument, but shouldn't appear here. */
4774 abort ();
4775
4776 default:
4777 break;
4778 }
4779
4780 /* If we have an rtx, we do not need to scan our operands. */
4781 if (exp_rtl)
4782 break;
4783
4784 nops = tree_code_length[(int) TREE_CODE (exp)];
4785 for (i = 0; i < nops; i++)
4786 if (TREE_OPERAND (exp, i) != 0
4787 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4788 return 0;
4789 }
4790
4791 /* If we have an rtl, find any enclosed object. Then see if we conflict
4792 with it. */
4793 if (exp_rtl)
4794 {
4795 if (GET_CODE (exp_rtl) == SUBREG)
4796 {
4797 exp_rtl = SUBREG_REG (exp_rtl);
4798 if (GET_CODE (exp_rtl) == REG
4799 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4800 return 0;
4801 }
4802
4803 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4804 are memory and EXP is not readonly. */
4805 return ! (rtx_equal_p (x, exp_rtl)
4806 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4807 && ! TREE_READONLY (exp)));
4808 }
4809
4810 /* If we reach here, it is safe. */
4811 return 1;
4812 }
4813
4814 /* Subroutine of expand_expr: return nonzero iff EXP is an
4815 expression whose type is statically determinable. */
4816
4817 static int
4818 fixed_type_p (exp)
4819 tree exp;
4820 {
4821 if (TREE_CODE (exp) == PARM_DECL
4822 || TREE_CODE (exp) == VAR_DECL
4823 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4824 || TREE_CODE (exp) == COMPONENT_REF
4825 || TREE_CODE (exp) == ARRAY_REF)
4826 return 1;
4827 return 0;
4828 }
4829
4830 /* Subroutine of expand_expr: return rtx if EXP is a
4831 variable or parameter; else return 0. */
4832
4833 static rtx
4834 var_rtx (exp)
4835 tree exp;
4836 {
4837 STRIP_NOPS (exp);
4838 switch (TREE_CODE (exp))
4839 {
4840 case PARM_DECL:
4841 case VAR_DECL:
4842 return DECL_RTL (exp);
4843 default:
4844 return 0;
4845 }
4846 }
4847 \f
4848 /* expand_expr: generate code for computing expression EXP.
4849 An rtx for the computed value is returned. The value is never null.
4850 In the case of a void EXP, const0_rtx is returned.
4851
4852 The value may be stored in TARGET if TARGET is nonzero.
4853 TARGET is just a suggestion; callers must assume that
4854 the rtx returned may not be the same as TARGET.
4855
4856 If TARGET is CONST0_RTX, it means that the value will be ignored.
4857
4858 If TMODE is not VOIDmode, it suggests generating the
4859 result in mode TMODE. But this is done only when convenient.
4860 Otherwise, TMODE is ignored and the value generated in its natural mode.
4861 TMODE is just a suggestion; callers must assume that
4862 the rtx returned may not have mode TMODE.
4863
4864 Note that TARGET may have neither TMODE nor MODE. In that case, it
4865 probably will not be used.
4866
4867 If MODIFIER is EXPAND_SUM then when EXP is an addition
4868 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4869 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4870 products as above, or REG or MEM, or constant.
4871 Ordinarily in such cases we would output mul or add instructions
4872 and then return a pseudo reg containing the sum.
4873
4874 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4875 it also marks a label as absolutely required (it can't be dead).
4876 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4877 This is used for outputting expressions used in initializers.
4878
4879 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4880 with a constant address even if that address is not normally legitimate.
4881 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4882
4883 rtx
4884 expand_expr (exp, target, tmode, modifier)
4885 register tree exp;
4886 rtx target;
4887 enum machine_mode tmode;
4888 enum expand_modifier modifier;
4889 {
4890 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4891 This is static so it will be accessible to our recursive callees. */
4892 static tree placeholder_list = 0;
4893 register rtx op0, op1, temp;
4894 tree type = TREE_TYPE (exp);
4895 int unsignedp = TREE_UNSIGNED (type);
4896 register enum machine_mode mode = TYPE_MODE (type);
4897 register enum tree_code code = TREE_CODE (exp);
4898 optab this_optab;
4899 /* Use subtarget as the target for operand 0 of a binary operation. */
4900 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4901 rtx original_target = target;
4902 /* Maybe defer this until sure not doing bytecode? */
4903 int ignore = (target == const0_rtx
4904 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4905 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4906 || code == COND_EXPR)
4907 && TREE_CODE (type) == VOID_TYPE));
4908 tree context;
4909 /* Used by check-memory-usage to make modifier read only. */
4910 enum expand_modifier ro_modifier;
4911
4912 /* Make a read-only version of the modifier. */
4913 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4914 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4915 ro_modifier = modifier;
4916 else
4917 ro_modifier = EXPAND_NORMAL;
4918
4919 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4920 {
4921 bc_expand_expr (exp);
4922 return NULL;
4923 }
4924
4925 /* Don't use hard regs as subtargets, because the combiner
4926 can only handle pseudo regs. */
4927 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4928 subtarget = 0;
4929 /* Avoid subtargets inside loops,
4930 since they hide some invariant expressions. */
4931 if (preserve_subexpressions_p ())
4932 subtarget = 0;
4933
4934 /* If we are going to ignore this result, we need only do something
4935 if there is a side-effect somewhere in the expression. If there
4936 is, short-circuit the most common cases here. Note that we must
4937 not call expand_expr with anything but const0_rtx in case this
4938 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4939
4940 if (ignore)
4941 {
4942 if (! TREE_SIDE_EFFECTS (exp))
4943 return const0_rtx;
4944
4945 /* Ensure we reference a volatile object even if value is ignored. */
4946 if (TREE_THIS_VOLATILE (exp)
4947 && TREE_CODE (exp) != FUNCTION_DECL
4948 && mode != VOIDmode && mode != BLKmode)
4949 {
4950 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
4951 if (GET_CODE (temp) == MEM)
4952 temp = copy_to_reg (temp);
4953 return const0_rtx;
4954 }
4955
4956 if (TREE_CODE_CLASS (code) == '1')
4957 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4958 VOIDmode, ro_modifier);
4959 else if (TREE_CODE_CLASS (code) == '2'
4960 || TREE_CODE_CLASS (code) == '<')
4961 {
4962 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
4963 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
4964 return const0_rtx;
4965 }
4966 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4967 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4968 /* If the second operand has no side effects, just evaluate
4969 the first. */
4970 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4971 VOIDmode, ro_modifier);
4972
4973 target = 0;
4974 }
4975
4976 /* If will do cse, generate all results into pseudo registers
4977 since 1) that allows cse to find more things
4978 and 2) otherwise cse could produce an insn the machine
4979 cannot support. */
4980
4981 if (! cse_not_expected && mode != BLKmode && target
4982 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4983 target = subtarget;
4984
4985 switch (code)
4986 {
4987 case LABEL_DECL:
4988 {
4989 tree function = decl_function_context (exp);
4990 /* Handle using a label in a containing function. */
4991 if (function != current_function_decl
4992 && function != inline_function_decl && function != 0)
4993 {
4994 struct function *p = find_function_data (function);
4995 /* Allocate in the memory associated with the function
4996 that the label is in. */
4997 push_obstacks (p->function_obstack,
4998 p->function_maybepermanent_obstack);
4999
5000 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
5001 label_rtx (exp), p->forced_labels);
5002 pop_obstacks ();
5003 }
5004 else if (modifier == EXPAND_INITIALIZER)
5005 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
5006 label_rtx (exp), forced_labels);
5007 temp = gen_rtx (MEM, FUNCTION_MODE,
5008 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
5009 if (function != current_function_decl
5010 && function != inline_function_decl && function != 0)
5011 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5012 return temp;
5013 }
5014
5015 case PARM_DECL:
5016 if (DECL_RTL (exp) == 0)
5017 {
5018 error_with_decl (exp, "prior parameter's size depends on `%s'");
5019 return CONST0_RTX (mode);
5020 }
5021
5022 /* ... fall through ... */
5023
5024 case VAR_DECL:
5025 /* If a static var's type was incomplete when the decl was written,
5026 but the type is complete now, lay out the decl now. */
5027 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5028 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5029 {
5030 push_obstacks_nochange ();
5031 end_temporary_allocation ();
5032 layout_decl (exp, 0);
5033 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5034 pop_obstacks ();
5035 }
5036
5037 /* Only check automatic variables. Currently, function arguments are
5038 not checked (this can be done at compile-time with prototypes).
5039 Aggregates are not checked. */
5040 if (flag_check_memory_usage && code == VAR_DECL
5041 && GET_CODE (DECL_RTL (exp)) == MEM
5042 && DECL_CONTEXT (exp) != NULL_TREE
5043 && ! TREE_STATIC (exp)
5044 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5045 {
5046 enum memory_use_mode memory_usage;
5047 memory_usage = get_memory_usage_from_modifier (modifier);
5048
5049 if (memory_usage != MEMORY_USE_DONT)
5050 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5051 XEXP (DECL_RTL (exp), 0), ptr_mode,
5052 GEN_INT (int_size_in_bytes (type)),
5053 TYPE_MODE (sizetype),
5054 GEN_INT (memory_usage),
5055 TYPE_MODE (integer_type_node));
5056 }
5057
5058 /* ... fall through ... */
5059
5060 case FUNCTION_DECL:
5061 case RESULT_DECL:
5062 if (DECL_RTL (exp) == 0)
5063 abort ();
5064
5065 /* Ensure variable marked as used even if it doesn't go through
5066 a parser. If it hasn't be used yet, write out an external
5067 definition. */
5068 if (! TREE_USED (exp))
5069 {
5070 assemble_external (exp);
5071 TREE_USED (exp) = 1;
5072 }
5073
5074 /* Show we haven't gotten RTL for this yet. */
5075 temp = 0;
5076
5077 /* Handle variables inherited from containing functions. */
5078 context = decl_function_context (exp);
5079
5080 /* We treat inline_function_decl as an alias for the current function
5081 because that is the inline function whose vars, types, etc.
5082 are being merged into the current function.
5083 See expand_inline_function. */
5084
5085 if (context != 0 && context != current_function_decl
5086 && context != inline_function_decl
5087 /* If var is static, we don't need a static chain to access it. */
5088 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5089 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5090 {
5091 rtx addr;
5092
5093 /* Mark as non-local and addressable. */
5094 DECL_NONLOCAL (exp) = 1;
5095 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5096 abort ();
5097 mark_addressable (exp);
5098 if (GET_CODE (DECL_RTL (exp)) != MEM)
5099 abort ();
5100 addr = XEXP (DECL_RTL (exp), 0);
5101 if (GET_CODE (addr) == MEM)
5102 addr = gen_rtx (MEM, Pmode,
5103 fix_lexical_addr (XEXP (addr, 0), exp));
5104 else
5105 addr = fix_lexical_addr (addr, exp);
5106 temp = change_address (DECL_RTL (exp), mode, addr);
5107 }
5108
5109 /* This is the case of an array whose size is to be determined
5110 from its initializer, while the initializer is still being parsed.
5111 See expand_decl. */
5112
5113 else if (GET_CODE (DECL_RTL (exp)) == MEM
5114 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5115 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5116 XEXP (DECL_RTL (exp), 0));
5117
5118 /* If DECL_RTL is memory, we are in the normal case and either
5119 the address is not valid or it is not a register and -fforce-addr
5120 is specified, get the address into a register. */
5121
5122 else if (GET_CODE (DECL_RTL (exp)) == MEM
5123 && modifier != EXPAND_CONST_ADDRESS
5124 && modifier != EXPAND_SUM
5125 && modifier != EXPAND_INITIALIZER
5126 && (! memory_address_p (DECL_MODE (exp),
5127 XEXP (DECL_RTL (exp), 0))
5128 || (flag_force_addr
5129 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5130 temp = change_address (DECL_RTL (exp), VOIDmode,
5131 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5132
5133 /* If we got something, return it. But first, set the alignment
5134 the address is a register. */
5135 if (temp != 0)
5136 {
5137 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5138 mark_reg_pointer (XEXP (temp, 0),
5139 DECL_ALIGN (exp) / BITS_PER_UNIT);
5140
5141 return temp;
5142 }
5143
5144 /* If the mode of DECL_RTL does not match that of the decl, it
5145 must be a promoted value. We return a SUBREG of the wanted mode,
5146 but mark it so that we know that it was already extended. */
5147
5148 if (GET_CODE (DECL_RTL (exp)) == REG
5149 && GET_MODE (DECL_RTL (exp)) != mode)
5150 {
5151 /* Get the signedness used for this variable. Ensure we get the
5152 same mode we got when the variable was declared. */
5153 if (GET_MODE (DECL_RTL (exp))
5154 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5155 abort ();
5156
5157 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
5158 SUBREG_PROMOTED_VAR_P (temp) = 1;
5159 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5160 return temp;
5161 }
5162
5163 return DECL_RTL (exp);
5164
5165 case INTEGER_CST:
5166 return immed_double_const (TREE_INT_CST_LOW (exp),
5167 TREE_INT_CST_HIGH (exp),
5168 mode);
5169
5170 case CONST_DECL:
5171 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5172 EXPAND_MEMORY_USE_BAD);
5173
5174 case REAL_CST:
5175 /* If optimized, generate immediate CONST_DOUBLE
5176 which will be turned into memory by reload if necessary.
5177
5178 We used to force a register so that loop.c could see it. But
5179 this does not allow gen_* patterns to perform optimizations with
5180 the constants. It also produces two insns in cases like "x = 1.0;".
5181 On most machines, floating-point constants are not permitted in
5182 many insns, so we'd end up copying it to a register in any case.
5183
5184 Now, we do the copying in expand_binop, if appropriate. */
5185 return immed_real_const (exp);
5186
5187 case COMPLEX_CST:
5188 case STRING_CST:
5189 if (! TREE_CST_RTL (exp))
5190 output_constant_def (exp);
5191
5192 /* TREE_CST_RTL probably contains a constant address.
5193 On RISC machines where a constant address isn't valid,
5194 make some insns to get that address into a register. */
5195 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5196 && modifier != EXPAND_CONST_ADDRESS
5197 && modifier != EXPAND_INITIALIZER
5198 && modifier != EXPAND_SUM
5199 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5200 || (flag_force_addr
5201 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5202 return change_address (TREE_CST_RTL (exp), VOIDmode,
5203 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5204 return TREE_CST_RTL (exp);
5205
5206 case SAVE_EXPR:
5207 context = decl_function_context (exp);
5208
5209 /* If this SAVE_EXPR was at global context, assume we are an
5210 initialization function and move it into our context. */
5211 if (context == 0)
5212 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5213
5214 /* We treat inline_function_decl as an alias for the current function
5215 because that is the inline function whose vars, types, etc.
5216 are being merged into the current function.
5217 See expand_inline_function. */
5218 if (context == current_function_decl || context == inline_function_decl)
5219 context = 0;
5220
5221 /* If this is non-local, handle it. */
5222 if (context)
5223 {
5224 /* The following call just exists to abort if the context is
5225 not of a containing function. */
5226 find_function_data (context);
5227
5228 temp = SAVE_EXPR_RTL (exp);
5229 if (temp && GET_CODE (temp) == REG)
5230 {
5231 put_var_into_stack (exp);
5232 temp = SAVE_EXPR_RTL (exp);
5233 }
5234 if (temp == 0 || GET_CODE (temp) != MEM)
5235 abort ();
5236 return change_address (temp, mode,
5237 fix_lexical_addr (XEXP (temp, 0), exp));
5238 }
5239 if (SAVE_EXPR_RTL (exp) == 0)
5240 {
5241 if (mode == VOIDmode)
5242 temp = const0_rtx;
5243 else
5244 temp = assign_temp (type, 0, 0, 0);
5245
5246 SAVE_EXPR_RTL (exp) = temp;
5247 if (!optimize && GET_CODE (temp) == REG)
5248 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5249 save_expr_regs);
5250
5251 /* If the mode of TEMP does not match that of the expression, it
5252 must be a promoted value. We pass store_expr a SUBREG of the
5253 wanted mode but mark it so that we know that it was already
5254 extended. Note that `unsignedp' was modified above in
5255 this case. */
5256
5257 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5258 {
5259 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5260 SUBREG_PROMOTED_VAR_P (temp) = 1;
5261 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5262 }
5263
5264 if (temp == const0_rtx)
5265 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5266 EXPAND_MEMORY_USE_BAD);
5267 else
5268 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5269 }
5270
5271 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5272 must be a promoted value. We return a SUBREG of the wanted mode,
5273 but mark it so that we know that it was already extended. */
5274
5275 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5276 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5277 {
5278 /* Compute the signedness and make the proper SUBREG. */
5279 promote_mode (type, mode, &unsignedp, 0);
5280 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5281 SUBREG_PROMOTED_VAR_P (temp) = 1;
5282 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5283 return temp;
5284 }
5285
5286 return SAVE_EXPR_RTL (exp);
5287
5288 case UNSAVE_EXPR:
5289 {
5290 rtx temp;
5291 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5292 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5293 return temp;
5294 }
5295
5296 case PLACEHOLDER_EXPR:
5297 {
5298 tree placeholder_expr;
5299
5300 /* If there is an object on the head of the placeholder list,
5301 see if some object in its references is of type TYPE. For
5302 further information, see tree.def. */
5303 for (placeholder_expr = placeholder_list;
5304 placeholder_expr != 0;
5305 placeholder_expr = TREE_CHAIN (placeholder_expr))
5306 {
5307 tree need_type = TYPE_MAIN_VARIANT (type);
5308 tree object = 0;
5309 tree old_list = placeholder_list;
5310 tree elt;
5311
5312 /* See if the object is the type that we want. */
5313 if ((TYPE_MAIN_VARIANT (TREE_TYPE
5314 (TREE_PURPOSE (placeholder_expr)))
5315 == need_type))
5316 object = TREE_PURPOSE (placeholder_expr);
5317
5318 /* Find the outermost reference that is of the type we want. */
5319 for (elt = TREE_PURPOSE (placeholder_expr);
5320 elt != 0 && object == 0
5321 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5322 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5323 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5324 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5325 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5326 || TREE_CODE (elt) == COND_EXPR)
5327 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5328 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5329 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5330 == need_type))
5331 object = TREE_OPERAND (elt, 0);
5332
5333 if (object != 0)
5334 {
5335 /* Expand this object skipping the list entries before
5336 it was found in case it is also a PLACEHOLDER_EXPR.
5337 In that case, we want to translate it using subsequent
5338 entries. */
5339 placeholder_list = TREE_CHAIN (placeholder_expr);
5340 temp = expand_expr (object, original_target, tmode,
5341 ro_modifier);
5342 placeholder_list = old_list;
5343 return temp;
5344 }
5345 }
5346 }
5347
5348 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5349 abort ();
5350
5351 case WITH_RECORD_EXPR:
5352 /* Put the object on the placeholder list, expand our first operand,
5353 and pop the list. */
5354 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5355 placeholder_list);
5356 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5357 tmode, ro_modifier);
5358 placeholder_list = TREE_CHAIN (placeholder_list);
5359 return target;
5360
5361 case EXIT_EXPR:
5362 expand_exit_loop_if_false (NULL_PTR,
5363 invert_truthvalue (TREE_OPERAND (exp, 0)));
5364 return const0_rtx;
5365
5366 case LOOP_EXPR:
5367 push_temp_slots ();
5368 expand_start_loop (1);
5369 expand_expr_stmt (TREE_OPERAND (exp, 0));
5370 expand_end_loop ();
5371 pop_temp_slots ();
5372
5373 return const0_rtx;
5374
5375 case BIND_EXPR:
5376 {
5377 tree vars = TREE_OPERAND (exp, 0);
5378 int vars_need_expansion = 0;
5379
5380 /* Need to open a binding contour here because
5381 if there are any cleanups they must be contained here. */
5382 expand_start_bindings (0);
5383
5384 /* Mark the corresponding BLOCK for output in its proper place. */
5385 if (TREE_OPERAND (exp, 2) != 0
5386 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5387 insert_block (TREE_OPERAND (exp, 2));
5388
5389 /* If VARS have not yet been expanded, expand them now. */
5390 while (vars)
5391 {
5392 if (DECL_RTL (vars) == 0)
5393 {
5394 vars_need_expansion = 1;
5395 expand_decl (vars);
5396 }
5397 expand_decl_init (vars);
5398 vars = TREE_CHAIN (vars);
5399 }
5400
5401 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5402
5403 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5404
5405 return temp;
5406 }
5407
5408 case RTL_EXPR:
5409 if (RTL_EXPR_SEQUENCE (exp))
5410 {
5411 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5412 abort ();
5413 emit_insns (RTL_EXPR_SEQUENCE (exp));
5414 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5415 }
5416 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5417 free_temps_for_rtl_expr (exp);
5418 return RTL_EXPR_RTL (exp);
5419
5420 case CONSTRUCTOR:
5421 /* If we don't need the result, just ensure we evaluate any
5422 subexpressions. */
5423 if (ignore)
5424 {
5425 tree elt;
5426 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5427 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5428 EXPAND_MEMORY_USE_BAD);
5429 return const0_rtx;
5430 }
5431
5432 /* All elts simple constants => refer to a constant in memory. But
5433 if this is a non-BLKmode mode, let it store a field at a time
5434 since that should make a CONST_INT or CONST_DOUBLE when we
5435 fold. Likewise, if we have a target we can use, it is best to
5436 store directly into the target unless the type is large enough
5437 that memcpy will be used. If we are making an initializer and
5438 all operands are constant, put it in memory as well. */
5439 else if ((TREE_STATIC (exp)
5440 && ((mode == BLKmode
5441 && ! (target != 0 && safe_from_p (target, exp)))
5442 || TREE_ADDRESSABLE (exp)
5443 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5444 && (move_by_pieces_ninsns
5445 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5446 TYPE_ALIGN (type) / BITS_PER_UNIT)
5447 > MOVE_RATIO)
5448 && ! mostly_zeros_p (exp))))
5449 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5450 {
5451 rtx constructor = output_constant_def (exp);
5452 if (modifier != EXPAND_CONST_ADDRESS
5453 && modifier != EXPAND_INITIALIZER
5454 && modifier != EXPAND_SUM
5455 && (! memory_address_p (GET_MODE (constructor),
5456 XEXP (constructor, 0))
5457 || (flag_force_addr
5458 && GET_CODE (XEXP (constructor, 0)) != REG)))
5459 constructor = change_address (constructor, VOIDmode,
5460 XEXP (constructor, 0));
5461 return constructor;
5462 }
5463
5464 else
5465 {
5466 /* Handle calls that pass values in multiple non-contiguous
5467 locations. The Irix 6 ABI has examples of this. */
5468 if (target == 0 || ! safe_from_p (target, exp)
5469 || GET_CODE (target) == PARALLEL)
5470 {
5471 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5472 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5473 else
5474 target = assign_temp (type, 0, 1, 1);
5475 }
5476
5477 if (TREE_READONLY (exp))
5478 {
5479 if (GET_CODE (target) == MEM)
5480 target = copy_rtx (target);
5481
5482 RTX_UNCHANGING_P (target) = 1;
5483 }
5484
5485 store_constructor (exp, target, 0);
5486 return target;
5487 }
5488
5489 case INDIRECT_REF:
5490 {
5491 tree exp1 = TREE_OPERAND (exp, 0);
5492 tree exp2;
5493 tree index;
5494 tree string = string_constant (exp1, &index);
5495 int i;
5496
5497 if (string
5498 && TREE_CODE (string) == STRING_CST
5499 && TREE_CODE (index) == INTEGER_CST
5500 && !TREE_INT_CST_HIGH (index)
5501 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5502 && GET_MODE_CLASS (mode) == MODE_INT
5503 && GET_MODE_SIZE (mode) == 1)
5504 return GEN_INT (TREE_STRING_POINTER (string)[i]);
5505
5506 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5507 op0 = memory_address (mode, op0);
5508
5509 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5510 {
5511 enum memory_use_mode memory_usage;
5512 memory_usage = get_memory_usage_from_modifier (modifier);
5513
5514 if (memory_usage != MEMORY_USE_DONT)
5515 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5516 op0, ptr_mode,
5517 GEN_INT (int_size_in_bytes (type)),
5518 TYPE_MODE (sizetype),
5519 GEN_INT (memory_usage),
5520 TYPE_MODE (integer_type_node));
5521 }
5522
5523 temp = gen_rtx (MEM, mode, op0);
5524 /* If address was computed by addition,
5525 mark this as an element of an aggregate. */
5526 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5527 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5528 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5529 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5530 || (TREE_CODE (exp1) == ADDR_EXPR
5531 && (exp2 = TREE_OPERAND (exp1, 0))
5532 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5533 MEM_IN_STRUCT_P (temp) = 1;
5534 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5535
5536 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5537 here, because, in C and C++, the fact that a location is accessed
5538 through a pointer to const does not mean that the value there can
5539 never change. Languages where it can never change should
5540 also set TREE_STATIC. */
5541 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5542 return temp;
5543 }
5544
5545 case ARRAY_REF:
5546 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5547 abort ();
5548
5549 {
5550 tree array = TREE_OPERAND (exp, 0);
5551 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5552 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5553 tree index = TREE_OPERAND (exp, 1);
5554 tree index_type = TREE_TYPE (index);
5555 HOST_WIDE_INT i;
5556
5557 /* Optimize the special-case of a zero lower bound.
5558
5559 We convert the low_bound to sizetype to avoid some problems
5560 with constant folding. (E.g. suppose the lower bound is 1,
5561 and its mode is QI. Without the conversion, (ARRAY
5562 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5563 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5564
5565 But sizetype isn't quite right either (especially if
5566 the lowbound is negative). FIXME */
5567
5568 if (! integer_zerop (low_bound))
5569 index = fold (build (MINUS_EXPR, index_type, index,
5570 convert (sizetype, low_bound)));
5571
5572 /* Fold an expression like: "foo"[2].
5573 This is not done in fold so it won't happen inside &.
5574 Don't fold if this is for wide characters since it's too
5575 difficult to do correctly and this is a very rare case. */
5576
5577 if (TREE_CODE (array) == STRING_CST
5578 && TREE_CODE (index) == INTEGER_CST
5579 && !TREE_INT_CST_HIGH (index)
5580 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5581 && GET_MODE_CLASS (mode) == MODE_INT
5582 && GET_MODE_SIZE (mode) == 1)
5583 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5584
5585 /* If this is a constant index into a constant array,
5586 just get the value from the array. Handle both the cases when
5587 we have an explicit constructor and when our operand is a variable
5588 that was declared const. */
5589
5590 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5591 {
5592 if (TREE_CODE (index) == INTEGER_CST
5593 && TREE_INT_CST_HIGH (index) == 0)
5594 {
5595 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5596
5597 i = TREE_INT_CST_LOW (index);
5598 while (elem && i--)
5599 elem = TREE_CHAIN (elem);
5600 if (elem)
5601 return expand_expr (fold (TREE_VALUE (elem)), target,
5602 tmode, ro_modifier);
5603 }
5604 }
5605
5606 else if (optimize >= 1
5607 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5608 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5609 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5610 {
5611 if (TREE_CODE (index) == INTEGER_CST)
5612 {
5613 tree init = DECL_INITIAL (array);
5614
5615 i = TREE_INT_CST_LOW (index);
5616 if (TREE_CODE (init) == CONSTRUCTOR)
5617 {
5618 tree elem = CONSTRUCTOR_ELTS (init);
5619
5620 while (elem
5621 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5622 elem = TREE_CHAIN (elem);
5623 if (elem)
5624 return expand_expr (fold (TREE_VALUE (elem)), target,
5625 tmode, ro_modifier);
5626 }
5627 else if (TREE_CODE (init) == STRING_CST
5628 && TREE_INT_CST_HIGH (index) == 0
5629 && (TREE_INT_CST_LOW (index)
5630 < TREE_STRING_LENGTH (init)))
5631 return (GEN_INT
5632 (TREE_STRING_POINTER
5633 (init)[TREE_INT_CST_LOW (index)]));
5634 }
5635 }
5636 }
5637
5638 /* ... fall through ... */
5639
5640 case COMPONENT_REF:
5641 case BIT_FIELD_REF:
5642 /* If the operand is a CONSTRUCTOR, we can just extract the
5643 appropriate field if it is present. Don't do this if we have
5644 already written the data since we want to refer to that copy
5645 and varasm.c assumes that's what we'll do. */
5646 if (code != ARRAY_REF
5647 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5648 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5649 {
5650 tree elt;
5651
5652 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5653 elt = TREE_CHAIN (elt))
5654 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5655 /* We can normally use the value of the field in the
5656 CONSTRUCTOR. However, if this is a bitfield in
5657 an integral mode that we can fit in a HOST_WIDE_INT,
5658 we must mask only the number of bits in the bitfield,
5659 since this is done implicitly by the constructor. If
5660 the bitfield does not meet either of those conditions,
5661 we can't do this optimization. */
5662 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5663 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5664 == MODE_INT)
5665 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5666 <= HOST_BITS_PER_WIDE_INT))))
5667 {
5668 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5669 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5670 {
5671 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5672 enum machine_mode imode
5673 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5674
5675 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5676 {
5677 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5678 op0 = expand_and (op0, op1, target);
5679 }
5680 else
5681 {
5682 tree count
5683 = build_int_2 (imode - bitsize, 0);
5684
5685 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5686 target, 0);
5687 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5688 target, 0);
5689 }
5690 }
5691
5692 return op0;
5693 }
5694 }
5695
5696 {
5697 enum machine_mode mode1;
5698 int bitsize;
5699 int bitpos;
5700 tree offset;
5701 int volatilep = 0;
5702 int alignment;
5703 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5704 &mode1, &unsignedp, &volatilep,
5705 &alignment);
5706
5707 /* If we got back the original object, something is wrong. Perhaps
5708 we are evaluating an expression too early. In any event, don't
5709 infinitely recurse. */
5710 if (tem == exp)
5711 abort ();
5712
5713 /* If TEM's type is a union of variable size, pass TARGET to the inner
5714 computation, since it will need a temporary and TARGET is known
5715 to have to do. This occurs in unchecked conversion in Ada. */
5716
5717 op0 = expand_expr (tem,
5718 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5719 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5720 != INTEGER_CST)
5721 ? target : NULL_RTX),
5722 VOIDmode,
5723 modifier == EXPAND_INITIALIZER ? modifier : 0);
5724
5725 /* If this is a constant, put it into a register if it is a
5726 legitimate constant and memory if it isn't. */
5727 if (CONSTANT_P (op0))
5728 {
5729 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5730 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5731 op0 = force_reg (mode, op0);
5732 else
5733 op0 = validize_mem (force_const_mem (mode, op0));
5734 }
5735
5736 if (offset != 0)
5737 {
5738 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5739
5740 if (GET_CODE (op0) != MEM)
5741 abort ();
5742
5743 if (GET_MODE (offset_rtx) != ptr_mode)
5744 #ifdef POINTERS_EXTEND_UNSIGNED
5745 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
5746 #else
5747 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5748 #endif
5749
5750 op0 = change_address (op0, VOIDmode,
5751 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5752 force_reg (ptr_mode, offset_rtx)));
5753 }
5754
5755 /* Don't forget about volatility even if this is a bitfield. */
5756 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5757 {
5758 op0 = copy_rtx (op0);
5759 MEM_VOLATILE_P (op0) = 1;
5760 }
5761
5762 /* Check the access. */
5763 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5764 {
5765 enum memory_use_mode memory_usage;
5766 memory_usage = get_memory_usage_from_modifier (modifier);
5767
5768 if (memory_usage != MEMORY_USE_DONT)
5769 {
5770 rtx to;
5771 int size;
5772
5773 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5774 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5775
5776 /* Check the access right of the pointer. */
5777 if (size > BITS_PER_UNIT)
5778 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5779 to, ptr_mode,
5780 GEN_INT (size / BITS_PER_UNIT),
5781 TYPE_MODE (sizetype),
5782 GEN_INT (memory_usage),
5783 TYPE_MODE (integer_type_node));
5784 }
5785 }
5786
5787 /* In cases where an aligned union has an unaligned object
5788 as a field, we might be extracting a BLKmode value from
5789 an integer-mode (e.g., SImode) object. Handle this case
5790 by doing the extract into an object as wide as the field
5791 (which we know to be the width of a basic mode), then
5792 storing into memory, and changing the mode to BLKmode.
5793 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5794 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5795 if (mode1 == VOIDmode
5796 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5797 || (modifier != EXPAND_CONST_ADDRESS
5798 && modifier != EXPAND_INITIALIZER
5799 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5800 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5801 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5802 /* If the field isn't aligned enough to fetch as a memref,
5803 fetch it as a bit field. */
5804 || (SLOW_UNALIGNED_ACCESS
5805 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5806 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5807 {
5808 enum machine_mode ext_mode = mode;
5809
5810 if (ext_mode == BLKmode)
5811 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5812
5813 if (ext_mode == BLKmode)
5814 {
5815 /* In this case, BITPOS must start at a byte boundary and
5816 TARGET, if specified, must be a MEM. */
5817 if (GET_CODE (op0) != MEM
5818 || (target != 0 && GET_CODE (target) != MEM)
5819 || bitpos % BITS_PER_UNIT != 0)
5820 abort ();
5821
5822 op0 = change_address (op0, VOIDmode,
5823 plus_constant (XEXP (op0, 0),
5824 bitpos / BITS_PER_UNIT));
5825 if (target == 0)
5826 target = assign_temp (type, 0, 1, 1);
5827
5828 emit_block_move (target, op0,
5829 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5830 / BITS_PER_UNIT),
5831 1);
5832
5833 return target;
5834 }
5835
5836 op0 = validize_mem (op0);
5837
5838 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5839 mark_reg_pointer (XEXP (op0, 0), alignment);
5840
5841 op0 = extract_bit_field (op0, bitsize, bitpos,
5842 unsignedp, target, ext_mode, ext_mode,
5843 alignment,
5844 int_size_in_bytes (TREE_TYPE (tem)));
5845
5846 /* If the result is a record type and BITSIZE is narrower than
5847 the mode of OP0, an integral mode, and this is a big endian
5848 machine, we must put the field into the high-order bits. */
5849 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5850 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5851 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5852 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5853 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5854 - bitsize),
5855 op0, 1);
5856
5857 if (mode == BLKmode)
5858 {
5859 rtx new = assign_stack_temp (ext_mode,
5860 bitsize / BITS_PER_UNIT, 0);
5861
5862 emit_move_insn (new, op0);
5863 op0 = copy_rtx (new);
5864 PUT_MODE (op0, BLKmode);
5865 MEM_IN_STRUCT_P (op0) = 1;
5866 }
5867
5868 return op0;
5869 }
5870
5871 /* If the result is BLKmode, use that to access the object
5872 now as well. */
5873 if (mode == BLKmode)
5874 mode1 = BLKmode;
5875
5876 /* Get a reference to just this component. */
5877 if (modifier == EXPAND_CONST_ADDRESS
5878 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5879 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5880 (bitpos / BITS_PER_UNIT)));
5881 else
5882 op0 = change_address (op0, mode1,
5883 plus_constant (XEXP (op0, 0),
5884 (bitpos / BITS_PER_UNIT)));
5885 if (GET_CODE (XEXP (op0, 0)) == REG)
5886 mark_reg_pointer (XEXP (op0, 0), alignment);
5887
5888 MEM_IN_STRUCT_P (op0) = 1;
5889 MEM_VOLATILE_P (op0) |= volatilep;
5890 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5891 || modifier == EXPAND_CONST_ADDRESS
5892 || modifier == EXPAND_INITIALIZER)
5893 return op0;
5894 else if (target == 0)
5895 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5896
5897 convert_move (target, op0, unsignedp);
5898 return target;
5899 }
5900
5901 /* Intended for a reference to a buffer of a file-object in Pascal.
5902 But it's not certain that a special tree code will really be
5903 necessary for these. INDIRECT_REF might work for them. */
5904 case BUFFER_REF:
5905 abort ();
5906
5907 case IN_EXPR:
5908 {
5909 /* Pascal set IN expression.
5910
5911 Algorithm:
5912 rlo = set_low - (set_low%bits_per_word);
5913 the_word = set [ (index - rlo)/bits_per_word ];
5914 bit_index = index % bits_per_word;
5915 bitmask = 1 << bit_index;
5916 return !!(the_word & bitmask); */
5917
5918 tree set = TREE_OPERAND (exp, 0);
5919 tree index = TREE_OPERAND (exp, 1);
5920 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5921 tree set_type = TREE_TYPE (set);
5922 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5923 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5924 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5925 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5926 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5927 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5928 rtx setaddr = XEXP (setval, 0);
5929 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5930 rtx rlow;
5931 rtx diff, quo, rem, addr, bit, result;
5932
5933 preexpand_calls (exp);
5934
5935 /* If domain is empty, answer is no. Likewise if index is constant
5936 and out of bounds. */
5937 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5938 && TREE_CODE (set_low_bound) == INTEGER_CST
5939 && tree_int_cst_lt (set_high_bound, set_low_bound)
5940 || (TREE_CODE (index) == INTEGER_CST
5941 && TREE_CODE (set_low_bound) == INTEGER_CST
5942 && tree_int_cst_lt (index, set_low_bound))
5943 || (TREE_CODE (set_high_bound) == INTEGER_CST
5944 && TREE_CODE (index) == INTEGER_CST
5945 && tree_int_cst_lt (set_high_bound, index))))
5946 return const0_rtx;
5947
5948 if (target == 0)
5949 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5950
5951 /* If we get here, we have to generate the code for both cases
5952 (in range and out of range). */
5953
5954 op0 = gen_label_rtx ();
5955 op1 = gen_label_rtx ();
5956
5957 if (! (GET_CODE (index_val) == CONST_INT
5958 && GET_CODE (lo_r) == CONST_INT))
5959 {
5960 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5961 GET_MODE (index_val), iunsignedp, 0);
5962 emit_jump_insn (gen_blt (op1));
5963 }
5964
5965 if (! (GET_CODE (index_val) == CONST_INT
5966 && GET_CODE (hi_r) == CONST_INT))
5967 {
5968 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5969 GET_MODE (index_val), iunsignedp, 0);
5970 emit_jump_insn (gen_bgt (op1));
5971 }
5972
5973 /* Calculate the element number of bit zero in the first word
5974 of the set. */
5975 if (GET_CODE (lo_r) == CONST_INT)
5976 rlow = GEN_INT (INTVAL (lo_r)
5977 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5978 else
5979 rlow = expand_binop (index_mode, and_optab, lo_r,
5980 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5981 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5982
5983 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5984 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5985
5986 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5987 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5988 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5989 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5990
5991 addr = memory_address (byte_mode,
5992 expand_binop (index_mode, add_optab, diff,
5993 setaddr, NULL_RTX, iunsignedp,
5994 OPTAB_LIB_WIDEN));
5995
5996 /* Extract the bit we want to examine */
5997 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5998 gen_rtx (MEM, byte_mode, addr),
5999 make_tree (TREE_TYPE (index), rem),
6000 NULL_RTX, 1);
6001 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6002 GET_MODE (target) == byte_mode ? target : 0,
6003 1, OPTAB_LIB_WIDEN);
6004
6005 if (result != target)
6006 convert_move (target, result, 1);
6007
6008 /* Output the code to handle the out-of-range case. */
6009 emit_jump (op0);
6010 emit_label (op1);
6011 emit_move_insn (target, const0_rtx);
6012 emit_label (op0);
6013 return target;
6014 }
6015
6016 case WITH_CLEANUP_EXPR:
6017 if (RTL_EXPR_RTL (exp) == 0)
6018 {
6019 RTL_EXPR_RTL (exp)
6020 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6021 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6022
6023 /* That's it for this cleanup. */
6024 TREE_OPERAND (exp, 2) = 0;
6025 }
6026 return RTL_EXPR_RTL (exp);
6027
6028 case CLEANUP_POINT_EXPR:
6029 {
6030 extern int temp_slot_level;
6031 /* Start a new binding layer that will keep track of all cleanup
6032 actions to be performed. */
6033 expand_start_bindings (0);
6034
6035 target_temp_slot_level = temp_slot_level;
6036
6037 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6038 /* If we're going to use this value, load it up now. */
6039 if (! ignore)
6040 op0 = force_not_mem (op0);
6041 preserve_temp_slots (op0);
6042 expand_end_bindings (NULL_TREE, 0, 0);
6043 }
6044 return op0;
6045
6046 case CALL_EXPR:
6047 /* Check for a built-in function. */
6048 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6049 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6050 == FUNCTION_DECL)
6051 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6052 return expand_builtin (exp, target, subtarget, tmode, ignore);
6053
6054 /* If this call was expanded already by preexpand_calls,
6055 just return the result we got. */
6056 if (CALL_EXPR_RTL (exp) != 0)
6057 return CALL_EXPR_RTL (exp);
6058
6059 return expand_call (exp, target, ignore);
6060
6061 case NON_LVALUE_EXPR:
6062 case NOP_EXPR:
6063 case CONVERT_EXPR:
6064 case REFERENCE_EXPR:
6065 if (TREE_CODE (type) == UNION_TYPE)
6066 {
6067 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6068 if (target == 0)
6069 {
6070 if (mode != BLKmode)
6071 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6072 else
6073 target = assign_temp (type, 0, 1, 1);
6074 }
6075
6076 if (GET_CODE (target) == MEM)
6077 /* Store data into beginning of memory target. */
6078 store_expr (TREE_OPERAND (exp, 0),
6079 change_address (target, TYPE_MODE (valtype), 0), 0);
6080
6081 else if (GET_CODE (target) == REG)
6082 /* Store this field into a union of the proper type. */
6083 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6084 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6085 VOIDmode, 0, 1,
6086 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6087 else
6088 abort ();
6089
6090 /* Return the entire union. */
6091 return target;
6092 }
6093
6094 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6095 {
6096 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6097 ro_modifier);
6098
6099 /* If the signedness of the conversion differs and OP0 is
6100 a promoted SUBREG, clear that indication since we now
6101 have to do the proper extension. */
6102 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6103 && GET_CODE (op0) == SUBREG)
6104 SUBREG_PROMOTED_VAR_P (op0) = 0;
6105
6106 return op0;
6107 }
6108
6109 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6110 if (GET_MODE (op0) == mode)
6111 return op0;
6112
6113 /* If OP0 is a constant, just convert it into the proper mode. */
6114 if (CONSTANT_P (op0))
6115 return
6116 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6117 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6118
6119 if (modifier == EXPAND_INITIALIZER)
6120 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6121
6122 if (target == 0)
6123 return
6124 convert_to_mode (mode, op0,
6125 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6126 else
6127 convert_move (target, op0,
6128 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6129 return target;
6130
6131 case PLUS_EXPR:
6132 /* We come here from MINUS_EXPR when the second operand is a
6133 constant. */
6134 plus_expr:
6135 this_optab = add_optab;
6136
6137 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6138 something else, make sure we add the register to the constant and
6139 then to the other thing. This case can occur during strength
6140 reduction and doing it this way will produce better code if the
6141 frame pointer or argument pointer is eliminated.
6142
6143 fold-const.c will ensure that the constant is always in the inner
6144 PLUS_EXPR, so the only case we need to do anything about is if
6145 sp, ap, or fp is our second argument, in which case we must swap
6146 the innermost first argument and our second argument. */
6147
6148 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6149 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6150 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6151 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6152 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6153 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6154 {
6155 tree t = TREE_OPERAND (exp, 1);
6156
6157 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6158 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6159 }
6160
6161 /* If the result is to be ptr_mode and we are adding an integer to
6162 something, we might be forming a constant. So try to use
6163 plus_constant. If it produces a sum and we can't accept it,
6164 use force_operand. This allows P = &ARR[const] to generate
6165 efficient code on machines where a SYMBOL_REF is not a valid
6166 address.
6167
6168 If this is an EXPAND_SUM call, always return the sum. */
6169 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6170 || mode == ptr_mode)
6171 {
6172 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6173 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6174 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6175 {
6176 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6177 EXPAND_SUM);
6178 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6179 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6180 op1 = force_operand (op1, target);
6181 return op1;
6182 }
6183
6184 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6185 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6186 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6187 {
6188 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6189 EXPAND_SUM);
6190 if (! CONSTANT_P (op0))
6191 {
6192 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6193 VOIDmode, modifier);
6194 /* Don't go to both_summands if modifier
6195 says it's not right to return a PLUS. */
6196 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6197 goto binop2;
6198 goto both_summands;
6199 }
6200 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6201 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6202 op0 = force_operand (op0, target);
6203 return op0;
6204 }
6205 }
6206
6207 /* No sense saving up arithmetic to be done
6208 if it's all in the wrong mode to form part of an address.
6209 And force_operand won't know whether to sign-extend or
6210 zero-extend. */
6211 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6212 || mode != ptr_mode)
6213 goto binop;
6214
6215 preexpand_calls (exp);
6216 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6217 subtarget = 0;
6218
6219 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6220 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6221
6222 both_summands:
6223 /* Make sure any term that's a sum with a constant comes last. */
6224 if (GET_CODE (op0) == PLUS
6225 && CONSTANT_P (XEXP (op0, 1)))
6226 {
6227 temp = op0;
6228 op0 = op1;
6229 op1 = temp;
6230 }
6231 /* If adding to a sum including a constant,
6232 associate it to put the constant outside. */
6233 if (GET_CODE (op1) == PLUS
6234 && CONSTANT_P (XEXP (op1, 1)))
6235 {
6236 rtx constant_term = const0_rtx;
6237
6238 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6239 if (temp != 0)
6240 op0 = temp;
6241 /* Ensure that MULT comes first if there is one. */
6242 else if (GET_CODE (op0) == MULT)
6243 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
6244 else
6245 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
6246
6247 /* Let's also eliminate constants from op0 if possible. */
6248 op0 = eliminate_constant_term (op0, &constant_term);
6249
6250 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6251 their sum should be a constant. Form it into OP1, since the
6252 result we want will then be OP0 + OP1. */
6253
6254 temp = simplify_binary_operation (PLUS, mode, constant_term,
6255 XEXP (op1, 1));
6256 if (temp != 0)
6257 op1 = temp;
6258 else
6259 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6260 }
6261
6262 /* Put a constant term last and put a multiplication first. */
6263 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6264 temp = op1, op1 = op0, op0 = temp;
6265
6266 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6267 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
6268
6269 case MINUS_EXPR:
6270 /* For initializers, we are allowed to return a MINUS of two
6271 symbolic constants. Here we handle all cases when both operands
6272 are constant. */
6273 /* Handle difference of two symbolic constants,
6274 for the sake of an initializer. */
6275 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6276 && really_constant_p (TREE_OPERAND (exp, 0))
6277 && really_constant_p (TREE_OPERAND (exp, 1)))
6278 {
6279 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6280 VOIDmode, ro_modifier);
6281 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6282 VOIDmode, ro_modifier);
6283
6284 /* If the last operand is a CONST_INT, use plus_constant of
6285 the negated constant. Else make the MINUS. */
6286 if (GET_CODE (op1) == CONST_INT)
6287 return plus_constant (op0, - INTVAL (op1));
6288 else
6289 return gen_rtx (MINUS, mode, op0, op1);
6290 }
6291 /* Convert A - const to A + (-const). */
6292 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6293 {
6294 tree negated = fold (build1 (NEGATE_EXPR, type,
6295 TREE_OPERAND (exp, 1)));
6296
6297 /* Deal with the case where we can't negate the constant
6298 in TYPE. */
6299 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6300 {
6301 tree newtype = signed_type (type);
6302 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6303 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6304 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6305
6306 if (! TREE_OVERFLOW (newneg))
6307 return expand_expr (convert (type,
6308 build (PLUS_EXPR, newtype,
6309 newop0, newneg)),
6310 target, tmode, ro_modifier);
6311 }
6312 else
6313 {
6314 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6315 goto plus_expr;
6316 }
6317 }
6318 this_optab = sub_optab;
6319 goto binop;
6320
6321 case MULT_EXPR:
6322 preexpand_calls (exp);
6323 /* If first operand is constant, swap them.
6324 Thus the following special case checks need only
6325 check the second operand. */
6326 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6327 {
6328 register tree t1 = TREE_OPERAND (exp, 0);
6329 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6330 TREE_OPERAND (exp, 1) = t1;
6331 }
6332
6333 /* Attempt to return something suitable for generating an
6334 indexed address, for machines that support that. */
6335
6336 if (modifier == EXPAND_SUM && mode == ptr_mode
6337 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6338 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6339 {
6340 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6341 EXPAND_SUM);
6342
6343 /* Apply distributive law if OP0 is x+c. */
6344 if (GET_CODE (op0) == PLUS
6345 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6346 return gen_rtx (PLUS, mode,
6347 gen_rtx (MULT, mode, XEXP (op0, 0),
6348 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6349 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6350 * INTVAL (XEXP (op0, 1))));
6351
6352 if (GET_CODE (op0) != REG)
6353 op0 = force_operand (op0, NULL_RTX);
6354 if (GET_CODE (op0) != REG)
6355 op0 = copy_to_mode_reg (mode, op0);
6356
6357 return gen_rtx (MULT, mode, op0,
6358 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6359 }
6360
6361 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6362 subtarget = 0;
6363
6364 /* Check for multiplying things that have been extended
6365 from a narrower type. If this machine supports multiplying
6366 in that narrower type with a result in the desired type,
6367 do it that way, and avoid the explicit type-conversion. */
6368 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6369 && TREE_CODE (type) == INTEGER_TYPE
6370 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6371 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6372 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6373 && int_fits_type_p (TREE_OPERAND (exp, 1),
6374 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6375 /* Don't use a widening multiply if a shift will do. */
6376 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6377 > HOST_BITS_PER_WIDE_INT)
6378 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6379 ||
6380 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6381 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6382 ==
6383 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6384 /* If both operands are extended, they must either both
6385 be zero-extended or both be sign-extended. */
6386 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6387 ==
6388 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6389 {
6390 enum machine_mode innermode
6391 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6392 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6393 ? smul_widen_optab : umul_widen_optab);
6394 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6395 ? umul_widen_optab : smul_widen_optab);
6396 if (mode == GET_MODE_WIDER_MODE (innermode))
6397 {
6398 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6399 {
6400 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6401 NULL_RTX, VOIDmode, 0);
6402 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6403 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6404 VOIDmode, 0);
6405 else
6406 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6407 NULL_RTX, VOIDmode, 0);
6408 goto binop2;
6409 }
6410 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6411 && innermode == word_mode)
6412 {
6413 rtx htem;
6414 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6415 NULL_RTX, VOIDmode, 0);
6416 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6417 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6418 VOIDmode, 0);
6419 else
6420 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6421 NULL_RTX, VOIDmode, 0);
6422 temp = expand_binop (mode, other_optab, op0, op1, target,
6423 unsignedp, OPTAB_LIB_WIDEN);
6424 htem = expand_mult_highpart_adjust (innermode,
6425 gen_highpart (innermode, temp),
6426 op0, op1,
6427 gen_highpart (innermode, temp),
6428 unsignedp);
6429 emit_move_insn (gen_highpart (innermode, temp), htem);
6430 return temp;
6431 }
6432 }
6433 }
6434 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6435 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6436 return expand_mult (mode, op0, op1, target, unsignedp);
6437
6438 case TRUNC_DIV_EXPR:
6439 case FLOOR_DIV_EXPR:
6440 case CEIL_DIV_EXPR:
6441 case ROUND_DIV_EXPR:
6442 case EXACT_DIV_EXPR:
6443 preexpand_calls (exp);
6444 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6445 subtarget = 0;
6446 /* Possible optimization: compute the dividend with EXPAND_SUM
6447 then if the divisor is constant can optimize the case
6448 where some terms of the dividend have coeffs divisible by it. */
6449 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6450 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6451 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6452
6453 case RDIV_EXPR:
6454 this_optab = flodiv_optab;
6455 goto binop;
6456
6457 case TRUNC_MOD_EXPR:
6458 case FLOOR_MOD_EXPR:
6459 case CEIL_MOD_EXPR:
6460 case ROUND_MOD_EXPR:
6461 preexpand_calls (exp);
6462 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6463 subtarget = 0;
6464 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6465 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6466 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6467
6468 case FIX_ROUND_EXPR:
6469 case FIX_FLOOR_EXPR:
6470 case FIX_CEIL_EXPR:
6471 abort (); /* Not used for C. */
6472
6473 case FIX_TRUNC_EXPR:
6474 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6475 if (target == 0)
6476 target = gen_reg_rtx (mode);
6477 expand_fix (target, op0, unsignedp);
6478 return target;
6479
6480 case FLOAT_EXPR:
6481 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6482 if (target == 0)
6483 target = gen_reg_rtx (mode);
6484 /* expand_float can't figure out what to do if FROM has VOIDmode.
6485 So give it the correct mode. With -O, cse will optimize this. */
6486 if (GET_MODE (op0) == VOIDmode)
6487 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6488 op0);
6489 expand_float (target, op0,
6490 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6491 return target;
6492
6493 case NEGATE_EXPR:
6494 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6495 temp = expand_unop (mode, neg_optab, op0, target, 0);
6496 if (temp == 0)
6497 abort ();
6498 return temp;
6499
6500 case ABS_EXPR:
6501 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6502
6503 /* Handle complex values specially. */
6504 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6505 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6506 return expand_complex_abs (mode, op0, target, unsignedp);
6507
6508 /* Unsigned abs is simply the operand. Testing here means we don't
6509 risk generating incorrect code below. */
6510 if (TREE_UNSIGNED (type))
6511 return op0;
6512
6513 return expand_abs (mode, op0, target, unsignedp,
6514 safe_from_p (target, TREE_OPERAND (exp, 0)));
6515
6516 case MAX_EXPR:
6517 case MIN_EXPR:
6518 target = original_target;
6519 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6520 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6521 || GET_MODE (target) != mode
6522 || (GET_CODE (target) == REG
6523 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6524 target = gen_reg_rtx (mode);
6525 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6526 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6527
6528 /* First try to do it with a special MIN or MAX instruction.
6529 If that does not win, use a conditional jump to select the proper
6530 value. */
6531 this_optab = (TREE_UNSIGNED (type)
6532 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6533 : (code == MIN_EXPR ? smin_optab : smax_optab));
6534
6535 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6536 OPTAB_WIDEN);
6537 if (temp != 0)
6538 return temp;
6539
6540 /* At this point, a MEM target is no longer useful; we will get better
6541 code without it. */
6542
6543 if (GET_CODE (target) == MEM)
6544 target = gen_reg_rtx (mode);
6545
6546 if (target != op0)
6547 emit_move_insn (target, op0);
6548
6549 op0 = gen_label_rtx ();
6550
6551 /* If this mode is an integer too wide to compare properly,
6552 compare word by word. Rely on cse to optimize constant cases. */
6553 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6554 {
6555 if (code == MAX_EXPR)
6556 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6557 target, op1, NULL_RTX, op0);
6558 else
6559 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6560 op1, target, NULL_RTX, op0);
6561 emit_move_insn (target, op1);
6562 }
6563 else
6564 {
6565 if (code == MAX_EXPR)
6566 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6567 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6568 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6569 else
6570 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6571 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6572 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6573 if (temp == const0_rtx)
6574 emit_move_insn (target, op1);
6575 else if (temp != const_true_rtx)
6576 {
6577 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6578 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6579 else
6580 abort ();
6581 emit_move_insn (target, op1);
6582 }
6583 }
6584 emit_label (op0);
6585 return target;
6586
6587 case BIT_NOT_EXPR:
6588 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6589 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6590 if (temp == 0)
6591 abort ();
6592 return temp;
6593
6594 case FFS_EXPR:
6595 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6596 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6597 if (temp == 0)
6598 abort ();
6599 return temp;
6600
6601 /* ??? Can optimize bitwise operations with one arg constant.
6602 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6603 and (a bitwise1 b) bitwise2 b (etc)
6604 but that is probably not worth while. */
6605
6606 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6607 boolean values when we want in all cases to compute both of them. In
6608 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6609 as actual zero-or-1 values and then bitwise anding. In cases where
6610 there cannot be any side effects, better code would be made by
6611 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6612 how to recognize those cases. */
6613
6614 case TRUTH_AND_EXPR:
6615 case BIT_AND_EXPR:
6616 this_optab = and_optab;
6617 goto binop;
6618
6619 case TRUTH_OR_EXPR:
6620 case BIT_IOR_EXPR:
6621 this_optab = ior_optab;
6622 goto binop;
6623
6624 case TRUTH_XOR_EXPR:
6625 case BIT_XOR_EXPR:
6626 this_optab = xor_optab;
6627 goto binop;
6628
6629 case LSHIFT_EXPR:
6630 case RSHIFT_EXPR:
6631 case LROTATE_EXPR:
6632 case RROTATE_EXPR:
6633 preexpand_calls (exp);
6634 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6635 subtarget = 0;
6636 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6637 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6638 unsignedp);
6639
6640 /* Could determine the answer when only additive constants differ. Also,
6641 the addition of one can be handled by changing the condition. */
6642 case LT_EXPR:
6643 case LE_EXPR:
6644 case GT_EXPR:
6645 case GE_EXPR:
6646 case EQ_EXPR:
6647 case NE_EXPR:
6648 preexpand_calls (exp);
6649 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6650 if (temp != 0)
6651 return temp;
6652
6653 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6654 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6655 && original_target
6656 && GET_CODE (original_target) == REG
6657 && (GET_MODE (original_target)
6658 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6659 {
6660 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6661 VOIDmode, 0);
6662
6663 if (temp != original_target)
6664 temp = copy_to_reg (temp);
6665
6666 op1 = gen_label_rtx ();
6667 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6668 GET_MODE (temp), unsignedp, 0);
6669 emit_jump_insn (gen_beq (op1));
6670 emit_move_insn (temp, const1_rtx);
6671 emit_label (op1);
6672 return temp;
6673 }
6674
6675 /* If no set-flag instruction, must generate a conditional
6676 store into a temporary variable. Drop through
6677 and handle this like && and ||. */
6678
6679 case TRUTH_ANDIF_EXPR:
6680 case TRUTH_ORIF_EXPR:
6681 if (! ignore
6682 && (target == 0 || ! safe_from_p (target, exp)
6683 /* Make sure we don't have a hard reg (such as function's return
6684 value) live across basic blocks, if not optimizing. */
6685 || (!optimize && GET_CODE (target) == REG
6686 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6687 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6688
6689 if (target)
6690 emit_clr_insn (target);
6691
6692 op1 = gen_label_rtx ();
6693 jumpifnot (exp, op1);
6694
6695 if (target)
6696 emit_0_to_1_insn (target);
6697
6698 emit_label (op1);
6699 return ignore ? const0_rtx : target;
6700
6701 case TRUTH_NOT_EXPR:
6702 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6703 /* The parser is careful to generate TRUTH_NOT_EXPR
6704 only with operands that are always zero or one. */
6705 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6706 target, 1, OPTAB_LIB_WIDEN);
6707 if (temp == 0)
6708 abort ();
6709 return temp;
6710
6711 case COMPOUND_EXPR:
6712 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6713 emit_queue ();
6714 return expand_expr (TREE_OPERAND (exp, 1),
6715 (ignore ? const0_rtx : target),
6716 VOIDmode, 0);
6717
6718 case COND_EXPR:
6719 /* If we would have a "singleton" (see below) were it not for a
6720 conversion in each arm, bring that conversion back out. */
6721 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6722 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6723 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6724 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6725 {
6726 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6727 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6728
6729 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6730 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6731 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6732 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6733 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6734 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6735 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6736 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6737 return expand_expr (build1 (NOP_EXPR, type,
6738 build (COND_EXPR, TREE_TYPE (true),
6739 TREE_OPERAND (exp, 0),
6740 true, false)),
6741 target, tmode, modifier);
6742 }
6743
6744 {
6745 /* Note that COND_EXPRs whose type is a structure or union
6746 are required to be constructed to contain assignments of
6747 a temporary variable, so that we can evaluate them here
6748 for side effect only. If type is void, we must do likewise. */
6749
6750 /* If an arm of the branch requires a cleanup,
6751 only that cleanup is performed. */
6752
6753 tree singleton = 0;
6754 tree binary_op = 0, unary_op = 0;
6755
6756 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6757 convert it to our mode, if necessary. */
6758 if (integer_onep (TREE_OPERAND (exp, 1))
6759 && integer_zerop (TREE_OPERAND (exp, 2))
6760 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6761 {
6762 if (ignore)
6763 {
6764 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6765 ro_modifier);
6766 return const0_rtx;
6767 }
6768
6769 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
6770 if (GET_MODE (op0) == mode)
6771 return op0;
6772
6773 if (target == 0)
6774 target = gen_reg_rtx (mode);
6775 convert_move (target, op0, unsignedp);
6776 return target;
6777 }
6778
6779 /* Check for X ? A + B : A. If we have this, we can copy A to the
6780 output and conditionally add B. Similarly for unary operations.
6781 Don't do this if X has side-effects because those side effects
6782 might affect A or B and the "?" operation is a sequence point in
6783 ANSI. (operand_equal_p tests for side effects.) */
6784
6785 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6786 && operand_equal_p (TREE_OPERAND (exp, 2),
6787 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6788 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6789 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6790 && operand_equal_p (TREE_OPERAND (exp, 1),
6791 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6792 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6793 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6794 && operand_equal_p (TREE_OPERAND (exp, 2),
6795 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6796 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6797 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6798 && operand_equal_p (TREE_OPERAND (exp, 1),
6799 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6800 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6801
6802 /* If we are not to produce a result, we have no target. Otherwise,
6803 if a target was specified use it; it will not be used as an
6804 intermediate target unless it is safe. If no target, use a
6805 temporary. */
6806
6807 if (ignore)
6808 temp = 0;
6809 else if (original_target
6810 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6811 || (singleton && GET_CODE (original_target) == REG
6812 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6813 && original_target == var_rtx (singleton)))
6814 && GET_MODE (original_target) == mode
6815 && ! (GET_CODE (original_target) == MEM
6816 && MEM_VOLATILE_P (original_target)))
6817 temp = original_target;
6818 else if (TREE_ADDRESSABLE (type))
6819 abort ();
6820 else
6821 temp = assign_temp (type, 0, 0, 1);
6822
6823 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6824 do the test of X as a store-flag operation, do this as
6825 A + ((X != 0) << log C). Similarly for other simple binary
6826 operators. Only do for C == 1 if BRANCH_COST is low. */
6827 if (temp && singleton && binary_op
6828 && (TREE_CODE (binary_op) == PLUS_EXPR
6829 || TREE_CODE (binary_op) == MINUS_EXPR
6830 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6831 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6832 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6833 : integer_onep (TREE_OPERAND (binary_op, 1)))
6834 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6835 {
6836 rtx result;
6837 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6838 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6839 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6840 : xor_optab);
6841
6842 /* If we had X ? A : A + 1, do this as A + (X == 0).
6843
6844 We have to invert the truth value here and then put it
6845 back later if do_store_flag fails. We cannot simply copy
6846 TREE_OPERAND (exp, 0) to another variable and modify that
6847 because invert_truthvalue can modify the tree pointed to
6848 by its argument. */
6849 if (singleton == TREE_OPERAND (exp, 1))
6850 TREE_OPERAND (exp, 0)
6851 = invert_truthvalue (TREE_OPERAND (exp, 0));
6852
6853 result = do_store_flag (TREE_OPERAND (exp, 0),
6854 (safe_from_p (temp, singleton)
6855 ? temp : NULL_RTX),
6856 mode, BRANCH_COST <= 1);
6857
6858 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6859 result = expand_shift (LSHIFT_EXPR, mode, result,
6860 build_int_2 (tree_log2
6861 (TREE_OPERAND
6862 (binary_op, 1)),
6863 0),
6864 (safe_from_p (temp, singleton)
6865 ? temp : NULL_RTX), 0);
6866
6867 if (result)
6868 {
6869 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6870 return expand_binop (mode, boptab, op1, result, temp,
6871 unsignedp, OPTAB_LIB_WIDEN);
6872 }
6873 else if (singleton == TREE_OPERAND (exp, 1))
6874 TREE_OPERAND (exp, 0)
6875 = invert_truthvalue (TREE_OPERAND (exp, 0));
6876 }
6877
6878 do_pending_stack_adjust ();
6879 NO_DEFER_POP;
6880 op0 = gen_label_rtx ();
6881
6882 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6883 {
6884 if (temp != 0)
6885 {
6886 /* If the target conflicts with the other operand of the
6887 binary op, we can't use it. Also, we can't use the target
6888 if it is a hard register, because evaluating the condition
6889 might clobber it. */
6890 if ((binary_op
6891 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6892 || (GET_CODE (temp) == REG
6893 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6894 temp = gen_reg_rtx (mode);
6895 store_expr (singleton, temp, 0);
6896 }
6897 else
6898 expand_expr (singleton,
6899 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6900 if (singleton == TREE_OPERAND (exp, 1))
6901 jumpif (TREE_OPERAND (exp, 0), op0);
6902 else
6903 jumpifnot (TREE_OPERAND (exp, 0), op0);
6904
6905 start_cleanup_deferral ();
6906 if (binary_op && temp == 0)
6907 /* Just touch the other operand. */
6908 expand_expr (TREE_OPERAND (binary_op, 1),
6909 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6910 else if (binary_op)
6911 store_expr (build (TREE_CODE (binary_op), type,
6912 make_tree (type, temp),
6913 TREE_OPERAND (binary_op, 1)),
6914 temp, 0);
6915 else
6916 store_expr (build1 (TREE_CODE (unary_op), type,
6917 make_tree (type, temp)),
6918 temp, 0);
6919 op1 = op0;
6920 }
6921 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6922 comparison operator. If we have one of these cases, set the
6923 output to A, branch on A (cse will merge these two references),
6924 then set the output to FOO. */
6925 else if (temp
6926 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6927 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6928 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6929 TREE_OPERAND (exp, 1), 0)
6930 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6931 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
6932 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6933 {
6934 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6935 temp = gen_reg_rtx (mode);
6936 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6937 jumpif (TREE_OPERAND (exp, 0), op0);
6938
6939 start_cleanup_deferral ();
6940 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6941 op1 = op0;
6942 }
6943 else if (temp
6944 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6945 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6946 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6947 TREE_OPERAND (exp, 2), 0)
6948 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6949 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
6950 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6951 {
6952 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6953 temp = gen_reg_rtx (mode);
6954 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6955 jumpifnot (TREE_OPERAND (exp, 0), op0);
6956
6957 start_cleanup_deferral ();
6958 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6959 op1 = op0;
6960 }
6961 else
6962 {
6963 op1 = gen_label_rtx ();
6964 jumpifnot (TREE_OPERAND (exp, 0), op0);
6965
6966 start_cleanup_deferral ();
6967 if (temp != 0)
6968 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6969 else
6970 expand_expr (TREE_OPERAND (exp, 1),
6971 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6972 end_cleanup_deferral ();
6973 emit_queue ();
6974 emit_jump_insn (gen_jump (op1));
6975 emit_barrier ();
6976 emit_label (op0);
6977 start_cleanup_deferral ();
6978 if (temp != 0)
6979 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6980 else
6981 expand_expr (TREE_OPERAND (exp, 2),
6982 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6983 }
6984
6985 end_cleanup_deferral ();
6986
6987 emit_queue ();
6988 emit_label (op1);
6989 OK_DEFER_POP;
6990
6991 return temp;
6992 }
6993
6994 case TARGET_EXPR:
6995 {
6996 /* Something needs to be initialized, but we didn't know
6997 where that thing was when building the tree. For example,
6998 it could be the return value of a function, or a parameter
6999 to a function which lays down in the stack, or a temporary
7000 variable which must be passed by reference.
7001
7002 We guarantee that the expression will either be constructed
7003 or copied into our original target. */
7004
7005 tree slot = TREE_OPERAND (exp, 0);
7006 tree cleanups = NULL_TREE;
7007 tree exp1;
7008 rtx temp;
7009
7010 if (TREE_CODE (slot) != VAR_DECL)
7011 abort ();
7012
7013 if (! ignore)
7014 target = original_target;
7015
7016 if (target == 0)
7017 {
7018 if (DECL_RTL (slot) != 0)
7019 {
7020 target = DECL_RTL (slot);
7021 /* If we have already expanded the slot, so don't do
7022 it again. (mrs) */
7023 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7024 return target;
7025 }
7026 else
7027 {
7028 target = assign_temp (type, 2, 0, 1);
7029 /* All temp slots at this level must not conflict. */
7030 preserve_temp_slots (target);
7031 DECL_RTL (slot) = target;
7032 if (TREE_ADDRESSABLE (slot))
7033 {
7034 TREE_ADDRESSABLE (slot) = 0;
7035 mark_addressable (slot);
7036 }
7037
7038 /* Since SLOT is not known to the called function
7039 to belong to its stack frame, we must build an explicit
7040 cleanup. This case occurs when we must build up a reference
7041 to pass the reference as an argument. In this case,
7042 it is very likely that such a reference need not be
7043 built here. */
7044
7045 if (TREE_OPERAND (exp, 2) == 0)
7046 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7047 cleanups = TREE_OPERAND (exp, 2);
7048 }
7049 }
7050 else
7051 {
7052 /* This case does occur, when expanding a parameter which
7053 needs to be constructed on the stack. The target
7054 is the actual stack address that we want to initialize.
7055 The function we call will perform the cleanup in this case. */
7056
7057 /* If we have already assigned it space, use that space,
7058 not target that we were passed in, as our target
7059 parameter is only a hint. */
7060 if (DECL_RTL (slot) != 0)
7061 {
7062 target = DECL_RTL (slot);
7063 /* If we have already expanded the slot, so don't do
7064 it again. (mrs) */
7065 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7066 return target;
7067 }
7068 else
7069 {
7070 DECL_RTL (slot) = target;
7071 /* If we must have an addressable slot, then make sure that
7072 the RTL that we just stored in slot is OK. */
7073 if (TREE_ADDRESSABLE (slot))
7074 {
7075 TREE_ADDRESSABLE (slot) = 0;
7076 mark_addressable (slot);
7077 }
7078 }
7079 }
7080
7081 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7082 /* Mark it as expanded. */
7083 TREE_OPERAND (exp, 1) = NULL_TREE;
7084
7085 store_expr (exp1, target, 0);
7086
7087 expand_decl_cleanup (NULL_TREE, cleanups);
7088
7089 return target;
7090 }
7091
7092 case INIT_EXPR:
7093 {
7094 tree lhs = TREE_OPERAND (exp, 0);
7095 tree rhs = TREE_OPERAND (exp, 1);
7096 tree noncopied_parts = 0;
7097 tree lhs_type = TREE_TYPE (lhs);
7098
7099 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7100 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7101 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7102 TYPE_NONCOPIED_PARTS (lhs_type));
7103 while (noncopied_parts != 0)
7104 {
7105 expand_assignment (TREE_VALUE (noncopied_parts),
7106 TREE_PURPOSE (noncopied_parts), 0, 0);
7107 noncopied_parts = TREE_CHAIN (noncopied_parts);
7108 }
7109 return temp;
7110 }
7111
7112 case MODIFY_EXPR:
7113 {
7114 /* If lhs is complex, expand calls in rhs before computing it.
7115 That's so we don't compute a pointer and save it over a call.
7116 If lhs is simple, compute it first so we can give it as a
7117 target if the rhs is just a call. This avoids an extra temp and copy
7118 and that prevents a partial-subsumption which makes bad code.
7119 Actually we could treat component_ref's of vars like vars. */
7120
7121 tree lhs = TREE_OPERAND (exp, 0);
7122 tree rhs = TREE_OPERAND (exp, 1);
7123 tree noncopied_parts = 0;
7124 tree lhs_type = TREE_TYPE (lhs);
7125
7126 temp = 0;
7127
7128 if (TREE_CODE (lhs) != VAR_DECL
7129 && TREE_CODE (lhs) != RESULT_DECL
7130 && TREE_CODE (lhs) != PARM_DECL
7131 && ! (TREE_CODE (lhs) == INDIRECT_REF
7132 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7133 preexpand_calls (exp);
7134
7135 /* Check for |= or &= of a bitfield of size one into another bitfield
7136 of size 1. In this case, (unless we need the result of the
7137 assignment) we can do this more efficiently with a
7138 test followed by an assignment, if necessary.
7139
7140 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7141 things change so we do, this code should be enhanced to
7142 support it. */
7143 if (ignore
7144 && TREE_CODE (lhs) == COMPONENT_REF
7145 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7146 || TREE_CODE (rhs) == BIT_AND_EXPR)
7147 && TREE_OPERAND (rhs, 0) == lhs
7148 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7149 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7150 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7151 {
7152 rtx label = gen_label_rtx ();
7153
7154 do_jump (TREE_OPERAND (rhs, 1),
7155 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7156 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7157 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7158 (TREE_CODE (rhs) == BIT_IOR_EXPR
7159 ? integer_one_node
7160 : integer_zero_node)),
7161 0, 0);
7162 do_pending_stack_adjust ();
7163 emit_label (label);
7164 return const0_rtx;
7165 }
7166
7167 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7168 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7169 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7170 TYPE_NONCOPIED_PARTS (lhs_type));
7171
7172 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7173 while (noncopied_parts != 0)
7174 {
7175 expand_assignment (TREE_PURPOSE (noncopied_parts),
7176 TREE_VALUE (noncopied_parts), 0, 0);
7177 noncopied_parts = TREE_CHAIN (noncopied_parts);
7178 }
7179 return temp;
7180 }
7181
7182 case PREINCREMENT_EXPR:
7183 case PREDECREMENT_EXPR:
7184 return expand_increment (exp, 0, ignore);
7185
7186 case POSTINCREMENT_EXPR:
7187 case POSTDECREMENT_EXPR:
7188 /* Faster to treat as pre-increment if result is not used. */
7189 return expand_increment (exp, ! ignore, ignore);
7190
7191 case ADDR_EXPR:
7192 /* If nonzero, TEMP will be set to the address of something that might
7193 be a MEM corresponding to a stack slot. */
7194 temp = 0;
7195
7196 /* Are we taking the address of a nested function? */
7197 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7198 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7199 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7200 {
7201 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7202 op0 = force_operand (op0, target);
7203 }
7204 /* If we are taking the address of something erroneous, just
7205 return a zero. */
7206 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7207 return const0_rtx;
7208 else
7209 {
7210 /* We make sure to pass const0_rtx down if we came in with
7211 ignore set, to avoid doing the cleanups twice for something. */
7212 op0 = expand_expr (TREE_OPERAND (exp, 0),
7213 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7214 (modifier == EXPAND_INITIALIZER
7215 ? modifier : EXPAND_CONST_ADDRESS));
7216
7217 /* If we are going to ignore the result, OP0 will have been set
7218 to const0_rtx, so just return it. Don't get confused and
7219 think we are taking the address of the constant. */
7220 if (ignore)
7221 return op0;
7222
7223 op0 = protect_from_queue (op0, 0);
7224
7225 /* We would like the object in memory. If it is a constant,
7226 we can have it be statically allocated into memory. For
7227 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7228 memory and store the value into it. */
7229
7230 if (CONSTANT_P (op0))
7231 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7232 op0);
7233 else if (GET_CODE (op0) == MEM)
7234 {
7235 mark_temp_addr_taken (op0);
7236 temp = XEXP (op0, 0);
7237 }
7238
7239 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7240 || GET_CODE (op0) == CONCAT)
7241 {
7242 /* If this object is in a register, it must be not
7243 be BLKmode. */
7244 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7245 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7246
7247 mark_temp_addr_taken (memloc);
7248 emit_move_insn (memloc, op0);
7249 op0 = memloc;
7250 }
7251
7252 if (GET_CODE (op0) != MEM)
7253 abort ();
7254
7255 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7256 {
7257 temp = XEXP (op0, 0);
7258 #ifdef POINTERS_EXTEND_UNSIGNED
7259 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7260 && mode == ptr_mode)
7261 temp = convert_memory_address (ptr_mode, temp);
7262 #endif
7263 return temp;
7264 }
7265
7266 op0 = force_operand (XEXP (op0, 0), target);
7267 }
7268
7269 if (flag_force_addr && GET_CODE (op0) != REG)
7270 op0 = force_reg (Pmode, op0);
7271
7272 if (GET_CODE (op0) == REG
7273 && ! REG_USERVAR_P (op0))
7274 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7275
7276 /* If we might have had a temp slot, add an equivalent address
7277 for it. */
7278 if (temp != 0)
7279 update_temp_slot_address (temp, op0);
7280
7281 #ifdef POINTERS_EXTEND_UNSIGNED
7282 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7283 && mode == ptr_mode)
7284 op0 = convert_memory_address (ptr_mode, op0);
7285 #endif
7286
7287 return op0;
7288
7289 case ENTRY_VALUE_EXPR:
7290 abort ();
7291
7292 /* COMPLEX type for Extended Pascal & Fortran */
7293 case COMPLEX_EXPR:
7294 {
7295 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7296 rtx insns;
7297
7298 /* Get the rtx code of the operands. */
7299 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7300 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7301
7302 if (! target)
7303 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7304
7305 start_sequence ();
7306
7307 /* Move the real (op0) and imaginary (op1) parts to their location. */
7308 emit_move_insn (gen_realpart (mode, target), op0);
7309 emit_move_insn (gen_imagpart (mode, target), op1);
7310
7311 insns = get_insns ();
7312 end_sequence ();
7313
7314 /* Complex construction should appear as a single unit. */
7315 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7316 each with a separate pseudo as destination.
7317 It's not correct for flow to treat them as a unit. */
7318 if (GET_CODE (target) != CONCAT)
7319 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7320 else
7321 emit_insns (insns);
7322
7323 return target;
7324 }
7325
7326 case REALPART_EXPR:
7327 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7328 return gen_realpart (mode, op0);
7329
7330 case IMAGPART_EXPR:
7331 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7332 return gen_imagpart (mode, op0);
7333
7334 case CONJ_EXPR:
7335 {
7336 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7337 rtx imag_t;
7338 rtx insns;
7339
7340 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7341
7342 if (! target)
7343 target = gen_reg_rtx (mode);
7344
7345 start_sequence ();
7346
7347 /* Store the realpart and the negated imagpart to target. */
7348 emit_move_insn (gen_realpart (partmode, target),
7349 gen_realpart (partmode, op0));
7350
7351 imag_t = gen_imagpart (partmode, target);
7352 temp = expand_unop (partmode, neg_optab,
7353 gen_imagpart (partmode, op0), imag_t, 0);
7354 if (temp != imag_t)
7355 emit_move_insn (imag_t, temp);
7356
7357 insns = get_insns ();
7358 end_sequence ();
7359
7360 /* Conjugate should appear as a single unit
7361 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7362 each with a separate pseudo as destination.
7363 It's not correct for flow to treat them as a unit. */
7364 if (GET_CODE (target) != CONCAT)
7365 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7366 else
7367 emit_insns (insns);
7368
7369 return target;
7370 }
7371
7372 case TRY_CATCH_EXPR:
7373 {
7374 tree handler = TREE_OPERAND (exp, 1);
7375
7376 expand_eh_region_start ();
7377
7378 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7379
7380 expand_eh_region_end (handler);
7381
7382 return op0;
7383 }
7384
7385 case POPDCC_EXPR:
7386 {
7387 rtx dcc = get_dynamic_cleanup_chain ();
7388 emit_move_insn (dcc, validize_mem (gen_rtx (MEM, Pmode, dcc)));
7389 return const0_rtx;
7390 }
7391
7392 case POPDHC_EXPR:
7393 {
7394 rtx dhc = get_dynamic_handler_chain ();
7395 emit_move_insn (dhc, validize_mem (gen_rtx (MEM, Pmode, dhc)));
7396 return const0_rtx;
7397 }
7398
7399 case ERROR_MARK:
7400 op0 = CONST0_RTX (tmode);
7401 if (op0 != 0)
7402 return op0;
7403 return const0_rtx;
7404
7405 default:
7406 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7407 }
7408
7409 /* Here to do an ordinary binary operator, generating an instruction
7410 from the optab already placed in `this_optab'. */
7411 binop:
7412 preexpand_calls (exp);
7413 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7414 subtarget = 0;
7415 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7416 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7417 binop2:
7418 temp = expand_binop (mode, this_optab, op0, op1, target,
7419 unsignedp, OPTAB_LIB_WIDEN);
7420 if (temp == 0)
7421 abort ();
7422 return temp;
7423 }
7424
7425
7426 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7427
7428 void
7429 bc_expand_expr (exp)
7430 tree exp;
7431 {
7432 enum tree_code code;
7433 tree type, arg0;
7434 rtx r;
7435 struct binary_operator *binoptab;
7436 struct unary_operator *unoptab;
7437 struct increment_operator *incroptab;
7438 struct bc_label *lab, *lab1;
7439 enum bytecode_opcode opcode;
7440
7441
7442 code = TREE_CODE (exp);
7443
7444 switch (code)
7445 {
7446 case PARM_DECL:
7447
7448 if (DECL_RTL (exp) == 0)
7449 {
7450 error_with_decl (exp, "prior parameter's size depends on `%s'");
7451 return;
7452 }
7453
7454 bc_load_parmaddr (DECL_RTL (exp));
7455 bc_load_memory (TREE_TYPE (exp), exp);
7456
7457 return;
7458
7459 case VAR_DECL:
7460
7461 if (DECL_RTL (exp) == 0)
7462 abort ();
7463
7464 #if 0
7465 if (BYTECODE_LABEL (DECL_RTL (exp)))
7466 bc_load_externaddr (DECL_RTL (exp));
7467 else
7468 bc_load_localaddr (DECL_RTL (exp));
7469 #endif
7470 if (TREE_PUBLIC (exp))
7471 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7472 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7473 else
7474 bc_load_localaddr (DECL_RTL (exp));
7475
7476 bc_load_memory (TREE_TYPE (exp), exp);
7477 return;
7478
7479 case INTEGER_CST:
7480
7481 #ifdef DEBUG_PRINT_CODE
7482 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7483 #endif
7484 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7485 ? SImode
7486 : TYPE_MODE (TREE_TYPE (exp)))],
7487 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7488 return;
7489
7490 case REAL_CST:
7491
7492 #if 0
7493 #ifdef DEBUG_PRINT_CODE
7494 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7495 #endif
7496 /* FIX THIS: find a better way to pass real_cst's. -bson */
7497 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7498 (double) TREE_REAL_CST (exp));
7499 #else
7500 abort ();
7501 #endif
7502
7503 return;
7504
7505 case CALL_EXPR:
7506
7507 /* We build a call description vector describing the type of
7508 the return value and of the arguments; this call vector,
7509 together with a pointer to a location for the return value
7510 and the base of the argument list, is passed to the low
7511 level machine dependent call subroutine, which is responsible
7512 for putting the arguments wherever real functions expect
7513 them, as well as getting the return value back. */
7514 {
7515 tree calldesc = 0, arg;
7516 int nargs = 0, i;
7517 rtx retval;
7518
7519 /* Push the evaluated args on the evaluation stack in reverse
7520 order. Also make an entry for each arg in the calldesc
7521 vector while we're at it. */
7522
7523 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7524
7525 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7526 {
7527 ++nargs;
7528 bc_expand_expr (TREE_VALUE (arg));
7529
7530 calldesc = tree_cons ((tree) 0,
7531 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7532 calldesc);
7533 calldesc = tree_cons ((tree) 0,
7534 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7535 calldesc);
7536 }
7537
7538 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7539
7540 /* Allocate a location for the return value and push its
7541 address on the evaluation stack. Also make an entry
7542 at the front of the calldesc for the return value type. */
7543
7544 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7545 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7546 bc_load_localaddr (retval);
7547
7548 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7549 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7550
7551 /* Prepend the argument count. */
7552 calldesc = tree_cons ((tree) 0,
7553 build_int_2 (nargs, 0),
7554 calldesc);
7555
7556 /* Push the address of the call description vector on the stack. */
7557 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7558 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7559 build_index_type (build_int_2 (nargs * 2, 0)));
7560 r = output_constant_def (calldesc);
7561 bc_load_externaddr (r);
7562
7563 /* Push the address of the function to be called. */
7564 bc_expand_expr (TREE_OPERAND (exp, 0));
7565
7566 /* Call the function, popping its address and the calldesc vector
7567 address off the evaluation stack in the process. */
7568 bc_emit_instruction (call);
7569
7570 /* Pop the arguments off the stack. */
7571 bc_adjust_stack (nargs);
7572
7573 /* Load the return value onto the stack. */
7574 bc_load_localaddr (retval);
7575 bc_load_memory (type, TREE_OPERAND (exp, 0));
7576 }
7577 return;
7578
7579 case SAVE_EXPR:
7580
7581 if (!SAVE_EXPR_RTL (exp))
7582 {
7583 /* First time around: copy to local variable */
7584 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7585 TYPE_ALIGN (TREE_TYPE(exp)));
7586 bc_expand_expr (TREE_OPERAND (exp, 0));
7587 bc_emit_instruction (duplicate);
7588
7589 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7590 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7591 }
7592 else
7593 {
7594 /* Consecutive reference: use saved copy */
7595 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7596 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7597 }
7598 return;
7599
7600 #if 0
7601 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7602 how are they handled instead? */
7603 case LET_STMT:
7604
7605 TREE_USED (exp) = 1;
7606 bc_expand_expr (STMT_BODY (exp));
7607 return;
7608 #endif
7609
7610 case NOP_EXPR:
7611 case CONVERT_EXPR:
7612
7613 bc_expand_expr (TREE_OPERAND (exp, 0));
7614 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7615 return;
7616
7617 case MODIFY_EXPR:
7618
7619 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7620 return;
7621
7622 case ADDR_EXPR:
7623
7624 bc_expand_address (TREE_OPERAND (exp, 0));
7625 return;
7626
7627 case INDIRECT_REF:
7628
7629 bc_expand_expr (TREE_OPERAND (exp, 0));
7630 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7631 return;
7632
7633 case ARRAY_REF:
7634
7635 bc_expand_expr (bc_canonicalize_array_ref (exp));
7636 return;
7637
7638 case COMPONENT_REF:
7639
7640 bc_expand_component_address (exp);
7641
7642 /* If we have a bitfield, generate a proper load */
7643 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7644 return;
7645
7646 case COMPOUND_EXPR:
7647
7648 bc_expand_expr (TREE_OPERAND (exp, 0));
7649 bc_emit_instruction (drop);
7650 bc_expand_expr (TREE_OPERAND (exp, 1));
7651 return;
7652
7653 case COND_EXPR:
7654
7655 bc_expand_expr (TREE_OPERAND (exp, 0));
7656 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7657 lab = bc_get_bytecode_label ();
7658 bc_emit_bytecode (xjumpifnot);
7659 bc_emit_bytecode_labelref (lab);
7660
7661 #ifdef DEBUG_PRINT_CODE
7662 fputc ('\n', stderr);
7663 #endif
7664 bc_expand_expr (TREE_OPERAND (exp, 1));
7665 lab1 = bc_get_bytecode_label ();
7666 bc_emit_bytecode (jump);
7667 bc_emit_bytecode_labelref (lab1);
7668
7669 #ifdef DEBUG_PRINT_CODE
7670 fputc ('\n', stderr);
7671 #endif
7672
7673 bc_emit_bytecode_labeldef (lab);
7674 bc_expand_expr (TREE_OPERAND (exp, 2));
7675 bc_emit_bytecode_labeldef (lab1);
7676 return;
7677
7678 case TRUTH_ANDIF_EXPR:
7679
7680 opcode = xjumpifnot;
7681 goto andorif;
7682
7683 case TRUTH_ORIF_EXPR:
7684
7685 opcode = xjumpif;
7686 goto andorif;
7687
7688 case PLUS_EXPR:
7689
7690 binoptab = optab_plus_expr;
7691 goto binop;
7692
7693 case MINUS_EXPR:
7694
7695 binoptab = optab_minus_expr;
7696 goto binop;
7697
7698 case MULT_EXPR:
7699
7700 binoptab = optab_mult_expr;
7701 goto binop;
7702
7703 case TRUNC_DIV_EXPR:
7704 case FLOOR_DIV_EXPR:
7705 case CEIL_DIV_EXPR:
7706 case ROUND_DIV_EXPR:
7707 case EXACT_DIV_EXPR:
7708
7709 binoptab = optab_trunc_div_expr;
7710 goto binop;
7711
7712 case TRUNC_MOD_EXPR:
7713 case FLOOR_MOD_EXPR:
7714 case CEIL_MOD_EXPR:
7715 case ROUND_MOD_EXPR:
7716
7717 binoptab = optab_trunc_mod_expr;
7718 goto binop;
7719
7720 case FIX_ROUND_EXPR:
7721 case FIX_FLOOR_EXPR:
7722 case FIX_CEIL_EXPR:
7723 abort (); /* Not used for C. */
7724
7725 case FIX_TRUNC_EXPR:
7726 case FLOAT_EXPR:
7727 case MAX_EXPR:
7728 case MIN_EXPR:
7729 case FFS_EXPR:
7730 case LROTATE_EXPR:
7731 case RROTATE_EXPR:
7732 abort (); /* FIXME */
7733
7734 case RDIV_EXPR:
7735
7736 binoptab = optab_rdiv_expr;
7737 goto binop;
7738
7739 case BIT_AND_EXPR:
7740
7741 binoptab = optab_bit_and_expr;
7742 goto binop;
7743
7744 case BIT_IOR_EXPR:
7745
7746 binoptab = optab_bit_ior_expr;
7747 goto binop;
7748
7749 case BIT_XOR_EXPR:
7750
7751 binoptab = optab_bit_xor_expr;
7752 goto binop;
7753
7754 case LSHIFT_EXPR:
7755
7756 binoptab = optab_lshift_expr;
7757 goto binop;
7758
7759 case RSHIFT_EXPR:
7760
7761 binoptab = optab_rshift_expr;
7762 goto binop;
7763
7764 case TRUTH_AND_EXPR:
7765
7766 binoptab = optab_truth_and_expr;
7767 goto binop;
7768
7769 case TRUTH_OR_EXPR:
7770
7771 binoptab = optab_truth_or_expr;
7772 goto binop;
7773
7774 case LT_EXPR:
7775
7776 binoptab = optab_lt_expr;
7777 goto binop;
7778
7779 case LE_EXPR:
7780
7781 binoptab = optab_le_expr;
7782 goto binop;
7783
7784 case GE_EXPR:
7785
7786 binoptab = optab_ge_expr;
7787 goto binop;
7788
7789 case GT_EXPR:
7790
7791 binoptab = optab_gt_expr;
7792 goto binop;
7793
7794 case EQ_EXPR:
7795
7796 binoptab = optab_eq_expr;
7797 goto binop;
7798
7799 case NE_EXPR:
7800
7801 binoptab = optab_ne_expr;
7802 goto binop;
7803
7804 case NEGATE_EXPR:
7805
7806 unoptab = optab_negate_expr;
7807 goto unop;
7808
7809 case BIT_NOT_EXPR:
7810
7811 unoptab = optab_bit_not_expr;
7812 goto unop;
7813
7814 case TRUTH_NOT_EXPR:
7815
7816 unoptab = optab_truth_not_expr;
7817 goto unop;
7818
7819 case PREDECREMENT_EXPR:
7820
7821 incroptab = optab_predecrement_expr;
7822 goto increment;
7823
7824 case PREINCREMENT_EXPR:
7825
7826 incroptab = optab_preincrement_expr;
7827 goto increment;
7828
7829 case POSTDECREMENT_EXPR:
7830
7831 incroptab = optab_postdecrement_expr;
7832 goto increment;
7833
7834 case POSTINCREMENT_EXPR:
7835
7836 incroptab = optab_postincrement_expr;
7837 goto increment;
7838
7839 case CONSTRUCTOR:
7840
7841 bc_expand_constructor (exp);
7842 return;
7843
7844 case ERROR_MARK:
7845 case RTL_EXPR:
7846
7847 return;
7848
7849 case BIND_EXPR:
7850 {
7851 tree vars = TREE_OPERAND (exp, 0);
7852 int vars_need_expansion = 0;
7853
7854 /* Need to open a binding contour here because
7855 if there are any cleanups they most be contained here. */
7856 expand_start_bindings (0);
7857
7858 /* Mark the corresponding BLOCK for output. */
7859 if (TREE_OPERAND (exp, 2) != 0)
7860 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7861
7862 /* If VARS have not yet been expanded, expand them now. */
7863 while (vars)
7864 {
7865 if (DECL_RTL (vars) == 0)
7866 {
7867 vars_need_expansion = 1;
7868 expand_decl (vars);
7869 }
7870 expand_decl_init (vars);
7871 vars = TREE_CHAIN (vars);
7872 }
7873
7874 bc_expand_expr (TREE_OPERAND (exp, 1));
7875
7876 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7877
7878 return;
7879 }
7880
7881 default:
7882 abort ();
7883 }
7884
7885 abort ();
7886
7887 binop:
7888
7889 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7890 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7891 return;
7892
7893
7894 unop:
7895
7896 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7897 return;
7898
7899
7900 andorif:
7901
7902 bc_expand_expr (TREE_OPERAND (exp, 0));
7903 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7904 lab = bc_get_bytecode_label ();
7905
7906 bc_emit_instruction (duplicate);
7907 bc_emit_bytecode (opcode);
7908 bc_emit_bytecode_labelref (lab);
7909
7910 #ifdef DEBUG_PRINT_CODE
7911 fputc ('\n', stderr);
7912 #endif
7913
7914 bc_emit_instruction (drop);
7915
7916 bc_expand_expr (TREE_OPERAND (exp, 1));
7917 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7918 bc_emit_bytecode_labeldef (lab);
7919 return;
7920
7921
7922 increment:
7923
7924 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7925
7926 /* Push the quantum. */
7927 bc_expand_expr (TREE_OPERAND (exp, 1));
7928
7929 /* Convert it to the lvalue's type. */
7930 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7931
7932 /* Push the address of the lvalue */
7933 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7934
7935 /* Perform actual increment */
7936 bc_expand_increment (incroptab, type);
7937 return;
7938 }
7939 \f
7940 /* Return the alignment in bits of EXP, a pointer valued expression.
7941 But don't return more than MAX_ALIGN no matter what.
7942 The alignment returned is, by default, the alignment of the thing that
7943 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7944
7945 Otherwise, look at the expression to see if we can do better, i.e., if the
7946 expression is actually pointing at an object whose alignment is tighter. */
7947
7948 static int
7949 get_pointer_alignment (exp, max_align)
7950 tree exp;
7951 unsigned max_align;
7952 {
7953 unsigned align, inner;
7954
7955 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7956 return 0;
7957
7958 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7959 align = MIN (align, max_align);
7960
7961 while (1)
7962 {
7963 switch (TREE_CODE (exp))
7964 {
7965 case NOP_EXPR:
7966 case CONVERT_EXPR:
7967 case NON_LVALUE_EXPR:
7968 exp = TREE_OPERAND (exp, 0);
7969 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7970 return align;
7971 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7972 align = MIN (inner, max_align);
7973 break;
7974
7975 case PLUS_EXPR:
7976 /* If sum of pointer + int, restrict our maximum alignment to that
7977 imposed by the integer. If not, we can't do any better than
7978 ALIGN. */
7979 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7980 return align;
7981
7982 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7983 & (max_align - 1))
7984 != 0)
7985 max_align >>= 1;
7986
7987 exp = TREE_OPERAND (exp, 0);
7988 break;
7989
7990 case ADDR_EXPR:
7991 /* See what we are pointing at and look at its alignment. */
7992 exp = TREE_OPERAND (exp, 0);
7993 if (TREE_CODE (exp) == FUNCTION_DECL)
7994 align = FUNCTION_BOUNDARY;
7995 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7996 align = DECL_ALIGN (exp);
7997 #ifdef CONSTANT_ALIGNMENT
7998 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7999 align = CONSTANT_ALIGNMENT (exp, align);
8000 #endif
8001 return MIN (align, max_align);
8002
8003 default:
8004 return align;
8005 }
8006 }
8007 }
8008 \f
8009 /* Return the tree node and offset if a given argument corresponds to
8010 a string constant. */
8011
8012 static tree
8013 string_constant (arg, ptr_offset)
8014 tree arg;
8015 tree *ptr_offset;
8016 {
8017 STRIP_NOPS (arg);
8018
8019 if (TREE_CODE (arg) == ADDR_EXPR
8020 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8021 {
8022 *ptr_offset = integer_zero_node;
8023 return TREE_OPERAND (arg, 0);
8024 }
8025 else if (TREE_CODE (arg) == PLUS_EXPR)
8026 {
8027 tree arg0 = TREE_OPERAND (arg, 0);
8028 tree arg1 = TREE_OPERAND (arg, 1);
8029
8030 STRIP_NOPS (arg0);
8031 STRIP_NOPS (arg1);
8032
8033 if (TREE_CODE (arg0) == ADDR_EXPR
8034 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8035 {
8036 *ptr_offset = arg1;
8037 return TREE_OPERAND (arg0, 0);
8038 }
8039 else if (TREE_CODE (arg1) == ADDR_EXPR
8040 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8041 {
8042 *ptr_offset = arg0;
8043 return TREE_OPERAND (arg1, 0);
8044 }
8045 }
8046
8047 return 0;
8048 }
8049
8050 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8051 way, because it could contain a zero byte in the middle.
8052 TREE_STRING_LENGTH is the size of the character array, not the string.
8053
8054 Unfortunately, string_constant can't access the values of const char
8055 arrays with initializers, so neither can we do so here. */
8056
8057 static tree
8058 c_strlen (src)
8059 tree src;
8060 {
8061 tree offset_node;
8062 int offset, max;
8063 char *ptr;
8064
8065 src = string_constant (src, &offset_node);
8066 if (src == 0)
8067 return 0;
8068 max = TREE_STRING_LENGTH (src);
8069 ptr = TREE_STRING_POINTER (src);
8070 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8071 {
8072 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8073 compute the offset to the following null if we don't know where to
8074 start searching for it. */
8075 int i;
8076 for (i = 0; i < max; i++)
8077 if (ptr[i] == 0)
8078 return 0;
8079 /* We don't know the starting offset, but we do know that the string
8080 has no internal zero bytes. We can assume that the offset falls
8081 within the bounds of the string; otherwise, the programmer deserves
8082 what he gets. Subtract the offset from the length of the string,
8083 and return that. */
8084 /* This would perhaps not be valid if we were dealing with named
8085 arrays in addition to literal string constants. */
8086 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8087 }
8088
8089 /* We have a known offset into the string. Start searching there for
8090 a null character. */
8091 if (offset_node == 0)
8092 offset = 0;
8093 else
8094 {
8095 /* Did we get a long long offset? If so, punt. */
8096 if (TREE_INT_CST_HIGH (offset_node) != 0)
8097 return 0;
8098 offset = TREE_INT_CST_LOW (offset_node);
8099 }
8100 /* If the offset is known to be out of bounds, warn, and call strlen at
8101 runtime. */
8102 if (offset < 0 || offset > max)
8103 {
8104 warning ("offset outside bounds of constant string");
8105 return 0;
8106 }
8107 /* Use strlen to search for the first zero byte. Since any strings
8108 constructed with build_string will have nulls appended, we win even
8109 if we get handed something like (char[4])"abcd".
8110
8111 Since OFFSET is our starting index into the string, no further
8112 calculation is needed. */
8113 return size_int (strlen (ptr + offset));
8114 }
8115
8116 rtx
8117 expand_builtin_return_addr (fndecl_code, count, tem)
8118 enum built_in_function fndecl_code;
8119 int count;
8120 rtx tem;
8121 {
8122 int i;
8123
8124 /* Some machines need special handling before we can access
8125 arbitrary frames. For example, on the sparc, we must first flush
8126 all register windows to the stack. */
8127 #ifdef SETUP_FRAME_ADDRESSES
8128 if (count > 0)
8129 SETUP_FRAME_ADDRESSES ();
8130 #endif
8131
8132 /* On the sparc, the return address is not in the frame, it is in a
8133 register. There is no way to access it off of the current frame
8134 pointer, but it can be accessed off the previous frame pointer by
8135 reading the value from the register window save area. */
8136 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8137 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8138 count--;
8139 #endif
8140
8141 /* Scan back COUNT frames to the specified frame. */
8142 for (i = 0; i < count; i++)
8143 {
8144 /* Assume the dynamic chain pointer is in the word that the
8145 frame address points to, unless otherwise specified. */
8146 #ifdef DYNAMIC_CHAIN_ADDRESS
8147 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8148 #endif
8149 tem = memory_address (Pmode, tem);
8150 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
8151 }
8152
8153 /* For __builtin_frame_address, return what we've got. */
8154 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8155 return tem;
8156
8157 /* For __builtin_return_address, Get the return address from that
8158 frame. */
8159 #ifdef RETURN_ADDR_RTX
8160 tem = RETURN_ADDR_RTX (count, tem);
8161 #else
8162 tem = memory_address (Pmode,
8163 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8164 tem = gen_rtx (MEM, Pmode, tem);
8165 #endif
8166 return tem;
8167 }
8168
8169 /* __builtin_setjmp is passed a pointer to an array of five words (not
8170 all will be used on all machines). It operates similarly to the C
8171 library function of the same name, but is more efficient. Much of
8172 the code below (and for longjmp) is copied from the handling of
8173 non-local gotos.
8174
8175 NOTE: This is intended for use by GNAT and the exception handling
8176 scheme in the compiler and will only work in the method used by
8177 them. */
8178
8179 rtx
8180 expand_builtin_setjmp (buf_addr, target)
8181 rtx buf_addr;
8182 rtx target;
8183 {
8184 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8185 enum machine_mode sa_mode = Pmode, value_mode;
8186 rtx stack_save;
8187 int old_inhibit_defer_pop = inhibit_defer_pop;
8188 int return_pops
8189 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8190 build_function_type (void_type_node, NULL_TREE),
8191 0);
8192 rtx next_arg_reg;
8193 CUMULATIVE_ARGS args_so_far;
8194 rtx op0;
8195 int i;
8196
8197 value_mode = TYPE_MODE (integer_type_node);
8198
8199 #ifdef POINTERS_EXTEND_UNSIGNED
8200 buf_addr = convert_memory_address (Pmode, buf_addr);
8201 #endif
8202
8203 buf_addr = force_reg (Pmode, buf_addr);
8204
8205 if (target == 0 || GET_CODE (target) != REG
8206 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8207 target = gen_reg_rtx (value_mode);
8208
8209 emit_queue ();
8210
8211 /* We store the frame pointer and the address of lab1 in the buffer
8212 and use the rest of it for the stack save area, which is
8213 machine-dependent. */
8214 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8215 virtual_stack_vars_rtx);
8216 emit_move_insn
8217 (validize_mem (gen_rtx (MEM, Pmode,
8218 plus_constant (buf_addr,
8219 GET_MODE_SIZE (Pmode)))),
8220 gen_rtx (LABEL_REF, Pmode, lab1));
8221
8222 #ifdef HAVE_save_stack_nonlocal
8223 if (HAVE_save_stack_nonlocal)
8224 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8225 #endif
8226
8227 stack_save = gen_rtx (MEM, sa_mode,
8228 plus_constant (buf_addr,
8229 2 * GET_MODE_SIZE (Pmode)));
8230 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8231
8232 #ifdef HAVE_setjmp
8233 if (HAVE_setjmp)
8234 emit_insn (gen_setjmp ());
8235 #endif
8236
8237 /* Set TARGET to zero and branch around the other case. */
8238 emit_move_insn (target, const0_rtx);
8239 emit_jump_insn (gen_jump (lab2));
8240 emit_barrier ();
8241 emit_label (lab1);
8242
8243 /* Note that setjmp clobbers FP when we get here, so we have to make
8244 sure it's marked as used by this function. */
8245 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8246
8247 /* Mark the static chain as clobbered here so life information
8248 doesn't get messed up for it. */
8249 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8250
8251 /* Now put in the code to restore the frame pointer, and argument
8252 pointer, if needed. The code below is from expand_end_bindings
8253 in stmt.c; see detailed documentation there. */
8254 #ifdef HAVE_nonlocal_goto
8255 if (! HAVE_nonlocal_goto)
8256 #endif
8257 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8258
8259 current_function_has_nonlocal_label = 1;
8260
8261 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8262 if (fixed_regs[ARG_POINTER_REGNUM])
8263 {
8264 #ifdef ELIMINABLE_REGS
8265 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8266
8267 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8268 if (elim_regs[i].from == ARG_POINTER_REGNUM
8269 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8270 break;
8271
8272 if (i == sizeof elim_regs / sizeof elim_regs [0])
8273 #endif
8274 {
8275 /* Now restore our arg pointer from the address at which it
8276 was saved in our stack frame.
8277 If there hasn't be space allocated for it yet, make
8278 some now. */
8279 if (arg_pointer_save_area == 0)
8280 arg_pointer_save_area
8281 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8282 emit_move_insn (virtual_incoming_args_rtx,
8283 copy_to_reg (arg_pointer_save_area));
8284 }
8285 }
8286 #endif
8287
8288 #ifdef HAVE_nonlocal_goto_receiver
8289 if (HAVE_nonlocal_goto_receiver)
8290 emit_insn (gen_nonlocal_goto_receiver ());
8291 #endif
8292 /* The static chain pointer contains the address of dummy function.
8293 We need to call it here to handle some PIC cases of restoring a
8294 global pointer. Then return 1. */
8295 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8296
8297 /* We can't actually call emit_library_call here, so do everything
8298 it does, which isn't much for a libfunc with no args. */
8299 op0 = memory_address (FUNCTION_MODE, op0);
8300
8301 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8302 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8303 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8304
8305 #ifndef ACCUMULATE_OUTGOING_ARGS
8306 #ifdef HAVE_call_pop
8307 if (HAVE_call_pop)
8308 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8309 const0_rtx, next_arg_reg,
8310 GEN_INT (return_pops)));
8311 else
8312 #endif
8313 #endif
8314
8315 #ifdef HAVE_call
8316 if (HAVE_call)
8317 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8318 const0_rtx, next_arg_reg, const0_rtx));
8319 else
8320 #endif
8321 abort ();
8322
8323 emit_move_insn (target, const1_rtx);
8324 emit_label (lab2);
8325 return target;
8326 }
8327
8328 \f
8329 /* Expand an expression EXP that calls a built-in function,
8330 with result going to TARGET if that's convenient
8331 (and in mode MODE if that's convenient).
8332 SUBTARGET may be used as the target for computing one of EXP's operands.
8333 IGNORE is nonzero if the value is to be ignored. */
8334
8335 #define CALLED_AS_BUILT_IN(NODE) \
8336 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8337
8338 static rtx
8339 expand_builtin (exp, target, subtarget, mode, ignore)
8340 tree exp;
8341 rtx target;
8342 rtx subtarget;
8343 enum machine_mode mode;
8344 int ignore;
8345 {
8346 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8347 tree arglist = TREE_OPERAND (exp, 1);
8348 rtx op0;
8349 rtx lab1, insns;
8350 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8351 optab builtin_optab;
8352
8353 switch (DECL_FUNCTION_CODE (fndecl))
8354 {
8355 case BUILT_IN_ABS:
8356 case BUILT_IN_LABS:
8357 case BUILT_IN_FABS:
8358 /* build_function_call changes these into ABS_EXPR. */
8359 abort ();
8360
8361 case BUILT_IN_SIN:
8362 case BUILT_IN_COS:
8363 /* Treat these like sqrt, but only if the user asks for them. */
8364 if (! flag_fast_math)
8365 break;
8366 case BUILT_IN_FSQRT:
8367 /* If not optimizing, call the library function. */
8368 if (! optimize)
8369 break;
8370
8371 if (arglist == 0
8372 /* Arg could be wrong type if user redeclared this fcn wrong. */
8373 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8374 break;
8375
8376 /* Stabilize and compute the argument. */
8377 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8378 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8379 {
8380 exp = copy_node (exp);
8381 arglist = copy_node (arglist);
8382 TREE_OPERAND (exp, 1) = arglist;
8383 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8384 }
8385 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8386
8387 /* Make a suitable register to place result in. */
8388 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8389
8390 emit_queue ();
8391 start_sequence ();
8392
8393 switch (DECL_FUNCTION_CODE (fndecl))
8394 {
8395 case BUILT_IN_SIN:
8396 builtin_optab = sin_optab; break;
8397 case BUILT_IN_COS:
8398 builtin_optab = cos_optab; break;
8399 case BUILT_IN_FSQRT:
8400 builtin_optab = sqrt_optab; break;
8401 default:
8402 abort ();
8403 }
8404
8405 /* Compute into TARGET.
8406 Set TARGET to wherever the result comes back. */
8407 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8408 builtin_optab, op0, target, 0);
8409
8410 /* If we were unable to expand via the builtin, stop the
8411 sequence (without outputting the insns) and break, causing
8412 a call the the library function. */
8413 if (target == 0)
8414 {
8415 end_sequence ();
8416 break;
8417 }
8418
8419 /* Check the results by default. But if flag_fast_math is turned on,
8420 then assume sqrt will always be called with valid arguments. */
8421
8422 if (! flag_fast_math)
8423 {
8424 /* Don't define the builtin FP instructions
8425 if your machine is not IEEE. */
8426 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8427 abort ();
8428
8429 lab1 = gen_label_rtx ();
8430
8431 /* Test the result; if it is NaN, set errno=EDOM because
8432 the argument was not in the domain. */
8433 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8434 emit_jump_insn (gen_beq (lab1));
8435
8436 #ifdef TARGET_EDOM
8437 {
8438 #ifdef GEN_ERRNO_RTX
8439 rtx errno_rtx = GEN_ERRNO_RTX;
8440 #else
8441 rtx errno_rtx
8442 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8443 #endif
8444
8445 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8446 }
8447 #else
8448 /* We can't set errno=EDOM directly; let the library call do it.
8449 Pop the arguments right away in case the call gets deleted. */
8450 NO_DEFER_POP;
8451 expand_call (exp, target, 0);
8452 OK_DEFER_POP;
8453 #endif
8454
8455 emit_label (lab1);
8456 }
8457
8458 /* Output the entire sequence. */
8459 insns = get_insns ();
8460 end_sequence ();
8461 emit_insns (insns);
8462
8463 return target;
8464
8465 case BUILT_IN_FMOD:
8466 break;
8467
8468 /* __builtin_apply_args returns block of memory allocated on
8469 the stack into which is stored the arg pointer, structure
8470 value address, static chain, and all the registers that might
8471 possibly be used in performing a function call. The code is
8472 moved to the start of the function so the incoming values are
8473 saved. */
8474 case BUILT_IN_APPLY_ARGS:
8475 /* Don't do __builtin_apply_args more than once in a function.
8476 Save the result of the first call and reuse it. */
8477 if (apply_args_value != 0)
8478 return apply_args_value;
8479 {
8480 /* When this function is called, it means that registers must be
8481 saved on entry to this function. So we migrate the
8482 call to the first insn of this function. */
8483 rtx temp;
8484 rtx seq;
8485
8486 start_sequence ();
8487 temp = expand_builtin_apply_args ();
8488 seq = get_insns ();
8489 end_sequence ();
8490
8491 apply_args_value = temp;
8492
8493 /* Put the sequence after the NOTE that starts the function.
8494 If this is inside a SEQUENCE, make the outer-level insn
8495 chain current, so the code is placed at the start of the
8496 function. */
8497 push_topmost_sequence ();
8498 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8499 pop_topmost_sequence ();
8500 return temp;
8501 }
8502
8503 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8504 FUNCTION with a copy of the parameters described by
8505 ARGUMENTS, and ARGSIZE. It returns a block of memory
8506 allocated on the stack into which is stored all the registers
8507 that might possibly be used for returning the result of a
8508 function. ARGUMENTS is the value returned by
8509 __builtin_apply_args. ARGSIZE is the number of bytes of
8510 arguments that must be copied. ??? How should this value be
8511 computed? We'll also need a safe worst case value for varargs
8512 functions. */
8513 case BUILT_IN_APPLY:
8514 if (arglist == 0
8515 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8516 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8517 || TREE_CHAIN (arglist) == 0
8518 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8519 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8520 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8521 return const0_rtx;
8522 else
8523 {
8524 int i;
8525 tree t;
8526 rtx ops[3];
8527
8528 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8529 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8530
8531 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8532 }
8533
8534 /* __builtin_return (RESULT) causes the function to return the
8535 value described by RESULT. RESULT is address of the block of
8536 memory returned by __builtin_apply. */
8537 case BUILT_IN_RETURN:
8538 if (arglist
8539 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8540 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8541 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8542 NULL_RTX, VOIDmode, 0));
8543 return const0_rtx;
8544
8545 case BUILT_IN_SAVEREGS:
8546 /* Don't do __builtin_saveregs more than once in a function.
8547 Save the result of the first call and reuse it. */
8548 if (saveregs_value != 0)
8549 return saveregs_value;
8550 {
8551 /* When this function is called, it means that registers must be
8552 saved on entry to this function. So we migrate the
8553 call to the first insn of this function. */
8554 rtx temp;
8555 rtx seq;
8556
8557 /* Now really call the function. `expand_call' does not call
8558 expand_builtin, so there is no danger of infinite recursion here. */
8559 start_sequence ();
8560
8561 #ifdef EXPAND_BUILTIN_SAVEREGS
8562 /* Do whatever the machine needs done in this case. */
8563 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8564 #else
8565 /* The register where the function returns its value
8566 is likely to have something else in it, such as an argument.
8567 So preserve that register around the call. */
8568
8569 if (value_mode != VOIDmode)
8570 {
8571 rtx valreg = hard_libcall_value (value_mode);
8572 rtx saved_valreg = gen_reg_rtx (value_mode);
8573
8574 emit_move_insn (saved_valreg, valreg);
8575 temp = expand_call (exp, target, ignore);
8576 emit_move_insn (valreg, saved_valreg);
8577 }
8578 else
8579 /* Generate the call, putting the value in a pseudo. */
8580 temp = expand_call (exp, target, ignore);
8581 #endif
8582
8583 seq = get_insns ();
8584 end_sequence ();
8585
8586 saveregs_value = temp;
8587
8588 /* Put the sequence after the NOTE that starts the function.
8589 If this is inside a SEQUENCE, make the outer-level insn
8590 chain current, so the code is placed at the start of the
8591 function. */
8592 push_topmost_sequence ();
8593 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8594 pop_topmost_sequence ();
8595 return temp;
8596 }
8597
8598 /* __builtin_args_info (N) returns word N of the arg space info
8599 for the current function. The number and meanings of words
8600 is controlled by the definition of CUMULATIVE_ARGS. */
8601 case BUILT_IN_ARGS_INFO:
8602 {
8603 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8604 int i;
8605 int *word_ptr = (int *) &current_function_args_info;
8606 tree type, elts, result;
8607
8608 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8609 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8610 __FILE__, __LINE__);
8611
8612 if (arglist != 0)
8613 {
8614 tree arg = TREE_VALUE (arglist);
8615 if (TREE_CODE (arg) != INTEGER_CST)
8616 error ("argument of `__builtin_args_info' must be constant");
8617 else
8618 {
8619 int wordnum = TREE_INT_CST_LOW (arg);
8620
8621 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8622 error ("argument of `__builtin_args_info' out of range");
8623 else
8624 return GEN_INT (word_ptr[wordnum]);
8625 }
8626 }
8627 else
8628 error ("missing argument in `__builtin_args_info'");
8629
8630 return const0_rtx;
8631
8632 #if 0
8633 for (i = 0; i < nwords; i++)
8634 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8635
8636 type = build_array_type (integer_type_node,
8637 build_index_type (build_int_2 (nwords, 0)));
8638 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8639 TREE_CONSTANT (result) = 1;
8640 TREE_STATIC (result) = 1;
8641 result = build (INDIRECT_REF, build_pointer_type (type), result);
8642 TREE_CONSTANT (result) = 1;
8643 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8644 #endif
8645 }
8646
8647 /* Return the address of the first anonymous stack arg. */
8648 case BUILT_IN_NEXT_ARG:
8649 {
8650 tree fntype = TREE_TYPE (current_function_decl);
8651
8652 if ((TYPE_ARG_TYPES (fntype) == 0
8653 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8654 == void_type_node))
8655 && ! current_function_varargs)
8656 {
8657 error ("`va_start' used in function with fixed args");
8658 return const0_rtx;
8659 }
8660
8661 if (arglist)
8662 {
8663 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8664 tree arg = TREE_VALUE (arglist);
8665
8666 /* Strip off all nops for the sake of the comparison. This
8667 is not quite the same as STRIP_NOPS. It does more.
8668 We must also strip off INDIRECT_EXPR for C++ reference
8669 parameters. */
8670 while (TREE_CODE (arg) == NOP_EXPR
8671 || TREE_CODE (arg) == CONVERT_EXPR
8672 || TREE_CODE (arg) == NON_LVALUE_EXPR
8673 || TREE_CODE (arg) == INDIRECT_REF)
8674 arg = TREE_OPERAND (arg, 0);
8675 if (arg != last_parm)
8676 warning ("second parameter of `va_start' not last named argument");
8677 }
8678 else if (! current_function_varargs)
8679 /* Evidently an out of date version of <stdarg.h>; can't validate
8680 va_start's second argument, but can still work as intended. */
8681 warning ("`__builtin_next_arg' called without an argument");
8682 }
8683
8684 return expand_binop (Pmode, add_optab,
8685 current_function_internal_arg_pointer,
8686 current_function_arg_offset_rtx,
8687 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8688
8689 case BUILT_IN_CLASSIFY_TYPE:
8690 if (arglist != 0)
8691 {
8692 tree type = TREE_TYPE (TREE_VALUE (arglist));
8693 enum tree_code code = TREE_CODE (type);
8694 if (code == VOID_TYPE)
8695 return GEN_INT (void_type_class);
8696 if (code == INTEGER_TYPE)
8697 return GEN_INT (integer_type_class);
8698 if (code == CHAR_TYPE)
8699 return GEN_INT (char_type_class);
8700 if (code == ENUMERAL_TYPE)
8701 return GEN_INT (enumeral_type_class);
8702 if (code == BOOLEAN_TYPE)
8703 return GEN_INT (boolean_type_class);
8704 if (code == POINTER_TYPE)
8705 return GEN_INT (pointer_type_class);
8706 if (code == REFERENCE_TYPE)
8707 return GEN_INT (reference_type_class);
8708 if (code == OFFSET_TYPE)
8709 return GEN_INT (offset_type_class);
8710 if (code == REAL_TYPE)
8711 return GEN_INT (real_type_class);
8712 if (code == COMPLEX_TYPE)
8713 return GEN_INT (complex_type_class);
8714 if (code == FUNCTION_TYPE)
8715 return GEN_INT (function_type_class);
8716 if (code == METHOD_TYPE)
8717 return GEN_INT (method_type_class);
8718 if (code == RECORD_TYPE)
8719 return GEN_INT (record_type_class);
8720 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8721 return GEN_INT (union_type_class);
8722 if (code == ARRAY_TYPE)
8723 {
8724 if (TYPE_STRING_FLAG (type))
8725 return GEN_INT (string_type_class);
8726 else
8727 return GEN_INT (array_type_class);
8728 }
8729 if (code == SET_TYPE)
8730 return GEN_INT (set_type_class);
8731 if (code == FILE_TYPE)
8732 return GEN_INT (file_type_class);
8733 if (code == LANG_TYPE)
8734 return GEN_INT (lang_type_class);
8735 }
8736 return GEN_INT (no_type_class);
8737
8738 case BUILT_IN_CONSTANT_P:
8739 if (arglist == 0)
8740 return const0_rtx;
8741 else
8742 {
8743 tree arg = TREE_VALUE (arglist);
8744
8745 STRIP_NOPS (arg);
8746 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8747 || (TREE_CODE (arg) == ADDR_EXPR
8748 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8749 ? const1_rtx : const0_rtx);
8750 }
8751
8752 case BUILT_IN_FRAME_ADDRESS:
8753 /* The argument must be a nonnegative integer constant.
8754 It counts the number of frames to scan up the stack.
8755 The value is the address of that frame. */
8756 case BUILT_IN_RETURN_ADDRESS:
8757 /* The argument must be a nonnegative integer constant.
8758 It counts the number of frames to scan up the stack.
8759 The value is the return address saved in that frame. */
8760 if (arglist == 0)
8761 /* Warning about missing arg was already issued. */
8762 return const0_rtx;
8763 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8764 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8765 {
8766 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8767 error ("invalid arg to `__builtin_frame_address'");
8768 else
8769 error ("invalid arg to `__builtin_return_address'");
8770 return const0_rtx;
8771 }
8772 else
8773 {
8774 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8775 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8776 hard_frame_pointer_rtx);
8777
8778 /* Some ports cannot access arbitrary stack frames. */
8779 if (tem == NULL)
8780 {
8781 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8782 warning ("unsupported arg to `__builtin_frame_address'");
8783 else
8784 warning ("unsupported arg to `__builtin_return_address'");
8785 return const0_rtx;
8786 }
8787
8788 /* For __builtin_frame_address, return what we've got. */
8789 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8790 return tem;
8791
8792 if (GET_CODE (tem) != REG)
8793 tem = copy_to_reg (tem);
8794 return tem;
8795 }
8796
8797 /* Returns the address of the area where the structure is returned.
8798 0 otherwise. */
8799 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8800 if (arglist != 0
8801 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8802 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8803 return const0_rtx;
8804 else
8805 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8806
8807 case BUILT_IN_ALLOCA:
8808 if (arglist == 0
8809 /* Arg could be non-integer if user redeclared this fcn wrong. */
8810 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8811 break;
8812
8813 /* Compute the argument. */
8814 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8815
8816 /* Allocate the desired space. */
8817 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8818
8819 case BUILT_IN_FFS:
8820 /* If not optimizing, call the library function. */
8821 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8822 break;
8823
8824 if (arglist == 0
8825 /* Arg could be non-integer if user redeclared this fcn wrong. */
8826 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8827 break;
8828
8829 /* Compute the argument. */
8830 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8831 /* Compute ffs, into TARGET if possible.
8832 Set TARGET to wherever the result comes back. */
8833 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8834 ffs_optab, op0, target, 1);
8835 if (target == 0)
8836 abort ();
8837 return target;
8838
8839 case BUILT_IN_STRLEN:
8840 /* If not optimizing, call the library function. */
8841 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8842 break;
8843
8844 if (arglist == 0
8845 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8846 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8847 break;
8848 else
8849 {
8850 tree src = TREE_VALUE (arglist);
8851 tree len = c_strlen (src);
8852
8853 int align
8854 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8855
8856 rtx result, src_rtx, char_rtx;
8857 enum machine_mode insn_mode = value_mode, char_mode;
8858 enum insn_code icode;
8859
8860 /* If the length is known, just return it. */
8861 if (len != 0)
8862 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8863
8864 /* If SRC is not a pointer type, don't do this operation inline. */
8865 if (align == 0)
8866 break;
8867
8868 /* Call a function if we can't compute strlen in the right mode. */
8869
8870 while (insn_mode != VOIDmode)
8871 {
8872 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8873 if (icode != CODE_FOR_nothing)
8874 break;
8875
8876 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8877 }
8878 if (insn_mode == VOIDmode)
8879 break;
8880
8881 /* Make a place to write the result of the instruction. */
8882 result = target;
8883 if (! (result != 0
8884 && GET_CODE (result) == REG
8885 && GET_MODE (result) == insn_mode
8886 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8887 result = gen_reg_rtx (insn_mode);
8888
8889 /* Make sure the operands are acceptable to the predicates. */
8890
8891 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8892 result = gen_reg_rtx (insn_mode);
8893 src_rtx = memory_address (BLKmode,
8894 expand_expr (src, NULL_RTX, ptr_mode,
8895 EXPAND_NORMAL));
8896
8897 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8898 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8899
8900 /* Check the string is readable and has an end. */
8901 if (flag_check_memory_usage)
8902 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8903 src_rtx, ptr_mode,
8904 GEN_INT (MEMORY_USE_RO),
8905 TYPE_MODE (integer_type_node));
8906
8907 char_rtx = const0_rtx;
8908 char_mode = insn_operand_mode[(int)icode][2];
8909 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8910 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8911
8912 emit_insn (GEN_FCN (icode) (result,
8913 gen_rtx (MEM, BLKmode, src_rtx),
8914 char_rtx, GEN_INT (align)));
8915
8916 /* Return the value in the proper mode for this function. */
8917 if (GET_MODE (result) == value_mode)
8918 return result;
8919 else if (target != 0)
8920 {
8921 convert_move (target, result, 0);
8922 return target;
8923 }
8924 else
8925 return convert_to_mode (value_mode, result, 0);
8926 }
8927
8928 case BUILT_IN_STRCPY:
8929 /* If not optimizing, call the library function. */
8930 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8931 break;
8932
8933 if (arglist == 0
8934 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8935 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8936 || TREE_CHAIN (arglist) == 0
8937 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8938 break;
8939 else
8940 {
8941 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8942
8943 if (len == 0)
8944 break;
8945
8946 len = size_binop (PLUS_EXPR, len, integer_one_node);
8947
8948 chainon (arglist, build_tree_list (NULL_TREE, len));
8949 }
8950
8951 /* Drops in. */
8952 case BUILT_IN_MEMCPY:
8953 /* If not optimizing, call the library function. */
8954 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8955 break;
8956
8957 if (arglist == 0
8958 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8959 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8960 || TREE_CHAIN (arglist) == 0
8961 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8962 != POINTER_TYPE)
8963 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8964 || (TREE_CODE (TREE_TYPE (TREE_VALUE
8965 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8966 != INTEGER_TYPE))
8967 break;
8968 else
8969 {
8970 tree dest = TREE_VALUE (arglist);
8971 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8972 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8973 tree type;
8974
8975 int src_align
8976 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8977 int dest_align
8978 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8979 rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
8980
8981 /* If either SRC or DEST is not a pointer type, don't do
8982 this operation in-line. */
8983 if (src_align == 0 || dest_align == 0)
8984 {
8985 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8986 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8987 break;
8988 }
8989
8990 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8991 dest_mem = gen_rtx (MEM, BLKmode,
8992 memory_address (BLKmode, dest_rtx));
8993 /* There could be a void* cast on top of the object. */
8994 while (TREE_CODE (dest) == NOP_EXPR)
8995 dest = TREE_OPERAND (dest, 0);
8996 type = TREE_TYPE (TREE_TYPE (dest));
8997 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8998 src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
8999 src_mem = gen_rtx (MEM, BLKmode,
9000 memory_address (BLKmode, src_rtx));
9001 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9002
9003 /* Just copy the rights of SRC to the rights of DEST. */
9004 if (flag_check_memory_usage)
9005 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9006 src_rtx, ptr_mode,
9007 dest_rtx, ptr_mode,
9008 len_rtx, TYPE_MODE (sizetype));
9009
9010 /* There could be a void* cast on top of the object. */
9011 while (TREE_CODE (src) == NOP_EXPR)
9012 src = TREE_OPERAND (src, 0);
9013 type = TREE_TYPE (TREE_TYPE (src));
9014 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
9015
9016 /* Copy word part most expediently. */
9017 dest_addr
9018 = emit_block_move (dest_mem, src_mem, len_rtx,
9019 MIN (src_align, dest_align));
9020
9021 if (dest_addr == 0)
9022 dest_addr = force_operand (dest_rtx, NULL_RTX);
9023
9024 return dest_addr;
9025 }
9026
9027 case BUILT_IN_MEMSET:
9028 /* If not optimizing, call the library function. */
9029 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9030 break;
9031
9032 if (arglist == 0
9033 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9034 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9035 || TREE_CHAIN (arglist) == 0
9036 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9037 != INTEGER_TYPE)
9038 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9039 || (INTEGER_TYPE
9040 != (TREE_CODE (TREE_TYPE
9041 (TREE_VALUE
9042 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9043 break;
9044 else
9045 {
9046 tree dest = TREE_VALUE (arglist);
9047 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9048 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9049 tree type;
9050
9051 int dest_align
9052 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9053 rtx dest_rtx, dest_mem, dest_addr, len_rtx;
9054
9055 /* If DEST is not a pointer type, don't do this
9056 operation in-line. */
9057 if (dest_align == 0)
9058 break;
9059
9060 /* If VAL is not 0, don't do this operation in-line. */
9061 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9062 break;
9063
9064 /* If LEN does not expand to a constant, don't do this
9065 operation in-line. */
9066 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9067 if (GET_CODE (len_rtx) != CONST_INT)
9068 break;
9069
9070 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
9071 dest_mem = gen_rtx (MEM, BLKmode,
9072 memory_address (BLKmode, dest_rtx));
9073
9074 /* Just check DST is writable and mark it as readable. */
9075 if (flag_check_memory_usage)
9076 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9077 dest_rtx, ptr_mode,
9078 len_rtx, TYPE_MODE (sizetype),
9079 GEN_INT (MEMORY_USE_WO),
9080 TYPE_MODE (integer_type_node));
9081
9082 /* There could be a void* cast on top of the object. */
9083 while (TREE_CODE (dest) == NOP_EXPR)
9084 dest = TREE_OPERAND (dest, 0);
9085 type = TREE_TYPE (TREE_TYPE (dest));
9086 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
9087
9088 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9089
9090 if (dest_addr == 0)
9091 dest_addr = force_operand (dest_rtx, NULL_RTX);
9092
9093 return dest_addr;
9094 }
9095
9096 /* These comparison functions need an instruction that returns an actual
9097 index. An ordinary compare that just sets the condition codes
9098 is not enough. */
9099 #ifdef HAVE_cmpstrsi
9100 case BUILT_IN_STRCMP:
9101 /* If not optimizing, call the library function. */
9102 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9103 break;
9104
9105 /* If we need to check memory accesses, call the library function. */
9106 if (flag_check_memory_usage)
9107 break;
9108
9109 if (arglist == 0
9110 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9111 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9112 || TREE_CHAIN (arglist) == 0
9113 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9114 break;
9115 else if (!HAVE_cmpstrsi)
9116 break;
9117 {
9118 tree arg1 = TREE_VALUE (arglist);
9119 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9120 tree offset;
9121 tree len, len2;
9122
9123 len = c_strlen (arg1);
9124 if (len)
9125 len = size_binop (PLUS_EXPR, integer_one_node, len);
9126 len2 = c_strlen (arg2);
9127 if (len2)
9128 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9129
9130 /* If we don't have a constant length for the first, use the length
9131 of the second, if we know it. We don't require a constant for
9132 this case; some cost analysis could be done if both are available
9133 but neither is constant. For now, assume they're equally cheap.
9134
9135 If both strings have constant lengths, use the smaller. This
9136 could arise if optimization results in strcpy being called with
9137 two fixed strings, or if the code was machine-generated. We should
9138 add some code to the `memcmp' handler below to deal with such
9139 situations, someday. */
9140 if (!len || TREE_CODE (len) != INTEGER_CST)
9141 {
9142 if (len2)
9143 len = len2;
9144 else if (len == 0)
9145 break;
9146 }
9147 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9148 {
9149 if (tree_int_cst_lt (len2, len))
9150 len = len2;
9151 }
9152
9153 chainon (arglist, build_tree_list (NULL_TREE, len));
9154 }
9155
9156 /* Drops in. */
9157 case BUILT_IN_MEMCMP:
9158 /* If not optimizing, call the library function. */
9159 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9160 break;
9161
9162 /* If we need to check memory accesses, call the library function. */
9163 if (flag_check_memory_usage)
9164 break;
9165
9166 if (arglist == 0
9167 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9168 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9169 || TREE_CHAIN (arglist) == 0
9170 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9171 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9172 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9173 break;
9174 else if (!HAVE_cmpstrsi)
9175 break;
9176 {
9177 tree arg1 = TREE_VALUE (arglist);
9178 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9179 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9180 rtx result;
9181
9182 int arg1_align
9183 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9184 int arg2_align
9185 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9186 enum machine_mode insn_mode
9187 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9188
9189 /* If we don't have POINTER_TYPE, call the function. */
9190 if (arg1_align == 0 || arg2_align == 0)
9191 {
9192 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9193 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9194 break;
9195 }
9196
9197 /* Make a place to write the result of the instruction. */
9198 result = target;
9199 if (! (result != 0
9200 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9201 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9202 result = gen_reg_rtx (insn_mode);
9203
9204 emit_insn (gen_cmpstrsi (result,
9205 gen_rtx (MEM, BLKmode,
9206 expand_expr (arg1, NULL_RTX,
9207 ptr_mode,
9208 EXPAND_NORMAL)),
9209 gen_rtx (MEM, BLKmode,
9210 expand_expr (arg2, NULL_RTX,
9211 ptr_mode,
9212 EXPAND_NORMAL)),
9213 expand_expr (len, NULL_RTX, VOIDmode, 0),
9214 GEN_INT (MIN (arg1_align, arg2_align))));
9215
9216 /* Return the value in the proper mode for this function. */
9217 mode = TYPE_MODE (TREE_TYPE (exp));
9218 if (GET_MODE (result) == mode)
9219 return result;
9220 else if (target != 0)
9221 {
9222 convert_move (target, result, 0);
9223 return target;
9224 }
9225 else
9226 return convert_to_mode (mode, result, 0);
9227 }
9228 #else
9229 case BUILT_IN_STRCMP:
9230 case BUILT_IN_MEMCMP:
9231 break;
9232 #endif
9233
9234 case BUILT_IN_SETJMP:
9235 if (arglist == 0
9236 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9237 break;
9238
9239 {
9240 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9241 VOIDmode, 0);
9242 return expand_builtin_setjmp (buf_addr, target);
9243 }
9244
9245 /* __builtin_longjmp is passed a pointer to an array of five words
9246 and a value, which is a dummy. It's similar to the C library longjmp
9247 function but works with __builtin_setjmp above. */
9248 case BUILT_IN_LONGJMP:
9249 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9250 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9251 break;
9252
9253 {
9254 tree dummy_id = get_identifier ("__dummy");
9255 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
9256 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
9257 #ifdef POINTERS_EXTEND_UNSIGNED
9258 rtx buf_addr
9259 = force_reg (Pmode,
9260 convert_memory_address
9261 (Pmode,
9262 expand_expr (TREE_VALUE (arglist),
9263 NULL_RTX, VOIDmode, 0)));
9264 #else
9265 rtx buf_addr
9266 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9267 NULL_RTX,
9268 VOIDmode, 0));
9269 #endif
9270 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
9271 rtx lab = gen_rtx (MEM, Pmode,
9272 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
9273 enum machine_mode sa_mode
9274 #ifdef HAVE_save_stack_nonlocal
9275 = (HAVE_save_stack_nonlocal
9276 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9277 : Pmode);
9278 #else
9279 = Pmode;
9280 #endif
9281 rtx stack = gen_rtx (MEM, sa_mode,
9282 plus_constant (buf_addr,
9283 2 * GET_MODE_SIZE (Pmode)));
9284
9285 DECL_EXTERNAL (dummy_decl) = 1;
9286 TREE_PUBLIC (dummy_decl) = 1;
9287 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9288
9289 /* Expand the second expression just for side-effects. */
9290 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9291 const0_rtx, VOIDmode, 0);
9292
9293 assemble_external (dummy_decl);
9294
9295 /* Pick up FP, label, and SP from the block and jump. This code is
9296 from expand_goto in stmt.c; see there for detailed comments. */
9297 #if HAVE_nonlocal_goto
9298 if (HAVE_nonlocal_goto)
9299 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9300 XEXP (DECL_RTL (dummy_decl), 0)));
9301 else
9302 #endif
9303 {
9304 lab = copy_to_reg (lab);
9305 emit_move_insn (hard_frame_pointer_rtx, fp);
9306 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9307
9308 /* Put in the static chain register the address of the dummy
9309 function. */
9310 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9311 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9312 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9313 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9314 emit_indirect_jump (lab);
9315 }
9316
9317 return const0_rtx;
9318 }
9319
9320 /* Various hooks for the DWARF 2 __throw routine. */
9321 case BUILT_IN_UNWIND_INIT:
9322 expand_builtin_unwind_init ();
9323 return const0_rtx;
9324 case BUILT_IN_FP:
9325 return frame_pointer_rtx;
9326 case BUILT_IN_SP:
9327 return stack_pointer_rtx;
9328 #ifdef DWARF2_UNWIND_INFO
9329 case BUILT_IN_DWARF_FP_REGNUM:
9330 return expand_builtin_dwarf_fp_regnum ();
9331 case BUILT_IN_DWARF_REG_SIZE:
9332 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9333 #endif
9334 case BUILT_IN_FROB_RETURN_ADDR:
9335 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9336 case BUILT_IN_EXTRACT_RETURN_ADDR:
9337 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9338 case BUILT_IN_SET_RETURN_ADDR_REG:
9339 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
9340 return const0_rtx;
9341 case BUILT_IN_EH_STUB:
9342 return expand_builtin_eh_stub ();
9343 case BUILT_IN_SET_EH_REGS:
9344 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
9345 TREE_VALUE (TREE_CHAIN (arglist)));
9346 return const0_rtx;
9347
9348 default: /* just do library call, if unknown builtin */
9349 error ("built-in function `%s' not currently supported",
9350 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9351 }
9352
9353 /* The switch statement above can drop through to cause the function
9354 to be called normally. */
9355
9356 return expand_call (exp, target, ignore);
9357 }
9358 \f
9359 /* Built-in functions to perform an untyped call and return. */
9360
9361 /* For each register that may be used for calling a function, this
9362 gives a mode used to copy the register's value. VOIDmode indicates
9363 the register is not used for calling a function. If the machine
9364 has register windows, this gives only the outbound registers.
9365 INCOMING_REGNO gives the corresponding inbound register. */
9366 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9367
9368 /* For each register that may be used for returning values, this gives
9369 a mode used to copy the register's value. VOIDmode indicates the
9370 register is not used for returning values. If the machine has
9371 register windows, this gives only the outbound registers.
9372 INCOMING_REGNO gives the corresponding inbound register. */
9373 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9374
9375 /* For each register that may be used for calling a function, this
9376 gives the offset of that register into the block returned by
9377 __builtin_apply_args. 0 indicates that the register is not
9378 used for calling a function. */
9379 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9380
9381 /* Return the offset of register REGNO into the block returned by
9382 __builtin_apply_args. This is not declared static, since it is
9383 needed in objc-act.c. */
9384
9385 int
9386 apply_args_register_offset (regno)
9387 int regno;
9388 {
9389 apply_args_size ();
9390
9391 /* Arguments are always put in outgoing registers (in the argument
9392 block) if such make sense. */
9393 #ifdef OUTGOING_REGNO
9394 regno = OUTGOING_REGNO(regno);
9395 #endif
9396 return apply_args_reg_offset[regno];
9397 }
9398
9399 /* Return the size required for the block returned by __builtin_apply_args,
9400 and initialize apply_args_mode. */
9401
9402 static int
9403 apply_args_size ()
9404 {
9405 static int size = -1;
9406 int align, regno;
9407 enum machine_mode mode;
9408
9409 /* The values computed by this function never change. */
9410 if (size < 0)
9411 {
9412 /* The first value is the incoming arg-pointer. */
9413 size = GET_MODE_SIZE (Pmode);
9414
9415 /* The second value is the structure value address unless this is
9416 passed as an "invisible" first argument. */
9417 if (struct_value_rtx)
9418 size += GET_MODE_SIZE (Pmode);
9419
9420 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9421 if (FUNCTION_ARG_REGNO_P (regno))
9422 {
9423 /* Search for the proper mode for copying this register's
9424 value. I'm not sure this is right, but it works so far. */
9425 enum machine_mode best_mode = VOIDmode;
9426
9427 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9428 mode != VOIDmode;
9429 mode = GET_MODE_WIDER_MODE (mode))
9430 if (HARD_REGNO_MODE_OK (regno, mode)
9431 && HARD_REGNO_NREGS (regno, mode) == 1)
9432 best_mode = mode;
9433
9434 if (best_mode == VOIDmode)
9435 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9436 mode != VOIDmode;
9437 mode = GET_MODE_WIDER_MODE (mode))
9438 if (HARD_REGNO_MODE_OK (regno, mode)
9439 && (mov_optab->handlers[(int) mode].insn_code
9440 != CODE_FOR_nothing))
9441 best_mode = mode;
9442
9443 mode = best_mode;
9444 if (mode == VOIDmode)
9445 abort ();
9446
9447 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9448 if (size % align != 0)
9449 size = CEIL (size, align) * align;
9450 apply_args_reg_offset[regno] = size;
9451 size += GET_MODE_SIZE (mode);
9452 apply_args_mode[regno] = mode;
9453 }
9454 else
9455 {
9456 apply_args_mode[regno] = VOIDmode;
9457 apply_args_reg_offset[regno] = 0;
9458 }
9459 }
9460 return size;
9461 }
9462
9463 /* Return the size required for the block returned by __builtin_apply,
9464 and initialize apply_result_mode. */
9465
9466 static int
9467 apply_result_size ()
9468 {
9469 static int size = -1;
9470 int align, regno;
9471 enum machine_mode mode;
9472
9473 /* The values computed by this function never change. */
9474 if (size < 0)
9475 {
9476 size = 0;
9477
9478 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9479 if (FUNCTION_VALUE_REGNO_P (regno))
9480 {
9481 /* Search for the proper mode for copying this register's
9482 value. I'm not sure this is right, but it works so far. */
9483 enum machine_mode best_mode = VOIDmode;
9484
9485 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9486 mode != TImode;
9487 mode = GET_MODE_WIDER_MODE (mode))
9488 if (HARD_REGNO_MODE_OK (regno, mode))
9489 best_mode = mode;
9490
9491 if (best_mode == VOIDmode)
9492 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9493 mode != VOIDmode;
9494 mode = GET_MODE_WIDER_MODE (mode))
9495 if (HARD_REGNO_MODE_OK (regno, mode)
9496 && (mov_optab->handlers[(int) mode].insn_code
9497 != CODE_FOR_nothing))
9498 best_mode = mode;
9499
9500 mode = best_mode;
9501 if (mode == VOIDmode)
9502 abort ();
9503
9504 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9505 if (size % align != 0)
9506 size = CEIL (size, align) * align;
9507 size += GET_MODE_SIZE (mode);
9508 apply_result_mode[regno] = mode;
9509 }
9510 else
9511 apply_result_mode[regno] = VOIDmode;
9512
9513 /* Allow targets that use untyped_call and untyped_return to override
9514 the size so that machine-specific information can be stored here. */
9515 #ifdef APPLY_RESULT_SIZE
9516 size = APPLY_RESULT_SIZE;
9517 #endif
9518 }
9519 return size;
9520 }
9521
9522 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9523 /* Create a vector describing the result block RESULT. If SAVEP is true,
9524 the result block is used to save the values; otherwise it is used to
9525 restore the values. */
9526
9527 static rtx
9528 result_vector (savep, result)
9529 int savep;
9530 rtx result;
9531 {
9532 int regno, size, align, nelts;
9533 enum machine_mode mode;
9534 rtx reg, mem;
9535 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9536
9537 size = nelts = 0;
9538 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9539 if ((mode = apply_result_mode[regno]) != VOIDmode)
9540 {
9541 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9542 if (size % align != 0)
9543 size = CEIL (size, align) * align;
9544 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9545 mem = change_address (result, mode,
9546 plus_constant (XEXP (result, 0), size));
9547 savevec[nelts++] = (savep
9548 ? gen_rtx (SET, VOIDmode, mem, reg)
9549 : gen_rtx (SET, VOIDmode, reg, mem));
9550 size += GET_MODE_SIZE (mode);
9551 }
9552 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9553 }
9554 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9555
9556 /* Save the state required to perform an untyped call with the same
9557 arguments as were passed to the current function. */
9558
9559 static rtx
9560 expand_builtin_apply_args ()
9561 {
9562 rtx registers;
9563 int size, align, regno;
9564 enum machine_mode mode;
9565
9566 /* Create a block where the arg-pointer, structure value address,
9567 and argument registers can be saved. */
9568 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9569
9570 /* Walk past the arg-pointer and structure value address. */
9571 size = GET_MODE_SIZE (Pmode);
9572 if (struct_value_rtx)
9573 size += GET_MODE_SIZE (Pmode);
9574
9575 /* Save each register used in calling a function to the block. */
9576 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9577 if ((mode = apply_args_mode[regno]) != VOIDmode)
9578 {
9579 rtx tem;
9580
9581 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9582 if (size % align != 0)
9583 size = CEIL (size, align) * align;
9584
9585 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9586
9587 #ifdef STACK_REGS
9588 /* For reg-stack.c's stack register household.
9589 Compare with a similar piece of code in function.c. */
9590
9591 emit_insn (gen_rtx (USE, mode, tem));
9592 #endif
9593
9594 emit_move_insn (change_address (registers, mode,
9595 plus_constant (XEXP (registers, 0),
9596 size)),
9597 tem);
9598 size += GET_MODE_SIZE (mode);
9599 }
9600
9601 /* Save the arg pointer to the block. */
9602 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9603 copy_to_reg (virtual_incoming_args_rtx));
9604 size = GET_MODE_SIZE (Pmode);
9605
9606 /* Save the structure value address unless this is passed as an
9607 "invisible" first argument. */
9608 if (struct_value_incoming_rtx)
9609 {
9610 emit_move_insn (change_address (registers, Pmode,
9611 plus_constant (XEXP (registers, 0),
9612 size)),
9613 copy_to_reg (struct_value_incoming_rtx));
9614 size += GET_MODE_SIZE (Pmode);
9615 }
9616
9617 /* Return the address of the block. */
9618 return copy_addr_to_reg (XEXP (registers, 0));
9619 }
9620
9621 /* Perform an untyped call and save the state required to perform an
9622 untyped return of whatever value was returned by the given function. */
9623
9624 static rtx
9625 expand_builtin_apply (function, arguments, argsize)
9626 rtx function, arguments, argsize;
9627 {
9628 int size, align, regno;
9629 enum machine_mode mode;
9630 rtx incoming_args, result, reg, dest, call_insn;
9631 rtx old_stack_level = 0;
9632 rtx call_fusage = 0;
9633
9634 /* Create a block where the return registers can be saved. */
9635 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9636
9637 /* ??? The argsize value should be adjusted here. */
9638
9639 /* Fetch the arg pointer from the ARGUMENTS block. */
9640 incoming_args = gen_reg_rtx (Pmode);
9641 emit_move_insn (incoming_args,
9642 gen_rtx (MEM, Pmode, arguments));
9643 #ifndef STACK_GROWS_DOWNWARD
9644 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9645 incoming_args, 0, OPTAB_LIB_WIDEN);
9646 #endif
9647
9648 /* Perform postincrements before actually calling the function. */
9649 emit_queue ();
9650
9651 /* Push a new argument block and copy the arguments. */
9652 do_pending_stack_adjust ();
9653
9654 /* Save the stack with nonlocal if available */
9655 #ifdef HAVE_save_stack_nonlocal
9656 if (HAVE_save_stack_nonlocal)
9657 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9658 else
9659 #endif
9660 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9661
9662 /* Push a block of memory onto the stack to store the memory arguments.
9663 Save the address in a register, and copy the memory arguments. ??? I
9664 haven't figured out how the calling convention macros effect this,
9665 but it's likely that the source and/or destination addresses in
9666 the block copy will need updating in machine specific ways. */
9667 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9668 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9669 gen_rtx (MEM, BLKmode, incoming_args),
9670 argsize,
9671 PARM_BOUNDARY / BITS_PER_UNIT);
9672
9673 /* Refer to the argument block. */
9674 apply_args_size ();
9675 arguments = gen_rtx (MEM, BLKmode, arguments);
9676
9677 /* Walk past the arg-pointer and structure value address. */
9678 size = GET_MODE_SIZE (Pmode);
9679 if (struct_value_rtx)
9680 size += GET_MODE_SIZE (Pmode);
9681
9682 /* Restore each of the registers previously saved. Make USE insns
9683 for each of these registers for use in making the call. */
9684 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9685 if ((mode = apply_args_mode[regno]) != VOIDmode)
9686 {
9687 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9688 if (size % align != 0)
9689 size = CEIL (size, align) * align;
9690 reg = gen_rtx (REG, mode, regno);
9691 emit_move_insn (reg,
9692 change_address (arguments, mode,
9693 plus_constant (XEXP (arguments, 0),
9694 size)));
9695
9696 use_reg (&call_fusage, reg);
9697 size += GET_MODE_SIZE (mode);
9698 }
9699
9700 /* Restore the structure value address unless this is passed as an
9701 "invisible" first argument. */
9702 size = GET_MODE_SIZE (Pmode);
9703 if (struct_value_rtx)
9704 {
9705 rtx value = gen_reg_rtx (Pmode);
9706 emit_move_insn (value,
9707 change_address (arguments, Pmode,
9708 plus_constant (XEXP (arguments, 0),
9709 size)));
9710 emit_move_insn (struct_value_rtx, value);
9711 if (GET_CODE (struct_value_rtx) == REG)
9712 use_reg (&call_fusage, struct_value_rtx);
9713 size += GET_MODE_SIZE (Pmode);
9714 }
9715
9716 /* All arguments and registers used for the call are set up by now! */
9717 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9718
9719 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9720 and we don't want to load it into a register as an optimization,
9721 because prepare_call_address already did it if it should be done. */
9722 if (GET_CODE (function) != SYMBOL_REF)
9723 function = memory_address (FUNCTION_MODE, function);
9724
9725 /* Generate the actual call instruction and save the return value. */
9726 #ifdef HAVE_untyped_call
9727 if (HAVE_untyped_call)
9728 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9729 result, result_vector (1, result)));
9730 else
9731 #endif
9732 #ifdef HAVE_call_value
9733 if (HAVE_call_value)
9734 {
9735 rtx valreg = 0;
9736
9737 /* Locate the unique return register. It is not possible to
9738 express a call that sets more than one return register using
9739 call_value; use untyped_call for that. In fact, untyped_call
9740 only needs to save the return registers in the given block. */
9741 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9742 if ((mode = apply_result_mode[regno]) != VOIDmode)
9743 {
9744 if (valreg)
9745 abort (); /* HAVE_untyped_call required. */
9746 valreg = gen_rtx (REG, mode, regno);
9747 }
9748
9749 emit_call_insn (gen_call_value (valreg,
9750 gen_rtx (MEM, FUNCTION_MODE, function),
9751 const0_rtx, NULL_RTX, const0_rtx));
9752
9753 emit_move_insn (change_address (result, GET_MODE (valreg),
9754 XEXP (result, 0)),
9755 valreg);
9756 }
9757 else
9758 #endif
9759 abort ();
9760
9761 /* Find the CALL insn we just emitted. */
9762 for (call_insn = get_last_insn ();
9763 call_insn && GET_CODE (call_insn) != CALL_INSN;
9764 call_insn = PREV_INSN (call_insn))
9765 ;
9766
9767 if (! call_insn)
9768 abort ();
9769
9770 /* Put the register usage information on the CALL. If there is already
9771 some usage information, put ours at the end. */
9772 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9773 {
9774 rtx link;
9775
9776 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9777 link = XEXP (link, 1))
9778 ;
9779
9780 XEXP (link, 1) = call_fusage;
9781 }
9782 else
9783 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9784
9785 /* Restore the stack. */
9786 #ifdef HAVE_save_stack_nonlocal
9787 if (HAVE_save_stack_nonlocal)
9788 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9789 else
9790 #endif
9791 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9792
9793 /* Return the address of the result block. */
9794 return copy_addr_to_reg (XEXP (result, 0));
9795 }
9796
9797 /* Perform an untyped return. */
9798
9799 static void
9800 expand_builtin_return (result)
9801 rtx result;
9802 {
9803 int size, align, regno;
9804 enum machine_mode mode;
9805 rtx reg;
9806 rtx call_fusage = 0;
9807
9808 apply_result_size ();
9809 result = gen_rtx (MEM, BLKmode, result);
9810
9811 #ifdef HAVE_untyped_return
9812 if (HAVE_untyped_return)
9813 {
9814 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9815 emit_barrier ();
9816 return;
9817 }
9818 #endif
9819
9820 /* Restore the return value and note that each value is used. */
9821 size = 0;
9822 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9823 if ((mode = apply_result_mode[regno]) != VOIDmode)
9824 {
9825 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9826 if (size % align != 0)
9827 size = CEIL (size, align) * align;
9828 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9829 emit_move_insn (reg,
9830 change_address (result, mode,
9831 plus_constant (XEXP (result, 0),
9832 size)));
9833
9834 push_to_sequence (call_fusage);
9835 emit_insn (gen_rtx (USE, VOIDmode, reg));
9836 call_fusage = get_insns ();
9837 end_sequence ();
9838 size += GET_MODE_SIZE (mode);
9839 }
9840
9841 /* Put the USE insns before the return. */
9842 emit_insns (call_fusage);
9843
9844 /* Return whatever values was restored by jumping directly to the end
9845 of the function. */
9846 expand_null_return ();
9847 }
9848 \f
9849 /* Expand code for a post- or pre- increment or decrement
9850 and return the RTX for the result.
9851 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9852
9853 static rtx
9854 expand_increment (exp, post, ignore)
9855 register tree exp;
9856 int post, ignore;
9857 {
9858 register rtx op0, op1;
9859 register rtx temp, value;
9860 register tree incremented = TREE_OPERAND (exp, 0);
9861 optab this_optab = add_optab;
9862 int icode;
9863 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9864 int op0_is_copy = 0;
9865 int single_insn = 0;
9866 /* 1 means we can't store into OP0 directly,
9867 because it is a subreg narrower than a word,
9868 and we don't dare clobber the rest of the word. */
9869 int bad_subreg = 0;
9870
9871 if (output_bytecode)
9872 {
9873 bc_expand_expr (exp);
9874 return NULL_RTX;
9875 }
9876
9877 /* Stabilize any component ref that might need to be
9878 evaluated more than once below. */
9879 if (!post
9880 || TREE_CODE (incremented) == BIT_FIELD_REF
9881 || (TREE_CODE (incremented) == COMPONENT_REF
9882 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9883 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9884 incremented = stabilize_reference (incremented);
9885 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9886 ones into save exprs so that they don't accidentally get evaluated
9887 more than once by the code below. */
9888 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9889 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9890 incremented = save_expr (incremented);
9891
9892 /* Compute the operands as RTX.
9893 Note whether OP0 is the actual lvalue or a copy of it:
9894 I believe it is a copy iff it is a register or subreg
9895 and insns were generated in computing it. */
9896
9897 temp = get_last_insn ();
9898 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9899
9900 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9901 in place but instead must do sign- or zero-extension during assignment,
9902 so we copy it into a new register and let the code below use it as
9903 a copy.
9904
9905 Note that we can safely modify this SUBREG since it is know not to be
9906 shared (it was made by the expand_expr call above). */
9907
9908 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9909 {
9910 if (post)
9911 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9912 else
9913 bad_subreg = 1;
9914 }
9915 else if (GET_CODE (op0) == SUBREG
9916 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9917 {
9918 /* We cannot increment this SUBREG in place. If we are
9919 post-incrementing, get a copy of the old value. Otherwise,
9920 just mark that we cannot increment in place. */
9921 if (post)
9922 op0 = copy_to_reg (op0);
9923 else
9924 bad_subreg = 1;
9925 }
9926
9927 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9928 && temp != get_last_insn ());
9929 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9930 EXPAND_MEMORY_USE_BAD);
9931
9932 /* Decide whether incrementing or decrementing. */
9933 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9934 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9935 this_optab = sub_optab;
9936
9937 /* Convert decrement by a constant into a negative increment. */
9938 if (this_optab == sub_optab
9939 && GET_CODE (op1) == CONST_INT)
9940 {
9941 op1 = GEN_INT (- INTVAL (op1));
9942 this_optab = add_optab;
9943 }
9944
9945 /* For a preincrement, see if we can do this with a single instruction. */
9946 if (!post)
9947 {
9948 icode = (int) this_optab->handlers[(int) mode].insn_code;
9949 if (icode != (int) CODE_FOR_nothing
9950 /* Make sure that OP0 is valid for operands 0 and 1
9951 of the insn we want to queue. */
9952 && (*insn_operand_predicate[icode][0]) (op0, mode)
9953 && (*insn_operand_predicate[icode][1]) (op0, mode)
9954 && (*insn_operand_predicate[icode][2]) (op1, mode))
9955 single_insn = 1;
9956 }
9957
9958 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9959 then we cannot just increment OP0. We must therefore contrive to
9960 increment the original value. Then, for postincrement, we can return
9961 OP0 since it is a copy of the old value. For preincrement, expand here
9962 unless we can do it with a single insn.
9963
9964 Likewise if storing directly into OP0 would clobber high bits
9965 we need to preserve (bad_subreg). */
9966 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9967 {
9968 /* This is the easiest way to increment the value wherever it is.
9969 Problems with multiple evaluation of INCREMENTED are prevented
9970 because either (1) it is a component_ref or preincrement,
9971 in which case it was stabilized above, or (2) it is an array_ref
9972 with constant index in an array in a register, which is
9973 safe to reevaluate. */
9974 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9975 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9976 ? MINUS_EXPR : PLUS_EXPR),
9977 TREE_TYPE (exp),
9978 incremented,
9979 TREE_OPERAND (exp, 1));
9980
9981 while (TREE_CODE (incremented) == NOP_EXPR
9982 || TREE_CODE (incremented) == CONVERT_EXPR)
9983 {
9984 newexp = convert (TREE_TYPE (incremented), newexp);
9985 incremented = TREE_OPERAND (incremented, 0);
9986 }
9987
9988 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9989 return post ? op0 : temp;
9990 }
9991
9992 if (post)
9993 {
9994 /* We have a true reference to the value in OP0.
9995 If there is an insn to add or subtract in this mode, queue it.
9996 Queueing the increment insn avoids the register shuffling
9997 that often results if we must increment now and first save
9998 the old value for subsequent use. */
9999
10000 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
10001 op0 = stabilize (op0);
10002 #endif
10003
10004 icode = (int) this_optab->handlers[(int) mode].insn_code;
10005 if (icode != (int) CODE_FOR_nothing
10006 /* Make sure that OP0 is valid for operands 0 and 1
10007 of the insn we want to queue. */
10008 && (*insn_operand_predicate[icode][0]) (op0, mode)
10009 && (*insn_operand_predicate[icode][1]) (op0, mode))
10010 {
10011 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10012 op1 = force_reg (mode, op1);
10013
10014 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10015 }
10016 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10017 {
10018 rtx addr = (general_operand (XEXP (op0, 0), mode)
10019 ? force_reg (Pmode, XEXP (op0, 0))
10020 : copy_to_reg (XEXP (op0, 0)));
10021 rtx temp, result;
10022
10023 op0 = change_address (op0, VOIDmode, addr);
10024 temp = force_reg (GET_MODE (op0), op0);
10025 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10026 op1 = force_reg (mode, op1);
10027
10028 /* The increment queue is LIFO, thus we have to `queue'
10029 the instructions in reverse order. */
10030 enqueue_insn (op0, gen_move_insn (op0, temp));
10031 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10032 return result;
10033 }
10034 }
10035
10036 /* Preincrement, or we can't increment with one simple insn. */
10037 if (post)
10038 /* Save a copy of the value before inc or dec, to return it later. */
10039 temp = value = copy_to_reg (op0);
10040 else
10041 /* Arrange to return the incremented value. */
10042 /* Copy the rtx because expand_binop will protect from the queue,
10043 and the results of that would be invalid for us to return
10044 if our caller does emit_queue before using our result. */
10045 temp = copy_rtx (value = op0);
10046
10047 /* Increment however we can. */
10048 op1 = expand_binop (mode, this_optab, value, op1,
10049 flag_check_memory_usage ? NULL_RTX : op0,
10050 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10051 /* Make sure the value is stored into OP0. */
10052 if (op1 != op0)
10053 emit_move_insn (op0, op1);
10054
10055 return temp;
10056 }
10057 \f
10058 /* Expand all function calls contained within EXP, innermost ones first.
10059 But don't look within expressions that have sequence points.
10060 For each CALL_EXPR, record the rtx for its value
10061 in the CALL_EXPR_RTL field. */
10062
10063 static void
10064 preexpand_calls (exp)
10065 tree exp;
10066 {
10067 register int nops, i;
10068 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10069
10070 if (! do_preexpand_calls)
10071 return;
10072
10073 /* Only expressions and references can contain calls. */
10074
10075 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10076 return;
10077
10078 switch (TREE_CODE (exp))
10079 {
10080 case CALL_EXPR:
10081 /* Do nothing if already expanded. */
10082 if (CALL_EXPR_RTL (exp) != 0
10083 /* Do nothing if the call returns a variable-sized object. */
10084 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10085 /* Do nothing to built-in functions. */
10086 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10087 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10088 == FUNCTION_DECL)
10089 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10090 return;
10091
10092 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10093 return;
10094
10095 case COMPOUND_EXPR:
10096 case COND_EXPR:
10097 case TRUTH_ANDIF_EXPR:
10098 case TRUTH_ORIF_EXPR:
10099 /* If we find one of these, then we can be sure
10100 the adjust will be done for it (since it makes jumps).
10101 Do it now, so that if this is inside an argument
10102 of a function, we don't get the stack adjustment
10103 after some other args have already been pushed. */
10104 do_pending_stack_adjust ();
10105 return;
10106
10107 case BLOCK:
10108 case RTL_EXPR:
10109 case WITH_CLEANUP_EXPR:
10110 case CLEANUP_POINT_EXPR:
10111 case TRY_CATCH_EXPR:
10112 return;
10113
10114 case SAVE_EXPR:
10115 if (SAVE_EXPR_RTL (exp) != 0)
10116 return;
10117
10118 default:
10119 break;
10120 }
10121
10122 nops = tree_code_length[(int) TREE_CODE (exp)];
10123 for (i = 0; i < nops; i++)
10124 if (TREE_OPERAND (exp, i) != 0)
10125 {
10126 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10127 if (type == 'e' || type == '<' || type == '1' || type == '2'
10128 || type == 'r')
10129 preexpand_calls (TREE_OPERAND (exp, i));
10130 }
10131 }
10132 \f
10133 /* At the start of a function, record that we have no previously-pushed
10134 arguments waiting to be popped. */
10135
10136 void
10137 init_pending_stack_adjust ()
10138 {
10139 pending_stack_adjust = 0;
10140 }
10141
10142 /* When exiting from function, if safe, clear out any pending stack adjust
10143 so the adjustment won't get done. */
10144
10145 void
10146 clear_pending_stack_adjust ()
10147 {
10148 #ifdef EXIT_IGNORE_STACK
10149 if (optimize > 0
10150 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
10151 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10152 && ! flag_inline_functions)
10153 pending_stack_adjust = 0;
10154 #endif
10155 }
10156
10157 /* Pop any previously-pushed arguments that have not been popped yet. */
10158
10159 void
10160 do_pending_stack_adjust ()
10161 {
10162 if (inhibit_defer_pop == 0)
10163 {
10164 if (pending_stack_adjust != 0)
10165 adjust_stack (GEN_INT (pending_stack_adjust));
10166 pending_stack_adjust = 0;
10167 }
10168 }
10169 \f
10170 /* Expand conditional expressions. */
10171
10172 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10173 LABEL is an rtx of code CODE_LABEL, in this function and all the
10174 functions here. */
10175
10176 void
10177 jumpifnot (exp, label)
10178 tree exp;
10179 rtx label;
10180 {
10181 do_jump (exp, label, NULL_RTX);
10182 }
10183
10184 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10185
10186 void
10187 jumpif (exp, label)
10188 tree exp;
10189 rtx label;
10190 {
10191 do_jump (exp, NULL_RTX, label);
10192 }
10193
10194 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10195 the result is zero, or IF_TRUE_LABEL if the result is one.
10196 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10197 meaning fall through in that case.
10198
10199 do_jump always does any pending stack adjust except when it does not
10200 actually perform a jump. An example where there is no jump
10201 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10202
10203 This function is responsible for optimizing cases such as
10204 &&, || and comparison operators in EXP. */
10205
10206 void
10207 do_jump (exp, if_false_label, if_true_label)
10208 tree exp;
10209 rtx if_false_label, if_true_label;
10210 {
10211 register enum tree_code code = TREE_CODE (exp);
10212 /* Some cases need to create a label to jump to
10213 in order to properly fall through.
10214 These cases set DROP_THROUGH_LABEL nonzero. */
10215 rtx drop_through_label = 0;
10216 rtx temp;
10217 rtx comparison = 0;
10218 int i;
10219 tree type;
10220 enum machine_mode mode;
10221
10222 emit_queue ();
10223
10224 switch (code)
10225 {
10226 case ERROR_MARK:
10227 break;
10228
10229 case INTEGER_CST:
10230 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10231 if (temp)
10232 emit_jump (temp);
10233 break;
10234
10235 #if 0
10236 /* This is not true with #pragma weak */
10237 case ADDR_EXPR:
10238 /* The address of something can never be zero. */
10239 if (if_true_label)
10240 emit_jump (if_true_label);
10241 break;
10242 #endif
10243
10244 case NOP_EXPR:
10245 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10246 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10247 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10248 goto normal;
10249 case CONVERT_EXPR:
10250 /* If we are narrowing the operand, we have to do the compare in the
10251 narrower mode. */
10252 if ((TYPE_PRECISION (TREE_TYPE (exp))
10253 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10254 goto normal;
10255 case NON_LVALUE_EXPR:
10256 case REFERENCE_EXPR:
10257 case ABS_EXPR:
10258 case NEGATE_EXPR:
10259 case LROTATE_EXPR:
10260 case RROTATE_EXPR:
10261 /* These cannot change zero->non-zero or vice versa. */
10262 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10263 break;
10264
10265 #if 0
10266 /* This is never less insns than evaluating the PLUS_EXPR followed by
10267 a test and can be longer if the test is eliminated. */
10268 case PLUS_EXPR:
10269 /* Reduce to minus. */
10270 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10271 TREE_OPERAND (exp, 0),
10272 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10273 TREE_OPERAND (exp, 1))));
10274 /* Process as MINUS. */
10275 #endif
10276
10277 case MINUS_EXPR:
10278 /* Non-zero iff operands of minus differ. */
10279 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10280 TREE_OPERAND (exp, 0),
10281 TREE_OPERAND (exp, 1)),
10282 NE, NE);
10283 break;
10284
10285 case BIT_AND_EXPR:
10286 /* If we are AND'ing with a small constant, do this comparison in the
10287 smallest type that fits. If the machine doesn't have comparisons
10288 that small, it will be converted back to the wider comparison.
10289 This helps if we are testing the sign bit of a narrower object.
10290 combine can't do this for us because it can't know whether a
10291 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10292
10293 if (! SLOW_BYTE_ACCESS
10294 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10295 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10296 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10297 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10298 && (type = type_for_mode (mode, 1)) != 0
10299 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10300 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10301 != CODE_FOR_nothing))
10302 {
10303 do_jump (convert (type, exp), if_false_label, if_true_label);
10304 break;
10305 }
10306 goto normal;
10307
10308 case TRUTH_NOT_EXPR:
10309 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10310 break;
10311
10312 case TRUTH_ANDIF_EXPR:
10313 if (if_false_label == 0)
10314 if_false_label = drop_through_label = gen_label_rtx ();
10315 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10316 start_cleanup_deferral ();
10317 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10318 end_cleanup_deferral ();
10319 break;
10320
10321 case TRUTH_ORIF_EXPR:
10322 if (if_true_label == 0)
10323 if_true_label = drop_through_label = gen_label_rtx ();
10324 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10325 start_cleanup_deferral ();
10326 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10327 end_cleanup_deferral ();
10328 break;
10329
10330 case COMPOUND_EXPR:
10331 push_temp_slots ();
10332 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10333 preserve_temp_slots (NULL_RTX);
10334 free_temp_slots ();
10335 pop_temp_slots ();
10336 emit_queue ();
10337 do_pending_stack_adjust ();
10338 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10339 break;
10340
10341 case COMPONENT_REF:
10342 case BIT_FIELD_REF:
10343 case ARRAY_REF:
10344 {
10345 int bitsize, bitpos, unsignedp;
10346 enum machine_mode mode;
10347 tree type;
10348 tree offset;
10349 int volatilep = 0;
10350 int alignment;
10351
10352 /* Get description of this reference. We don't actually care
10353 about the underlying object here. */
10354 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10355 &mode, &unsignedp, &volatilep,
10356 &alignment);
10357
10358 type = type_for_size (bitsize, unsignedp);
10359 if (! SLOW_BYTE_ACCESS
10360 && type != 0 && bitsize >= 0
10361 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10362 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10363 != CODE_FOR_nothing))
10364 {
10365 do_jump (convert (type, exp), if_false_label, if_true_label);
10366 break;
10367 }
10368 goto normal;
10369 }
10370
10371 case COND_EXPR:
10372 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10373 if (integer_onep (TREE_OPERAND (exp, 1))
10374 && integer_zerop (TREE_OPERAND (exp, 2)))
10375 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10376
10377 else if (integer_zerop (TREE_OPERAND (exp, 1))
10378 && integer_onep (TREE_OPERAND (exp, 2)))
10379 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10380
10381 else
10382 {
10383 register rtx label1 = gen_label_rtx ();
10384 drop_through_label = gen_label_rtx ();
10385
10386 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10387
10388 start_cleanup_deferral ();
10389 /* Now the THEN-expression. */
10390 do_jump (TREE_OPERAND (exp, 1),
10391 if_false_label ? if_false_label : drop_through_label,
10392 if_true_label ? if_true_label : drop_through_label);
10393 /* In case the do_jump just above never jumps. */
10394 do_pending_stack_adjust ();
10395 emit_label (label1);
10396
10397 /* Now the ELSE-expression. */
10398 do_jump (TREE_OPERAND (exp, 2),
10399 if_false_label ? if_false_label : drop_through_label,
10400 if_true_label ? if_true_label : drop_through_label);
10401 end_cleanup_deferral ();
10402 }
10403 break;
10404
10405 case EQ_EXPR:
10406 {
10407 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10408
10409 if (integer_zerop (TREE_OPERAND (exp, 1)))
10410 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10411 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10412 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10413 do_jump
10414 (fold
10415 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10416 fold (build (EQ_EXPR, TREE_TYPE (exp),
10417 fold (build1 (REALPART_EXPR,
10418 TREE_TYPE (inner_type),
10419 TREE_OPERAND (exp, 0))),
10420 fold (build1 (REALPART_EXPR,
10421 TREE_TYPE (inner_type),
10422 TREE_OPERAND (exp, 1))))),
10423 fold (build (EQ_EXPR, TREE_TYPE (exp),
10424 fold (build1 (IMAGPART_EXPR,
10425 TREE_TYPE (inner_type),
10426 TREE_OPERAND (exp, 0))),
10427 fold (build1 (IMAGPART_EXPR,
10428 TREE_TYPE (inner_type),
10429 TREE_OPERAND (exp, 1))))))),
10430 if_false_label, if_true_label);
10431 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10432 && !can_compare_p (TYPE_MODE (inner_type)))
10433 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10434 else
10435 comparison = compare (exp, EQ, EQ);
10436 break;
10437 }
10438
10439 case NE_EXPR:
10440 {
10441 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10442
10443 if (integer_zerop (TREE_OPERAND (exp, 1)))
10444 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10445 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10446 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10447 do_jump
10448 (fold
10449 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10450 fold (build (NE_EXPR, TREE_TYPE (exp),
10451 fold (build1 (REALPART_EXPR,
10452 TREE_TYPE (inner_type),
10453 TREE_OPERAND (exp, 0))),
10454 fold (build1 (REALPART_EXPR,
10455 TREE_TYPE (inner_type),
10456 TREE_OPERAND (exp, 1))))),
10457 fold (build (NE_EXPR, TREE_TYPE (exp),
10458 fold (build1 (IMAGPART_EXPR,
10459 TREE_TYPE (inner_type),
10460 TREE_OPERAND (exp, 0))),
10461 fold (build1 (IMAGPART_EXPR,
10462 TREE_TYPE (inner_type),
10463 TREE_OPERAND (exp, 1))))))),
10464 if_false_label, if_true_label);
10465 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10466 && !can_compare_p (TYPE_MODE (inner_type)))
10467 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10468 else
10469 comparison = compare (exp, NE, NE);
10470 break;
10471 }
10472
10473 case LT_EXPR:
10474 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10475 == MODE_INT)
10476 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10477 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10478 else
10479 comparison = compare (exp, LT, LTU);
10480 break;
10481
10482 case LE_EXPR:
10483 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10484 == MODE_INT)
10485 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10486 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10487 else
10488 comparison = compare (exp, LE, LEU);
10489 break;
10490
10491 case GT_EXPR:
10492 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10493 == MODE_INT)
10494 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10495 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10496 else
10497 comparison = compare (exp, GT, GTU);
10498 break;
10499
10500 case GE_EXPR:
10501 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10502 == MODE_INT)
10503 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10504 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10505 else
10506 comparison = compare (exp, GE, GEU);
10507 break;
10508
10509 default:
10510 normal:
10511 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10512 #if 0
10513 /* This is not needed any more and causes poor code since it causes
10514 comparisons and tests from non-SI objects to have different code
10515 sequences. */
10516 /* Copy to register to avoid generating bad insns by cse
10517 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10518 if (!cse_not_expected && GET_CODE (temp) == MEM)
10519 temp = copy_to_reg (temp);
10520 #endif
10521 do_pending_stack_adjust ();
10522 if (GET_CODE (temp) == CONST_INT)
10523 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10524 else if (GET_CODE (temp) == LABEL_REF)
10525 comparison = const_true_rtx;
10526 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10527 && !can_compare_p (GET_MODE (temp)))
10528 /* Note swapping the labels gives us not-equal. */
10529 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10530 else if (GET_MODE (temp) != VOIDmode)
10531 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10532 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10533 GET_MODE (temp), NULL_RTX, 0);
10534 else
10535 abort ();
10536 }
10537
10538 /* Do any postincrements in the expression that was tested. */
10539 emit_queue ();
10540
10541 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10542 straight into a conditional jump instruction as the jump condition.
10543 Otherwise, all the work has been done already. */
10544
10545 if (comparison == const_true_rtx)
10546 {
10547 if (if_true_label)
10548 emit_jump (if_true_label);
10549 }
10550 else if (comparison == const0_rtx)
10551 {
10552 if (if_false_label)
10553 emit_jump (if_false_label);
10554 }
10555 else if (comparison)
10556 do_jump_for_compare (comparison, if_false_label, if_true_label);
10557
10558 if (drop_through_label)
10559 {
10560 /* If do_jump produces code that might be jumped around,
10561 do any stack adjusts from that code, before the place
10562 where control merges in. */
10563 do_pending_stack_adjust ();
10564 emit_label (drop_through_label);
10565 }
10566 }
10567 \f
10568 /* Given a comparison expression EXP for values too wide to be compared
10569 with one insn, test the comparison and jump to the appropriate label.
10570 The code of EXP is ignored; we always test GT if SWAP is 0,
10571 and LT if SWAP is 1. */
10572
10573 static void
10574 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10575 tree exp;
10576 int swap;
10577 rtx if_false_label, if_true_label;
10578 {
10579 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10580 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10581 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10582 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10583 rtx drop_through_label = 0;
10584 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10585 int i;
10586
10587 if (! if_true_label || ! if_false_label)
10588 drop_through_label = gen_label_rtx ();
10589 if (! if_true_label)
10590 if_true_label = drop_through_label;
10591 if (! if_false_label)
10592 if_false_label = drop_through_label;
10593
10594 /* Compare a word at a time, high order first. */
10595 for (i = 0; i < nwords; i++)
10596 {
10597 rtx comp;
10598 rtx op0_word, op1_word;
10599
10600 if (WORDS_BIG_ENDIAN)
10601 {
10602 op0_word = operand_subword_force (op0, i, mode);
10603 op1_word = operand_subword_force (op1, i, mode);
10604 }
10605 else
10606 {
10607 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10608 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10609 }
10610
10611 /* All but high-order word must be compared as unsigned. */
10612 comp = compare_from_rtx (op0_word, op1_word,
10613 (unsignedp || i > 0) ? GTU : GT,
10614 unsignedp, word_mode, NULL_RTX, 0);
10615 if (comp == const_true_rtx)
10616 emit_jump (if_true_label);
10617 else if (comp != const0_rtx)
10618 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10619
10620 /* Consider lower words only if these are equal. */
10621 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10622 NULL_RTX, 0);
10623 if (comp == const_true_rtx)
10624 emit_jump (if_false_label);
10625 else if (comp != const0_rtx)
10626 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10627 }
10628
10629 if (if_false_label)
10630 emit_jump (if_false_label);
10631 if (drop_through_label)
10632 emit_label (drop_through_label);
10633 }
10634
10635 /* Compare OP0 with OP1, word at a time, in mode MODE.
10636 UNSIGNEDP says to do unsigned comparison.
10637 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10638
10639 void
10640 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10641 enum machine_mode mode;
10642 int unsignedp;
10643 rtx op0, op1;
10644 rtx if_false_label, if_true_label;
10645 {
10646 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10647 rtx drop_through_label = 0;
10648 int i;
10649
10650 if (! if_true_label || ! if_false_label)
10651 drop_through_label = gen_label_rtx ();
10652 if (! if_true_label)
10653 if_true_label = drop_through_label;
10654 if (! if_false_label)
10655 if_false_label = drop_through_label;
10656
10657 /* Compare a word at a time, high order first. */
10658 for (i = 0; i < nwords; i++)
10659 {
10660 rtx comp;
10661 rtx op0_word, op1_word;
10662
10663 if (WORDS_BIG_ENDIAN)
10664 {
10665 op0_word = operand_subword_force (op0, i, mode);
10666 op1_word = operand_subword_force (op1, i, mode);
10667 }
10668 else
10669 {
10670 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10671 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10672 }
10673
10674 /* All but high-order word must be compared as unsigned. */
10675 comp = compare_from_rtx (op0_word, op1_word,
10676 (unsignedp || i > 0) ? GTU : GT,
10677 unsignedp, word_mode, NULL_RTX, 0);
10678 if (comp == const_true_rtx)
10679 emit_jump (if_true_label);
10680 else if (comp != const0_rtx)
10681 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10682
10683 /* Consider lower words only if these are equal. */
10684 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10685 NULL_RTX, 0);
10686 if (comp == const_true_rtx)
10687 emit_jump (if_false_label);
10688 else if (comp != const0_rtx)
10689 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10690 }
10691
10692 if (if_false_label)
10693 emit_jump (if_false_label);
10694 if (drop_through_label)
10695 emit_label (drop_through_label);
10696 }
10697
10698 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10699 with one insn, test the comparison and jump to the appropriate label. */
10700
10701 static void
10702 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10703 tree exp;
10704 rtx if_false_label, if_true_label;
10705 {
10706 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10707 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10708 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10709 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10710 int i;
10711 rtx drop_through_label = 0;
10712
10713 if (! if_false_label)
10714 drop_through_label = if_false_label = gen_label_rtx ();
10715
10716 for (i = 0; i < nwords; i++)
10717 {
10718 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10719 operand_subword_force (op1, i, mode),
10720 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10721 word_mode, NULL_RTX, 0);
10722 if (comp == const_true_rtx)
10723 emit_jump (if_false_label);
10724 else if (comp != const0_rtx)
10725 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10726 }
10727
10728 if (if_true_label)
10729 emit_jump (if_true_label);
10730 if (drop_through_label)
10731 emit_label (drop_through_label);
10732 }
10733 \f
10734 /* Jump according to whether OP0 is 0.
10735 We assume that OP0 has an integer mode that is too wide
10736 for the available compare insns. */
10737
10738 static void
10739 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10740 rtx op0;
10741 rtx if_false_label, if_true_label;
10742 {
10743 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10744 rtx part;
10745 int i;
10746 rtx drop_through_label = 0;
10747
10748 /* The fastest way of doing this comparison on almost any machine is to
10749 "or" all the words and compare the result. If all have to be loaded
10750 from memory and this is a very wide item, it's possible this may
10751 be slower, but that's highly unlikely. */
10752
10753 part = gen_reg_rtx (word_mode);
10754 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10755 for (i = 1; i < nwords && part != 0; i++)
10756 part = expand_binop (word_mode, ior_optab, part,
10757 operand_subword_force (op0, i, GET_MODE (op0)),
10758 part, 1, OPTAB_WIDEN);
10759
10760 if (part != 0)
10761 {
10762 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10763 NULL_RTX, 0);
10764
10765 if (comp == const_true_rtx)
10766 emit_jump (if_false_label);
10767 else if (comp == const0_rtx)
10768 emit_jump (if_true_label);
10769 else
10770 do_jump_for_compare (comp, if_false_label, if_true_label);
10771
10772 return;
10773 }
10774
10775 /* If we couldn't do the "or" simply, do this with a series of compares. */
10776 if (! if_false_label)
10777 drop_through_label = if_false_label = gen_label_rtx ();
10778
10779 for (i = 0; i < nwords; i++)
10780 {
10781 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10782 GET_MODE (op0)),
10783 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10784 if (comp == const_true_rtx)
10785 emit_jump (if_false_label);
10786 else if (comp != const0_rtx)
10787 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10788 }
10789
10790 if (if_true_label)
10791 emit_jump (if_true_label);
10792
10793 if (drop_through_label)
10794 emit_label (drop_through_label);
10795 }
10796
10797 /* Given a comparison expression in rtl form, output conditional branches to
10798 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10799
10800 static void
10801 do_jump_for_compare (comparison, if_false_label, if_true_label)
10802 rtx comparison, if_false_label, if_true_label;
10803 {
10804 if (if_true_label)
10805 {
10806 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10807 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10808 else
10809 abort ();
10810
10811 if (if_false_label)
10812 emit_jump (if_false_label);
10813 }
10814 else if (if_false_label)
10815 {
10816 rtx insn;
10817 rtx prev = get_last_insn ();
10818 rtx branch = 0;
10819
10820 /* Output the branch with the opposite condition. Then try to invert
10821 what is generated. If more than one insn is a branch, or if the
10822 branch is not the last insn written, abort. If we can't invert
10823 the branch, emit make a true label, redirect this jump to that,
10824 emit a jump to the false label and define the true label. */
10825
10826 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10827 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10828 else
10829 abort ();
10830
10831 /* Here we get the first insn that was just emitted. It used to be the
10832 case that, on some machines, emitting the branch would discard
10833 the previous compare insn and emit a replacement. This isn't
10834 done anymore, but abort if we see that PREV is deleted. */
10835
10836 if (prev == 0)
10837 insn = get_insns ();
10838 else if (INSN_DELETED_P (prev))
10839 abort ();
10840 else
10841 insn = NEXT_INSN (prev);
10842
10843 for (; insn; insn = NEXT_INSN (insn))
10844 if (GET_CODE (insn) == JUMP_INSN)
10845 {
10846 if (branch)
10847 abort ();
10848 branch = insn;
10849 }
10850
10851 if (branch != get_last_insn ())
10852 abort ();
10853
10854 JUMP_LABEL (branch) = if_false_label;
10855 if (! invert_jump (branch, if_false_label))
10856 {
10857 if_true_label = gen_label_rtx ();
10858 redirect_jump (branch, if_true_label);
10859 emit_jump (if_false_label);
10860 emit_label (if_true_label);
10861 }
10862 }
10863 }
10864 \f
10865 /* Generate code for a comparison expression EXP
10866 (including code to compute the values to be compared)
10867 and set (CC0) according to the result.
10868 SIGNED_CODE should be the rtx operation for this comparison for
10869 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10870
10871 We force a stack adjustment unless there are currently
10872 things pushed on the stack that aren't yet used. */
10873
10874 static rtx
10875 compare (exp, signed_code, unsigned_code)
10876 register tree exp;
10877 enum rtx_code signed_code, unsigned_code;
10878 {
10879 register rtx op0
10880 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10881 register rtx op1
10882 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10883 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10884 register enum machine_mode mode = TYPE_MODE (type);
10885 int unsignedp = TREE_UNSIGNED (type);
10886 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10887
10888 #ifdef HAVE_canonicalize_funcptr_for_compare
10889 /* If function pointers need to be "canonicalized" before they can
10890 be reliably compared, then canonicalize them. */
10891 if (HAVE_canonicalize_funcptr_for_compare
10892 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10893 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10894 == FUNCTION_TYPE))
10895 {
10896 rtx new_op0 = gen_reg_rtx (mode);
10897
10898 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10899 op0 = new_op0;
10900 }
10901
10902 if (HAVE_canonicalize_funcptr_for_compare
10903 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10904 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10905 == FUNCTION_TYPE))
10906 {
10907 rtx new_op1 = gen_reg_rtx (mode);
10908
10909 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10910 op1 = new_op1;
10911 }
10912 #endif
10913
10914 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10915 ((mode == BLKmode)
10916 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10917 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10918 }
10919
10920 /* Like compare but expects the values to compare as two rtx's.
10921 The decision as to signed or unsigned comparison must be made by the caller.
10922
10923 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10924 compared.
10925
10926 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10927 size of MODE should be used. */
10928
10929 rtx
10930 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10931 register rtx op0, op1;
10932 enum rtx_code code;
10933 int unsignedp;
10934 enum machine_mode mode;
10935 rtx size;
10936 int align;
10937 {
10938 rtx tem;
10939
10940 /* If one operand is constant, make it the second one. Only do this
10941 if the other operand is not constant as well. */
10942
10943 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10944 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10945 {
10946 tem = op0;
10947 op0 = op1;
10948 op1 = tem;
10949 code = swap_condition (code);
10950 }
10951
10952 if (flag_force_mem)
10953 {
10954 op0 = force_not_mem (op0);
10955 op1 = force_not_mem (op1);
10956 }
10957
10958 do_pending_stack_adjust ();
10959
10960 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10961 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10962 return tem;
10963
10964 #if 0
10965 /* There's no need to do this now that combine.c can eliminate lots of
10966 sign extensions. This can be less efficient in certain cases on other
10967 machines. */
10968
10969 /* If this is a signed equality comparison, we can do it as an
10970 unsigned comparison since zero-extension is cheaper than sign
10971 extension and comparisons with zero are done as unsigned. This is
10972 the case even on machines that can do fast sign extension, since
10973 zero-extension is easier to combine with other operations than
10974 sign-extension is. If we are comparing against a constant, we must
10975 convert it to what it would look like unsigned. */
10976 if ((code == EQ || code == NE) && ! unsignedp
10977 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10978 {
10979 if (GET_CODE (op1) == CONST_INT
10980 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10981 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10982 unsignedp = 1;
10983 }
10984 #endif
10985
10986 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10987
10988 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10989 }
10990 \f
10991 /* Generate code to calculate EXP using a store-flag instruction
10992 and return an rtx for the result. EXP is either a comparison
10993 or a TRUTH_NOT_EXPR whose operand is a comparison.
10994
10995 If TARGET is nonzero, store the result there if convenient.
10996
10997 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10998 cheap.
10999
11000 Return zero if there is no suitable set-flag instruction
11001 available on this machine.
11002
11003 Once expand_expr has been called on the arguments of the comparison,
11004 we are committed to doing the store flag, since it is not safe to
11005 re-evaluate the expression. We emit the store-flag insn by calling
11006 emit_store_flag, but only expand the arguments if we have a reason
11007 to believe that emit_store_flag will be successful. If we think that
11008 it will, but it isn't, we have to simulate the store-flag with a
11009 set/jump/set sequence. */
11010
11011 static rtx
11012 do_store_flag (exp, target, mode, only_cheap)
11013 tree exp;
11014 rtx target;
11015 enum machine_mode mode;
11016 int only_cheap;
11017 {
11018 enum rtx_code code;
11019 tree arg0, arg1, type;
11020 tree tem;
11021 enum machine_mode operand_mode;
11022 int invert = 0;
11023 int unsignedp;
11024 rtx op0, op1;
11025 enum insn_code icode;
11026 rtx subtarget = target;
11027 rtx result, label, pattern, jump_pat;
11028
11029 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
11030 result at the end. We can't simply invert the test since it would
11031 have already been inverted if it were valid. This case occurs for
11032 some floating-point comparisons. */
11033
11034 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11035 invert = 1, exp = TREE_OPERAND (exp, 0);
11036
11037 arg0 = TREE_OPERAND (exp, 0);
11038 arg1 = TREE_OPERAND (exp, 1);
11039 type = TREE_TYPE (arg0);
11040 operand_mode = TYPE_MODE (type);
11041 unsignedp = TREE_UNSIGNED (type);
11042
11043 /* We won't bother with BLKmode store-flag operations because it would mean
11044 passing a lot of information to emit_store_flag. */
11045 if (operand_mode == BLKmode)
11046 return 0;
11047
11048 /* We won't bother with store-flag operations involving function pointers
11049 when function pointers must be canonicalized before comparisons. */
11050 #ifdef HAVE_canonicalize_funcptr_for_compare
11051 if (HAVE_canonicalize_funcptr_for_compare
11052 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11053 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11054 == FUNCTION_TYPE))
11055 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11056 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11057 == FUNCTION_TYPE))))
11058 return 0;
11059 #endif
11060
11061 STRIP_NOPS (arg0);
11062 STRIP_NOPS (arg1);
11063
11064 /* Get the rtx comparison code to use. We know that EXP is a comparison
11065 operation of some type. Some comparisons against 1 and -1 can be
11066 converted to comparisons with zero. Do so here so that the tests
11067 below will be aware that we have a comparison with zero. These
11068 tests will not catch constants in the first operand, but constants
11069 are rarely passed as the first operand. */
11070
11071 switch (TREE_CODE (exp))
11072 {
11073 case EQ_EXPR:
11074 code = EQ;
11075 break;
11076 case NE_EXPR:
11077 code = NE;
11078 break;
11079 case LT_EXPR:
11080 if (integer_onep (arg1))
11081 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11082 else
11083 code = unsignedp ? LTU : LT;
11084 break;
11085 case LE_EXPR:
11086 if (! unsignedp && integer_all_onesp (arg1))
11087 arg1 = integer_zero_node, code = LT;
11088 else
11089 code = unsignedp ? LEU : LE;
11090 break;
11091 case GT_EXPR:
11092 if (! unsignedp && integer_all_onesp (arg1))
11093 arg1 = integer_zero_node, code = GE;
11094 else
11095 code = unsignedp ? GTU : GT;
11096 break;
11097 case GE_EXPR:
11098 if (integer_onep (arg1))
11099 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11100 else
11101 code = unsignedp ? GEU : GE;
11102 break;
11103 default:
11104 abort ();
11105 }
11106
11107 /* Put a constant second. */
11108 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11109 {
11110 tem = arg0; arg0 = arg1; arg1 = tem;
11111 code = swap_condition (code);
11112 }
11113
11114 /* If this is an equality or inequality test of a single bit, we can
11115 do this by shifting the bit being tested to the low-order bit and
11116 masking the result with the constant 1. If the condition was EQ,
11117 we xor it with 1. This does not require an scc insn and is faster
11118 than an scc insn even if we have it. */
11119
11120 if ((code == NE || code == EQ)
11121 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11122 && integer_pow2p (TREE_OPERAND (arg0, 1))
11123 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
11124 {
11125 tree inner = TREE_OPERAND (arg0, 0);
11126 HOST_WIDE_INT tem;
11127 int bitnum;
11128 int ops_unsignedp;
11129
11130 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
11131 NULL_RTX, VOIDmode, 0));
11132 /* In this case, immed_double_const will sign extend the value to make
11133 it look the same on the host and target. We must remove the
11134 sign-extension before calling exact_log2, since exact_log2 will
11135 fail for negative values. */
11136 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
11137 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
11138 /* We don't use the obvious constant shift to generate the mask,
11139 because that generates compiler warnings when BITS_PER_WORD is
11140 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
11141 code is unreachable in that case. */
11142 tem = tem & GET_MODE_MASK (word_mode);
11143 bitnum = exact_log2 (tem);
11144
11145 /* If INNER is a right shift of a constant and it plus BITNUM does
11146 not overflow, adjust BITNUM and INNER. */
11147
11148 if (TREE_CODE (inner) == RSHIFT_EXPR
11149 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11150 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11151 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11152 < TYPE_PRECISION (type)))
11153 {
11154 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11155 inner = TREE_OPERAND (inner, 0);
11156 }
11157
11158 /* If we are going to be able to omit the AND below, we must do our
11159 operations as unsigned. If we must use the AND, we have a choice.
11160 Normally unsigned is faster, but for some machines signed is. */
11161 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11162 #ifdef LOAD_EXTEND_OP
11163 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11164 #else
11165 : 1
11166 #endif
11167 );
11168
11169 if (subtarget == 0 || GET_CODE (subtarget) != REG
11170 || GET_MODE (subtarget) != operand_mode
11171 || ! safe_from_p (subtarget, inner))
11172 subtarget = 0;
11173
11174 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11175
11176 if (bitnum != 0)
11177 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11178 size_int (bitnum), subtarget, ops_unsignedp);
11179
11180 if (GET_MODE (op0) != mode)
11181 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11182
11183 if ((code == EQ && ! invert) || (code == NE && invert))
11184 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11185 ops_unsignedp, OPTAB_LIB_WIDEN);
11186
11187 /* Put the AND last so it can combine with more things. */
11188 if (bitnum != TYPE_PRECISION (type) - 1)
11189 op0 = expand_and (op0, const1_rtx, subtarget);
11190
11191 return op0;
11192 }
11193
11194 /* Now see if we are likely to be able to do this. Return if not. */
11195 if (! can_compare_p (operand_mode))
11196 return 0;
11197 icode = setcc_gen_code[(int) code];
11198 if (icode == CODE_FOR_nothing
11199 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11200 {
11201 /* We can only do this if it is one of the special cases that
11202 can be handled without an scc insn. */
11203 if ((code == LT && integer_zerop (arg1))
11204 || (! only_cheap && code == GE && integer_zerop (arg1)))
11205 ;
11206 else if (BRANCH_COST >= 0
11207 && ! only_cheap && (code == NE || code == EQ)
11208 && TREE_CODE (type) != REAL_TYPE
11209 && ((abs_optab->handlers[(int) operand_mode].insn_code
11210 != CODE_FOR_nothing)
11211 || (ffs_optab->handlers[(int) operand_mode].insn_code
11212 != CODE_FOR_nothing)))
11213 ;
11214 else
11215 return 0;
11216 }
11217
11218 preexpand_calls (exp);
11219 if (subtarget == 0 || GET_CODE (subtarget) != REG
11220 || GET_MODE (subtarget) != operand_mode
11221 || ! safe_from_p (subtarget, arg1))
11222 subtarget = 0;
11223
11224 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11225 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11226
11227 if (target == 0)
11228 target = gen_reg_rtx (mode);
11229
11230 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11231 because, if the emit_store_flag does anything it will succeed and
11232 OP0 and OP1 will not be used subsequently. */
11233
11234 result = emit_store_flag (target, code,
11235 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11236 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11237 operand_mode, unsignedp, 1);
11238
11239 if (result)
11240 {
11241 if (invert)
11242 result = expand_binop (mode, xor_optab, result, const1_rtx,
11243 result, 0, OPTAB_LIB_WIDEN);
11244 return result;
11245 }
11246
11247 /* If this failed, we have to do this with set/compare/jump/set code. */
11248 if (GET_CODE (target) != REG
11249 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11250 target = gen_reg_rtx (GET_MODE (target));
11251
11252 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11253 result = compare_from_rtx (op0, op1, code, unsignedp,
11254 operand_mode, NULL_RTX, 0);
11255 if (GET_CODE (result) == CONST_INT)
11256 return (((result == const0_rtx && ! invert)
11257 || (result != const0_rtx && invert))
11258 ? const0_rtx : const1_rtx);
11259
11260 label = gen_label_rtx ();
11261 if (bcc_gen_fctn[(int) code] == 0)
11262 abort ();
11263
11264 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11265 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11266 emit_label (label);
11267
11268 return target;
11269 }
11270 \f
11271 /* Generate a tablejump instruction (used for switch statements). */
11272
11273 #ifdef HAVE_tablejump
11274
11275 /* INDEX is the value being switched on, with the lowest value
11276 in the table already subtracted.
11277 MODE is its expected mode (needed if INDEX is constant).
11278 RANGE is the length of the jump table.
11279 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11280
11281 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11282 index value is out of range. */
11283
11284 void
11285 do_tablejump (index, mode, range, table_label, default_label)
11286 rtx index, range, table_label, default_label;
11287 enum machine_mode mode;
11288 {
11289 register rtx temp, vector;
11290
11291 /* Do an unsigned comparison (in the proper mode) between the index
11292 expression and the value which represents the length of the range.
11293 Since we just finished subtracting the lower bound of the range
11294 from the index expression, this comparison allows us to simultaneously
11295 check that the original index expression value is both greater than
11296 or equal to the minimum value of the range and less than or equal to
11297 the maximum value of the range. */
11298
11299 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11300 emit_jump_insn (gen_bgtu (default_label));
11301
11302 /* If index is in range, it must fit in Pmode.
11303 Convert to Pmode so we can index with it. */
11304 if (mode != Pmode)
11305 index = convert_to_mode (Pmode, index, 1);
11306
11307 /* Don't let a MEM slip thru, because then INDEX that comes
11308 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11309 and break_out_memory_refs will go to work on it and mess it up. */
11310 #ifdef PIC_CASE_VECTOR_ADDRESS
11311 if (flag_pic && GET_CODE (index) != REG)
11312 index = copy_to_mode_reg (Pmode, index);
11313 #endif
11314
11315 /* If flag_force_addr were to affect this address
11316 it could interfere with the tricky assumptions made
11317 about addresses that contain label-refs,
11318 which may be valid only very near the tablejump itself. */
11319 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11320 GET_MODE_SIZE, because this indicates how large insns are. The other
11321 uses should all be Pmode, because they are addresses. This code
11322 could fail if addresses and insns are not the same size. */
11323 index = gen_rtx (PLUS, Pmode,
11324 gen_rtx (MULT, Pmode, index,
11325 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11326 gen_rtx (LABEL_REF, Pmode, table_label));
11327 #ifdef PIC_CASE_VECTOR_ADDRESS
11328 if (flag_pic)
11329 index = PIC_CASE_VECTOR_ADDRESS (index);
11330 else
11331 #endif
11332 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11333 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11334 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11335 RTX_UNCHANGING_P (vector) = 1;
11336 convert_move (temp, vector, 0);
11337
11338 emit_jump_insn (gen_tablejump (temp, table_label));
11339
11340 /* If we are generating PIC code or if the table is PC-relative, the
11341 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11342 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11343 emit_barrier ();
11344 }
11345
11346 #endif /* HAVE_tablejump */
11347
11348
11349 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11350 to that value is on the top of the stack. The resulting type is TYPE, and
11351 the source declaration is DECL. */
11352
11353 void
11354 bc_load_memory (type, decl)
11355 tree type, decl;
11356 {
11357 enum bytecode_opcode opcode;
11358
11359
11360 /* Bit fields are special. We only know about signed and
11361 unsigned ints, and enums. The latter are treated as
11362 signed integers. */
11363
11364 if (DECL_BIT_FIELD (decl))
11365 if (TREE_CODE (type) == ENUMERAL_TYPE
11366 || TREE_CODE (type) == INTEGER_TYPE)
11367 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11368 else
11369 abort ();
11370 else
11371 /* See corresponding comment in bc_store_memory. */
11372 if (TYPE_MODE (type) == BLKmode
11373 || TYPE_MODE (type) == VOIDmode)
11374 return;
11375 else
11376 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11377
11378 if (opcode == neverneverland)
11379 abort ();
11380
11381 bc_emit_bytecode (opcode);
11382
11383 #ifdef DEBUG_PRINT_CODE
11384 fputc ('\n', stderr);
11385 #endif
11386 }
11387
11388
11389 /* Store the contents of the second stack slot to the address in the
11390 top stack slot. DECL is the declaration of the destination and is used
11391 to determine whether we're dealing with a bitfield. */
11392
11393 void
11394 bc_store_memory (type, decl)
11395 tree type, decl;
11396 {
11397 enum bytecode_opcode opcode;
11398
11399
11400 if (DECL_BIT_FIELD (decl))
11401 {
11402 if (TREE_CODE (type) == ENUMERAL_TYPE
11403 || TREE_CODE (type) == INTEGER_TYPE)
11404 opcode = sstoreBI;
11405 else
11406 abort ();
11407 }
11408 else
11409 if (TYPE_MODE (type) == BLKmode)
11410 {
11411 /* Copy structure. This expands to a block copy instruction, storeBLK.
11412 In addition to the arguments expected by the other store instructions,
11413 it also expects a type size (SImode) on top of the stack, which is the
11414 structure size in size units (usually bytes). The two first arguments
11415 are already on the stack; so we just put the size on level 1. For some
11416 other languages, the size may be variable, this is why we don't encode
11417 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11418
11419 bc_expand_expr (TYPE_SIZE (type));
11420 opcode = storeBLK;
11421 }
11422 else
11423 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11424
11425 if (opcode == neverneverland)
11426 abort ();
11427
11428 bc_emit_bytecode (opcode);
11429
11430 #ifdef DEBUG_PRINT_CODE
11431 fputc ('\n', stderr);
11432 #endif
11433 }
11434
11435
11436 /* Allocate local stack space sufficient to hold a value of the given
11437 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11438 integral power of 2. A special case is locals of type VOID, which
11439 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11440 remapped into the corresponding attribute of SI. */
11441
11442 rtx
11443 bc_allocate_local (size, alignment)
11444 int size, alignment;
11445 {
11446 rtx retval;
11447 int byte_alignment;
11448
11449 if (size < 0)
11450 abort ();
11451
11452 /* Normalize size and alignment */
11453 if (!size)
11454 size = UNITS_PER_WORD;
11455
11456 if (alignment < BITS_PER_UNIT)
11457 byte_alignment = 1 << (INT_ALIGN - 1);
11458 else
11459 /* Align */
11460 byte_alignment = alignment / BITS_PER_UNIT;
11461
11462 if (local_vars_size & (byte_alignment - 1))
11463 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11464
11465 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11466 local_vars_size += size;
11467
11468 return retval;
11469 }
11470
11471
11472 /* Allocate variable-sized local array. Variable-sized arrays are
11473 actually pointers to the address in memory where they are stored. */
11474
11475 rtx
11476 bc_allocate_variable_array (size)
11477 tree size;
11478 {
11479 rtx retval;
11480 const int ptralign = (1 << (PTR_ALIGN - 1));
11481
11482 /* Align pointer */
11483 if (local_vars_size & ptralign)
11484 local_vars_size += ptralign - (local_vars_size & ptralign);
11485
11486 /* Note down local space needed: pointer to block; also return
11487 dummy rtx */
11488
11489 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11490 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11491 return retval;
11492 }
11493
11494
11495 /* Push the machine address for the given external variable offset. */
11496
11497 void
11498 bc_load_externaddr (externaddr)
11499 rtx externaddr;
11500 {
11501 bc_emit_bytecode (constP);
11502 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11503 BYTECODE_BC_LABEL (externaddr)->offset);
11504
11505 #ifdef DEBUG_PRINT_CODE
11506 fputc ('\n', stderr);
11507 #endif
11508 }
11509
11510
11511 /* Like above, but expects an IDENTIFIER. */
11512
11513 void
11514 bc_load_externaddr_id (id, offset)
11515 tree id;
11516 int offset;
11517 {
11518 if (!IDENTIFIER_POINTER (id))
11519 abort ();
11520
11521 bc_emit_bytecode (constP);
11522 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11523
11524 #ifdef DEBUG_PRINT_CODE
11525 fputc ('\n', stderr);
11526 #endif
11527 }
11528
11529
11530 /* Push the machine address for the given local variable offset. */
11531
11532 void
11533 bc_load_localaddr (localaddr)
11534 rtx localaddr;
11535 {
11536 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11537 }
11538
11539
11540 /* Push the machine address for the given parameter offset.
11541 NOTE: offset is in bits. */
11542
11543 void
11544 bc_load_parmaddr (parmaddr)
11545 rtx parmaddr;
11546 {
11547 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11548 / BITS_PER_UNIT));
11549 }
11550
11551
11552 /* Convert a[i] into *(a + i). */
11553
11554 tree
11555 bc_canonicalize_array_ref (exp)
11556 tree exp;
11557 {
11558 tree type = TREE_TYPE (exp);
11559 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11560 TREE_OPERAND (exp, 0));
11561 tree index = TREE_OPERAND (exp, 1);
11562
11563
11564 /* Convert the integer argument to a type the same size as a pointer
11565 so the multiply won't overflow spuriously. */
11566
11567 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11568 index = convert (type_for_size (POINTER_SIZE, 0), index);
11569
11570 /* The array address isn't volatile even if the array is.
11571 (Of course this isn't terribly relevant since the bytecode
11572 translator treats nearly everything as volatile anyway.) */
11573 TREE_THIS_VOLATILE (array_adr) = 0;
11574
11575 return build1 (INDIRECT_REF, type,
11576 fold (build (PLUS_EXPR,
11577 TYPE_POINTER_TO (type),
11578 array_adr,
11579 fold (build (MULT_EXPR,
11580 TYPE_POINTER_TO (type),
11581 index,
11582 size_in_bytes (type))))));
11583 }
11584
11585
11586 /* Load the address of the component referenced by the given
11587 COMPONENT_REF expression.
11588
11589 Returns innermost lvalue. */
11590
11591 tree
11592 bc_expand_component_address (exp)
11593 tree exp;
11594 {
11595 tree tem, chain;
11596 enum machine_mode mode;
11597 int bitpos = 0;
11598 HOST_WIDE_INT SIval;
11599
11600
11601 tem = TREE_OPERAND (exp, 1);
11602 mode = DECL_MODE (tem);
11603
11604
11605 /* Compute cumulative bit offset for nested component refs
11606 and array refs, and find the ultimate containing object. */
11607
11608 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11609 {
11610 if (TREE_CODE (tem) == COMPONENT_REF)
11611 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11612 else
11613 if (TREE_CODE (tem) == ARRAY_REF
11614 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11615 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11616
11617 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11618 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11619 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11620 else
11621 break;
11622 }
11623
11624 bc_expand_expr (tem);
11625
11626
11627 /* For bitfields also push their offset and size */
11628 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11629 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
11630 else
11631 if (SIval = bitpos / BITS_PER_UNIT)
11632 bc_emit_instruction (addconstPSI, SIval);
11633
11634 return (TREE_OPERAND (exp, 1));
11635 }
11636
11637
11638 /* Emit code to push two SI constants */
11639
11640 void
11641 bc_push_offset_and_size (offset, size)
11642 HOST_WIDE_INT offset, size;
11643 {
11644 bc_emit_instruction (constSI, offset);
11645 bc_emit_instruction (constSI, size);
11646 }
11647
11648
11649 /* Emit byte code to push the address of the given lvalue expression to
11650 the stack. If it's a bit field, we also push offset and size info.
11651
11652 Returns innermost component, which allows us to determine not only
11653 its type, but also whether it's a bitfield. */
11654
11655 tree
11656 bc_expand_address (exp)
11657 tree exp;
11658 {
11659 /* Safeguard */
11660 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11661 return (exp);
11662
11663
11664 switch (TREE_CODE (exp))
11665 {
11666 case ARRAY_REF:
11667
11668 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11669
11670 case COMPONENT_REF:
11671
11672 return (bc_expand_component_address (exp));
11673
11674 case INDIRECT_REF:
11675
11676 bc_expand_expr (TREE_OPERAND (exp, 0));
11677
11678 /* For variable-sized types: retrieve pointer. Sometimes the
11679 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11680 also make sure we have an operand, just in case... */
11681
11682 if (TREE_OPERAND (exp, 0)
11683 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11684 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11685 bc_emit_instruction (loadP);
11686
11687 /* If packed, also return offset and size */
11688 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11689
11690 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11691 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11692
11693 return (TREE_OPERAND (exp, 0));
11694
11695 case FUNCTION_DECL:
11696
11697 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11698 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11699 break;
11700
11701 case PARM_DECL:
11702
11703 bc_load_parmaddr (DECL_RTL (exp));
11704
11705 /* For variable-sized types: retrieve pointer */
11706 if (TYPE_SIZE (TREE_TYPE (exp))
11707 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11708 bc_emit_instruction (loadP);
11709
11710 /* If packed, also return offset and size */
11711 if (DECL_BIT_FIELD (exp))
11712 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11713 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11714
11715 break;
11716
11717 case RESULT_DECL:
11718
11719 bc_emit_instruction (returnP);
11720 break;
11721
11722 case VAR_DECL:
11723
11724 #if 0
11725 if (BYTECODE_LABEL (DECL_RTL (exp)))
11726 bc_load_externaddr (DECL_RTL (exp));
11727 #endif
11728
11729 if (DECL_EXTERNAL (exp))
11730 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11731 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11732 else
11733 bc_load_localaddr (DECL_RTL (exp));
11734
11735 /* For variable-sized types: retrieve pointer */
11736 if (TYPE_SIZE (TREE_TYPE (exp))
11737 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11738 bc_emit_instruction (loadP);
11739
11740 /* If packed, also return offset and size */
11741 if (DECL_BIT_FIELD (exp))
11742 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11743 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11744
11745 break;
11746
11747 case STRING_CST:
11748 {
11749 rtx r;
11750
11751 bc_emit_bytecode (constP);
11752 r = output_constant_def (exp);
11753 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11754
11755 #ifdef DEBUG_PRINT_CODE
11756 fputc ('\n', stderr);
11757 #endif
11758 }
11759 break;
11760
11761 default:
11762
11763 abort();
11764 break;
11765 }
11766
11767 /* Most lvalues don't have components. */
11768 return (exp);
11769 }
11770
11771
11772 /* Emit a type code to be used by the runtime support in handling
11773 parameter passing. The type code consists of the machine mode
11774 plus the minimal alignment shifted left 8 bits. */
11775
11776 tree
11777 bc_runtime_type_code (type)
11778 tree type;
11779 {
11780 int val;
11781
11782 switch (TREE_CODE (type))
11783 {
11784 case VOID_TYPE:
11785 case INTEGER_TYPE:
11786 case REAL_TYPE:
11787 case COMPLEX_TYPE:
11788 case ENUMERAL_TYPE:
11789 case POINTER_TYPE:
11790 case RECORD_TYPE:
11791
11792 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11793 break;
11794
11795 case ERROR_MARK:
11796
11797 val = 0;
11798 break;
11799
11800 default:
11801
11802 abort ();
11803 }
11804 return build_int_2 (val, 0);
11805 }
11806
11807
11808 /* Generate constructor label */
11809
11810 char *
11811 bc_gen_constr_label ()
11812 {
11813 static int label_counter;
11814 static char label[20];
11815
11816 sprintf (label, "*LR%d", label_counter++);
11817
11818 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11819 }
11820
11821
11822 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11823 expand the constructor data as static data, and push a pointer to it.
11824 The pointer is put in the pointer table and is retrieved by a constP
11825 bytecode instruction. We then loop and store each constructor member in
11826 the corresponding component. Finally, we return the original pointer on
11827 the stack. */
11828
11829 void
11830 bc_expand_constructor (constr)
11831 tree constr;
11832 {
11833 char *l;
11834 HOST_WIDE_INT ptroffs;
11835 rtx constr_rtx;
11836
11837
11838 /* Literal constructors are handled as constants, whereas
11839 non-literals are evaluated and stored element by element
11840 into the data segment. */
11841
11842 /* Allocate space in proper segment and push pointer to space on stack.
11843 */
11844
11845 l = bc_gen_constr_label ();
11846
11847 if (TREE_CONSTANT (constr))
11848 {
11849 text_section ();
11850
11851 bc_emit_const_labeldef (l);
11852 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11853 }
11854 else
11855 {
11856 data_section ();
11857
11858 bc_emit_data_labeldef (l);
11859 bc_output_data_constructor (constr);
11860 }
11861
11862
11863 /* Add reference to pointer table and recall pointer to stack;
11864 this code is common for both types of constructors: literals
11865 and non-literals. */
11866
11867 ptroffs = bc_define_pointer (l);
11868 bc_emit_instruction (constP, ptroffs);
11869
11870 /* This is all that has to be done if it's a literal. */
11871 if (TREE_CONSTANT (constr))
11872 return;
11873
11874
11875 /* At this point, we have the pointer to the structure on top of the stack.
11876 Generate sequences of store_memory calls for the constructor. */
11877
11878 /* constructor type is structure */
11879 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11880 {
11881 register tree elt;
11882
11883 /* If the constructor has fewer fields than the structure,
11884 clear the whole structure first. */
11885
11886 if (list_length (CONSTRUCTOR_ELTS (constr))
11887 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11888 {
11889 bc_emit_instruction (duplicate);
11890 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11891 bc_emit_instruction (clearBLK);
11892 }
11893
11894 /* Store each element of the constructor into the corresponding
11895 field of TARGET. */
11896
11897 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11898 {
11899 register tree field = TREE_PURPOSE (elt);
11900 register enum machine_mode mode;
11901 int bitsize;
11902 int bitpos;
11903 int unsignedp;
11904
11905 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11906 mode = DECL_MODE (field);
11907 unsignedp = TREE_UNSIGNED (field);
11908
11909 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11910
11911 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11912 /* The alignment of TARGET is
11913 at least what its type requires. */
11914 VOIDmode, 0,
11915 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11916 int_size_in_bytes (TREE_TYPE (constr)));
11917 }
11918 }
11919 else
11920
11921 /* Constructor type is array */
11922 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11923 {
11924 register tree elt;
11925 register int i;
11926 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11927 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11928 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11929 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11930
11931 /* If the constructor has fewer fields than the structure,
11932 clear the whole structure first. */
11933
11934 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11935 {
11936 bc_emit_instruction (duplicate);
11937 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11938 bc_emit_instruction (clearBLK);
11939 }
11940
11941
11942 /* Store each element of the constructor into the corresponding
11943 element of TARGET, determined by counting the elements. */
11944
11945 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11946 elt;
11947 elt = TREE_CHAIN (elt), i++)
11948 {
11949 register enum machine_mode mode;
11950 int bitsize;
11951 int bitpos;
11952 int unsignedp;
11953
11954 mode = TYPE_MODE (elttype);
11955 bitsize = GET_MODE_BITSIZE (mode);
11956 unsignedp = TREE_UNSIGNED (elttype);
11957
11958 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11959 /* * TYPE_SIZE_UNIT (elttype) */ );
11960
11961 bc_store_field (elt, bitsize, bitpos, mode,
11962 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11963 /* The alignment of TARGET is
11964 at least what its type requires. */
11965 VOIDmode, 0,
11966 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11967 int_size_in_bytes (TREE_TYPE (constr)));
11968 }
11969
11970 }
11971 }
11972
11973
11974 /* Store the value of EXP (an expression tree) into member FIELD of
11975 structure at address on stack, which has type TYPE, mode MODE and
11976 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11977 structure.
11978
11979 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11980 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11981
11982 void
11983 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11984 value_mode, unsignedp, align, total_size)
11985 int bitsize, bitpos;
11986 enum machine_mode mode;
11987 tree field, exp, type;
11988 enum machine_mode value_mode;
11989 int unsignedp;
11990 int align;
11991 int total_size;
11992 {
11993
11994 /* Expand expression and copy pointer */
11995 bc_expand_expr (exp);
11996 bc_emit_instruction (over);
11997
11998
11999 /* If the component is a bit field, we cannot use addressing to access
12000 it. Use bit-field techniques to store in it. */
12001
12002 if (DECL_BIT_FIELD (field))
12003 {
12004 bc_store_bit_field (bitpos, bitsize, unsignedp);
12005 return;
12006 }
12007 else
12008 /* Not bit field */
12009 {
12010 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
12011
12012 /* Advance pointer to the desired member */
12013 if (offset)
12014 bc_emit_instruction (addconstPSI, offset);
12015
12016 /* Store */
12017 bc_store_memory (type, field);
12018 }
12019 }
12020
12021
12022 /* Store SI/SU in bitfield */
12023
12024 void
12025 bc_store_bit_field (offset, size, unsignedp)
12026 int offset, size, unsignedp;
12027 {
12028 /* Push bitfield offset and size */
12029 bc_push_offset_and_size (offset, size);
12030
12031 /* Store */
12032 bc_emit_instruction (sstoreBI);
12033 }
12034
12035
12036 /* Load SI/SU from bitfield */
12037
12038 void
12039 bc_load_bit_field (offset, size, unsignedp)
12040 int offset, size, unsignedp;
12041 {
12042 /* Push bitfield offset and size */
12043 bc_push_offset_and_size (offset, size);
12044
12045 /* Load: sign-extend if signed, else zero-extend */
12046 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
12047 }
12048
12049
12050 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
12051 (adjust stack pointer upwards), negative means add that number of
12052 levels (adjust the stack pointer downwards). Only positive values
12053 normally make sense. */
12054
12055 void
12056 bc_adjust_stack (nlevels)
12057 int nlevels;
12058 {
12059 switch (nlevels)
12060 {
12061 case 0:
12062 break;
12063
12064 case 2:
12065 bc_emit_instruction (drop);
12066
12067 case 1:
12068 bc_emit_instruction (drop);
12069 break;
12070
12071 default:
12072
12073 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
12074 stack_depth -= nlevels;
12075 }
12076
12077 #if defined (VALIDATE_STACK_FOR_BC)
12078 VALIDATE_STACK_FOR_BC ();
12079 #endif
12080 }
This page took 0.950409 seconds and 4 git commands to generate.