]> gcc.gnu.org Git - gcc.git/blob - gcc/expr.c
09aeeeb2d5ff12755046eac546b59c7c486ad15d
[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "machmode.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "obstack.h"
27 #include "flags.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "except.h"
31 #include "function.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
34 #include "expr.h"
35 #include "insn-config.h"
36 #include "recog.h"
37 #include "output.h"
38 #include "typeclass.h"
39
40 #include "bytecode.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
43 #include "bc-optab.h"
44 #include "bc-emit.h"
45
46
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
48
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
51
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
54
55 #ifdef PUSH_ROUNDING
56
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
59 #endif
60
61 #endif
62
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
70
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
73
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
80 int cse_not_expected;
81
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
86
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust;
90
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop;
96
97 /* A list of all cleanups which belong to the arguments of
98 function calls being expanded by expand_call. */
99 tree cleanups_this_call;
100
101 /* When temporaries are created by TARGET_EXPRs, they are created at
102 this level of temp_slot_level, so that they can remain allocated
103 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
104 of TARGET_EXPRs. */
105 int target_temp_slot_level;
106
107 /* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs
109 returned. */
110 static rtx saveregs_value;
111
112 /* Similarly for __builtin_apply_args. */
113 static rtx apply_args_value;
114
115 /* This structure is used by move_by_pieces to describe the move to
116 be performed. */
117
118 struct move_by_pieces
119 {
120 rtx to;
121 rtx to_addr;
122 int autinc_to;
123 int explicit_inc_to;
124 int to_struct;
125 rtx from;
126 rtx from_addr;
127 int autinc_from;
128 int explicit_inc_from;
129 int from_struct;
130 int len;
131 int offset;
132 int reverse;
133 };
134
135 /* This structure is used by clear_by_pieces to describe the clear to
136 be performed. */
137
138 struct clear_by_pieces
139 {
140 rtx to;
141 rtx to_addr;
142 int autinc_to;
143 int explicit_inc_to;
144 int to_struct;
145 int len;
146 int offset;
147 int reverse;
148 };
149
150 /* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
153
154 extern int local_vars_size;
155 extern int stack_depth;
156 extern int max_stack_depth;
157 extern struct obstack permanent_obstack;
158 extern rtx arg_pointer_save_area;
159
160 static rtx enqueue_insn PROTO((rtx, rtx));
161 static int queued_subexp_p PROTO((rtx));
162 static void init_queue PROTO((void));
163 static void move_by_pieces PROTO((rtx, rtx, int, int));
164 static int move_by_pieces_ninsns PROTO((unsigned int, int));
165 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
166 struct move_by_pieces *));
167 static void clear_by_pieces PROTO((rtx, int, int));
168 static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
169 struct clear_by_pieces *));
170 static int is_zeros_p PROTO((tree));
171 static int mostly_zeros_p PROTO((tree));
172 static void store_constructor PROTO((tree, rtx, int));
173 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
174 enum machine_mode, int, int, int));
175 static int get_inner_unaligned_p PROTO((tree));
176 static tree save_noncopied_parts PROTO((tree, tree));
177 static tree init_noncopied_parts PROTO((tree, tree));
178 static int safe_from_p PROTO((rtx, tree));
179 static int fixed_type_p PROTO((tree));
180 static rtx var_rtx PROTO((tree));
181 static int get_pointer_alignment PROTO((tree, unsigned));
182 static tree string_constant PROTO((tree, tree *));
183 static tree c_strlen PROTO((tree));
184 static rtx expand_builtin PROTO((tree, rtx, rtx,
185 enum machine_mode, int));
186 static int apply_args_size PROTO((void));
187 static int apply_result_size PROTO((void));
188 static rtx result_vector PROTO((int, rtx));
189 static rtx expand_builtin_apply_args PROTO((void));
190 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
191 static void expand_builtin_return PROTO((rtx));
192 static rtx expand_increment PROTO((tree, int, int));
193 void bc_expand_increment PROTO((struct increment_operator *, tree));
194 rtx bc_allocate_local PROTO((int, int));
195 void bc_store_memory PROTO((tree, tree));
196 tree bc_expand_component_address PROTO((tree));
197 tree bc_expand_address PROTO((tree));
198 void bc_expand_constructor PROTO((tree));
199 void bc_adjust_stack PROTO((int));
200 tree bc_canonicalize_array_ref PROTO((tree));
201 void bc_load_memory PROTO((tree, tree));
202 void bc_load_externaddr PROTO((rtx));
203 void bc_load_externaddr_id PROTO((tree, int));
204 void bc_load_localaddr PROTO((rtx));
205 void bc_load_parmaddr PROTO((rtx));
206 static void preexpand_calls PROTO((tree));
207 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
208 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
209 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
210 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
211 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
212 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
213 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
214 static tree defer_cleanups_to PROTO((tree));
215 extern tree truthvalue_conversion PROTO((tree));
216
217 /* Record for each mode whether we can move a register directly to or
218 from an object of that mode in memory. If we can't, we won't try
219 to use that mode directly when accessing a field of that mode. */
220
221 static char direct_load[NUM_MACHINE_MODES];
222 static char direct_store[NUM_MACHINE_MODES];
223
224 /* MOVE_RATIO is the number of move instructions that is better than
225 a block move. */
226
227 #ifndef MOVE_RATIO
228 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
229 #define MOVE_RATIO 2
230 #else
231 /* A value of around 6 would minimize code size; infinity would minimize
232 execution time. */
233 #define MOVE_RATIO 15
234 #endif
235 #endif
236
237 /* This array records the insn_code of insns to perform block moves. */
238 enum insn_code movstr_optab[NUM_MACHINE_MODES];
239
240 /* This array records the insn_code of insns to perform block clears. */
241 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
242
243 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
244
245 #ifndef SLOW_UNALIGNED_ACCESS
246 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
247 #endif
248
249 /* Register mappings for target machines without register windows. */
250 #ifndef INCOMING_REGNO
251 #define INCOMING_REGNO(OUT) (OUT)
252 #endif
253 #ifndef OUTGOING_REGNO
254 #define OUTGOING_REGNO(IN) (IN)
255 #endif
256 \f
257 /* Maps used to convert modes to const, load, and store bytecodes. */
258 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
259 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
260 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
261
262 /* Initialize maps used to convert modes to const, load, and store
263 bytecodes. */
264
265 void
266 bc_init_mode_to_opcode_maps ()
267 {
268 int mode;
269
270 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
271 mode_to_const_map[mode] =
272 mode_to_load_map[mode] =
273 mode_to_store_map[mode] = neverneverland;
274
275 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
276 mode_to_const_map[(int) SYM] = CONST; \
277 mode_to_load_map[(int) SYM] = LOAD; \
278 mode_to_store_map[(int) SYM] = STORE;
279
280 #include "modemap.def"
281 #undef DEF_MODEMAP
282 }
283 \f
284 /* This is run once per compilation to set up which modes can be used
285 directly in memory and to initialize the block move optab. */
286
287 void
288 init_expr_once ()
289 {
290 rtx insn, pat;
291 enum machine_mode mode;
292 /* Try indexing by frame ptr and try by stack ptr.
293 It is known that on the Convex the stack ptr isn't a valid index.
294 With luck, one or the other is valid on any machine. */
295 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
296 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
297
298 start_sequence ();
299 insn = emit_insn (gen_rtx (SET, 0, 0));
300 pat = PATTERN (insn);
301
302 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
303 mode = (enum machine_mode) ((int) mode + 1))
304 {
305 int regno;
306 rtx reg;
307 int num_clobbers;
308
309 direct_load[(int) mode] = direct_store[(int) mode] = 0;
310 PUT_MODE (mem, mode);
311 PUT_MODE (mem1, mode);
312
313 /* See if there is some register that can be used in this mode and
314 directly loaded or stored from memory. */
315
316 if (mode != VOIDmode && mode != BLKmode)
317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
318 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
319 regno++)
320 {
321 if (! HARD_REGNO_MODE_OK (regno, mode))
322 continue;
323
324 reg = gen_rtx (REG, mode, regno);
325
326 SET_SRC (pat) = mem;
327 SET_DEST (pat) = reg;
328 if (recog (pat, insn, &num_clobbers) >= 0)
329 direct_load[(int) mode] = 1;
330
331 SET_SRC (pat) = mem1;
332 SET_DEST (pat) = reg;
333 if (recog (pat, insn, &num_clobbers) >= 0)
334 direct_load[(int) mode] = 1;
335
336 SET_SRC (pat) = reg;
337 SET_DEST (pat) = mem;
338 if (recog (pat, insn, &num_clobbers) >= 0)
339 direct_store[(int) mode] = 1;
340
341 SET_SRC (pat) = reg;
342 SET_DEST (pat) = mem1;
343 if (recog (pat, insn, &num_clobbers) >= 0)
344 direct_store[(int) mode] = 1;
345 }
346 }
347
348 end_sequence ();
349 }
350
351 /* This is run at the start of compiling a function. */
352
353 void
354 init_expr ()
355 {
356 init_queue ();
357
358 pending_stack_adjust = 0;
359 inhibit_defer_pop = 0;
360 cleanups_this_call = 0;
361 saveregs_value = 0;
362 apply_args_value = 0;
363 forced_labels = 0;
364 }
365
366 /* Save all variables describing the current status into the structure *P.
367 This is used before starting a nested function. */
368
369 void
370 save_expr_status (p)
371 struct function *p;
372 {
373 /* Instead of saving the postincrement queue, empty it. */
374 emit_queue ();
375
376 p->pending_stack_adjust = pending_stack_adjust;
377 p->inhibit_defer_pop = inhibit_defer_pop;
378 p->cleanups_this_call = cleanups_this_call;
379 p->saveregs_value = saveregs_value;
380 p->apply_args_value = apply_args_value;
381 p->forced_labels = forced_labels;
382
383 pending_stack_adjust = 0;
384 inhibit_defer_pop = 0;
385 cleanups_this_call = 0;
386 saveregs_value = 0;
387 apply_args_value = 0;
388 forced_labels = 0;
389 }
390
391 /* Restore all variables describing the current status from the structure *P.
392 This is used after a nested function. */
393
394 void
395 restore_expr_status (p)
396 struct function *p;
397 {
398 pending_stack_adjust = p->pending_stack_adjust;
399 inhibit_defer_pop = p->inhibit_defer_pop;
400 cleanups_this_call = p->cleanups_this_call;
401 saveregs_value = p->saveregs_value;
402 apply_args_value = p->apply_args_value;
403 forced_labels = p->forced_labels;
404 }
405 \f
406 /* Manage the queue of increment instructions to be output
407 for POSTINCREMENT_EXPR expressions, etc. */
408
409 static rtx pending_chain;
410
411 /* Queue up to increment (or change) VAR later. BODY says how:
412 BODY should be the same thing you would pass to emit_insn
413 to increment right away. It will go to emit_insn later on.
414
415 The value is a QUEUED expression to be used in place of VAR
416 where you want to guarantee the pre-incrementation value of VAR. */
417
418 static rtx
419 enqueue_insn (var, body)
420 rtx var, body;
421 {
422 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
423 var, NULL_RTX, NULL_RTX, body, pending_chain);
424 return pending_chain;
425 }
426
427 /* Use protect_from_queue to convert a QUEUED expression
428 into something that you can put immediately into an instruction.
429 If the queued incrementation has not happened yet,
430 protect_from_queue returns the variable itself.
431 If the incrementation has happened, protect_from_queue returns a temp
432 that contains a copy of the old value of the variable.
433
434 Any time an rtx which might possibly be a QUEUED is to be put
435 into an instruction, it must be passed through protect_from_queue first.
436 QUEUED expressions are not meaningful in instructions.
437
438 Do not pass a value through protect_from_queue and then hold
439 on to it for a while before putting it in an instruction!
440 If the queue is flushed in between, incorrect code will result. */
441
442 rtx
443 protect_from_queue (x, modify)
444 register rtx x;
445 int modify;
446 {
447 register RTX_CODE code = GET_CODE (x);
448
449 #if 0 /* A QUEUED can hang around after the queue is forced out. */
450 /* Shortcut for most common case. */
451 if (pending_chain == 0)
452 return x;
453 #endif
454
455 if (code != QUEUED)
456 {
457 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
458 use of autoincrement. Make a copy of the contents of the memory
459 location rather than a copy of the address, but not if the value is
460 of mode BLKmode. Don't modify X in place since it might be
461 shared. */
462 if (code == MEM && GET_MODE (x) != BLKmode
463 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
464 {
465 register rtx y = XEXP (x, 0);
466 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
467
468 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
469 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
470 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
471
472 if (QUEUED_INSN (y))
473 {
474 register rtx temp = gen_reg_rtx (GET_MODE (new));
475 emit_insn_before (gen_move_insn (temp, new),
476 QUEUED_INSN (y));
477 return temp;
478 }
479 return new;
480 }
481 /* Otherwise, recursively protect the subexpressions of all
482 the kinds of rtx's that can contain a QUEUED. */
483 if (code == MEM)
484 {
485 rtx tem = protect_from_queue (XEXP (x, 0), 0);
486 if (tem != XEXP (x, 0))
487 {
488 x = copy_rtx (x);
489 XEXP (x, 0) = tem;
490 }
491 }
492 else if (code == PLUS || code == MULT)
493 {
494 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
495 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
496 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
497 {
498 x = copy_rtx (x);
499 XEXP (x, 0) = new0;
500 XEXP (x, 1) = new1;
501 }
502 }
503 return x;
504 }
505 /* If the increment has not happened, use the variable itself. */
506 if (QUEUED_INSN (x) == 0)
507 return QUEUED_VAR (x);
508 /* If the increment has happened and a pre-increment copy exists,
509 use that copy. */
510 if (QUEUED_COPY (x) != 0)
511 return QUEUED_COPY (x);
512 /* The increment has happened but we haven't set up a pre-increment copy.
513 Set one up now, and use it. */
514 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
515 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
516 QUEUED_INSN (x));
517 return QUEUED_COPY (x);
518 }
519
520 /* Return nonzero if X contains a QUEUED expression:
521 if it contains anything that will be altered by a queued increment.
522 We handle only combinations of MEM, PLUS, MINUS and MULT operators
523 since memory addresses generally contain only those. */
524
525 static int
526 queued_subexp_p (x)
527 rtx x;
528 {
529 register enum rtx_code code = GET_CODE (x);
530 switch (code)
531 {
532 case QUEUED:
533 return 1;
534 case MEM:
535 return queued_subexp_p (XEXP (x, 0));
536 case MULT:
537 case PLUS:
538 case MINUS:
539 return queued_subexp_p (XEXP (x, 0))
540 || queued_subexp_p (XEXP (x, 1));
541 }
542 return 0;
543 }
544
545 /* Perform all the pending incrementations. */
546
547 void
548 emit_queue ()
549 {
550 register rtx p;
551 while (p = pending_chain)
552 {
553 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
554 pending_chain = QUEUED_NEXT (p);
555 }
556 }
557
558 static void
559 init_queue ()
560 {
561 if (pending_chain)
562 abort ();
563 }
564 \f
565 /* Copy data from FROM to TO, where the machine modes are not the same.
566 Both modes may be integer, or both may be floating.
567 UNSIGNEDP should be nonzero if FROM is an unsigned type.
568 This causes zero-extension instead of sign-extension. */
569
570 void
571 convert_move (to, from, unsignedp)
572 register rtx to, from;
573 int unsignedp;
574 {
575 enum machine_mode to_mode = GET_MODE (to);
576 enum machine_mode from_mode = GET_MODE (from);
577 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
578 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
579 enum insn_code code;
580 rtx libcall;
581
582 /* rtx code for making an equivalent value. */
583 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
584
585 to = protect_from_queue (to, 1);
586 from = protect_from_queue (from, 0);
587
588 if (to_real != from_real)
589 abort ();
590
591 /* If FROM is a SUBREG that indicates that we have already done at least
592 the required extension, strip it. We don't handle such SUBREGs as
593 TO here. */
594
595 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
596 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
597 >= GET_MODE_SIZE (to_mode))
598 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
599 from = gen_lowpart (to_mode, from), from_mode = to_mode;
600
601 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
602 abort ();
603
604 if (to_mode == from_mode
605 || (from_mode == VOIDmode && CONSTANT_P (from)))
606 {
607 emit_move_insn (to, from);
608 return;
609 }
610
611 if (to_real)
612 {
613 rtx value;
614
615 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
616 {
617 /* Try converting directly if the insn is supported. */
618 if ((code = can_extend_p (to_mode, from_mode, 0))
619 != CODE_FOR_nothing)
620 {
621 emit_unop_insn (code, to, from, UNKNOWN);
622 return;
623 }
624 }
625
626 #ifdef HAVE_trunchfqf2
627 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
628 {
629 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
630 return;
631 }
632 #endif
633 #ifdef HAVE_truncsfqf2
634 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
637 return;
638 }
639 #endif
640 #ifdef HAVE_truncdfqf2
641 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
644 return;
645 }
646 #endif
647 #ifdef HAVE_truncxfqf2
648 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
649 {
650 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
651 return;
652 }
653 #endif
654 #ifdef HAVE_trunctfqf2
655 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
656 {
657 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
658 return;
659 }
660 #endif
661
662 #ifdef HAVE_trunctqfhf2
663 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
664 {
665 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669 #ifdef HAVE_truncsfhf2
670 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
671 {
672 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
673 return;
674 }
675 #endif
676 #ifdef HAVE_truncdfhf2
677 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
678 {
679 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
680 return;
681 }
682 #endif
683 #ifdef HAVE_truncxfhf2
684 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
685 {
686 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
687 return;
688 }
689 #endif
690 #ifdef HAVE_trunctfhf2
691 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
692 {
693 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
694 return;
695 }
696 #endif
697
698 #ifdef HAVE_truncsftqf2
699 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
700 {
701 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
702 return;
703 }
704 #endif
705 #ifdef HAVE_truncdftqf2
706 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
707 {
708 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
709 return;
710 }
711 #endif
712 #ifdef HAVE_truncxftqf2
713 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
714 {
715 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
716 return;
717 }
718 #endif
719 #ifdef HAVE_trunctftqf2
720 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
721 {
722 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
723 return;
724 }
725 #endif
726
727 #ifdef HAVE_truncdfsf2
728 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
729 {
730 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
731 return;
732 }
733 #endif
734 #ifdef HAVE_truncxfsf2
735 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
736 {
737 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
738 return;
739 }
740 #endif
741 #ifdef HAVE_trunctfsf2
742 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
743 {
744 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
745 return;
746 }
747 #endif
748 #ifdef HAVE_truncxfdf2
749 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
750 {
751 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
752 return;
753 }
754 #endif
755 #ifdef HAVE_trunctfdf2
756 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
757 {
758 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
759 return;
760 }
761 #endif
762
763 libcall = (rtx) 0;
764 switch (from_mode)
765 {
766 case SFmode:
767 switch (to_mode)
768 {
769 case DFmode:
770 libcall = extendsfdf2_libfunc;
771 break;
772
773 case XFmode:
774 libcall = extendsfxf2_libfunc;
775 break;
776
777 case TFmode:
778 libcall = extendsftf2_libfunc;
779 break;
780 }
781 break;
782
783 case DFmode:
784 switch (to_mode)
785 {
786 case SFmode:
787 libcall = truncdfsf2_libfunc;
788 break;
789
790 case XFmode:
791 libcall = extenddfxf2_libfunc;
792 break;
793
794 case TFmode:
795 libcall = extenddftf2_libfunc;
796 break;
797 }
798 break;
799
800 case XFmode:
801 switch (to_mode)
802 {
803 case SFmode:
804 libcall = truncxfsf2_libfunc;
805 break;
806
807 case DFmode:
808 libcall = truncxfdf2_libfunc;
809 break;
810 }
811 break;
812
813 case TFmode:
814 switch (to_mode)
815 {
816 case SFmode:
817 libcall = trunctfsf2_libfunc;
818 break;
819
820 case DFmode:
821 libcall = trunctfdf2_libfunc;
822 break;
823 }
824 break;
825 }
826
827 if (libcall == (rtx) 0)
828 /* This conversion is not implemented yet. */
829 abort ();
830
831 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
832 1, from, from_mode);
833 emit_move_insn (to, value);
834 return;
835 }
836
837 /* Now both modes are integers. */
838
839 /* Handle expanding beyond a word. */
840 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
841 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
842 {
843 rtx insns;
844 rtx lowpart;
845 rtx fill_value;
846 rtx lowfrom;
847 int i;
848 enum machine_mode lowpart_mode;
849 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
850
851 /* Try converting directly if the insn is supported. */
852 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
853 != CODE_FOR_nothing)
854 {
855 /* If FROM is a SUBREG, put it into a register. Do this
856 so that we always generate the same set of insns for
857 better cse'ing; if an intermediate assignment occurred,
858 we won't be doing the operation directly on the SUBREG. */
859 if (optimize > 0 && GET_CODE (from) == SUBREG)
860 from = force_reg (from_mode, from);
861 emit_unop_insn (code, to, from, equiv_code);
862 return;
863 }
864 /* Next, try converting via full word. */
865 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
866 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
867 != CODE_FOR_nothing))
868 {
869 if (GET_CODE (to) == REG)
870 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
871 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
872 emit_unop_insn (code, to,
873 gen_lowpart (word_mode, to), equiv_code);
874 return;
875 }
876
877 /* No special multiword conversion insn; do it by hand. */
878 start_sequence ();
879
880 /* Since we will turn this into a no conflict block, we must ensure
881 that the source does not overlap the target. */
882
883 if (reg_overlap_mentioned_p (to, from))
884 from = force_reg (from_mode, from);
885
886 /* Get a copy of FROM widened to a word, if necessary. */
887 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
888 lowpart_mode = word_mode;
889 else
890 lowpart_mode = from_mode;
891
892 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
893
894 lowpart = gen_lowpart (lowpart_mode, to);
895 emit_move_insn (lowpart, lowfrom);
896
897 /* Compute the value to put in each remaining word. */
898 if (unsignedp)
899 fill_value = const0_rtx;
900 else
901 {
902 #ifdef HAVE_slt
903 if (HAVE_slt
904 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
905 && STORE_FLAG_VALUE == -1)
906 {
907 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
908 lowpart_mode, 0, 0);
909 fill_value = gen_reg_rtx (word_mode);
910 emit_insn (gen_slt (fill_value));
911 }
912 else
913 #endif
914 {
915 fill_value
916 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
917 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
918 NULL_RTX, 0);
919 fill_value = convert_to_mode (word_mode, fill_value, 1);
920 }
921 }
922
923 /* Fill the remaining words. */
924 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
925 {
926 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
927 rtx subword = operand_subword (to, index, 1, to_mode);
928
929 if (subword == 0)
930 abort ();
931
932 if (fill_value != subword)
933 emit_move_insn (subword, fill_value);
934 }
935
936 insns = get_insns ();
937 end_sequence ();
938
939 emit_no_conflict_block (insns, to, from, NULL_RTX,
940 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
941 return;
942 }
943
944 /* Truncating multi-word to a word or less. */
945 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
946 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
947 {
948 if (!((GET_CODE (from) == MEM
949 && ! MEM_VOLATILE_P (from)
950 && direct_load[(int) to_mode]
951 && ! mode_dependent_address_p (XEXP (from, 0)))
952 || GET_CODE (from) == REG
953 || GET_CODE (from) == SUBREG))
954 from = force_reg (from_mode, from);
955 convert_move (to, gen_lowpart (word_mode, from), 0);
956 return;
957 }
958
959 /* Handle pointer conversion */ /* SPEE 900220 */
960 if (to_mode == PSImode)
961 {
962 if (from_mode != SImode)
963 from = convert_to_mode (SImode, from, unsignedp);
964
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2)
967 {
968 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
969 return;
970 }
971 #endif /* HAVE_truncsipsi2 */
972 abort ();
973 }
974
975 if (from_mode == PSImode)
976 {
977 if (to_mode != SImode)
978 {
979 from = convert_to_mode (SImode, from, unsignedp);
980 from_mode = SImode;
981 }
982 else
983 {
984 #ifdef HAVE_extendpsisi2
985 if (HAVE_extendpsisi2)
986 {
987 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
988 return;
989 }
990 #endif /* HAVE_extendpsisi2 */
991 abort ();
992 }
993 }
994
995 if (to_mode == PDImode)
996 {
997 if (from_mode != DImode)
998 from = convert_to_mode (DImode, from, unsignedp);
999
1000 #ifdef HAVE_truncdipdi2
1001 if (HAVE_truncdipdi2)
1002 {
1003 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1004 return;
1005 }
1006 #endif /* HAVE_truncdipdi2 */
1007 abort ();
1008 }
1009
1010 if (from_mode == PDImode)
1011 {
1012 if (to_mode != DImode)
1013 {
1014 from = convert_to_mode (DImode, from, unsignedp);
1015 from_mode = DImode;
1016 }
1017 else
1018 {
1019 #ifdef HAVE_extendpdidi2
1020 if (HAVE_extendpdidi2)
1021 {
1022 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1023 return;
1024 }
1025 #endif /* HAVE_extendpdidi2 */
1026 abort ();
1027 }
1028 }
1029
1030 /* Now follow all the conversions between integers
1031 no more than a word long. */
1032
1033 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1034 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1035 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1036 GET_MODE_BITSIZE (from_mode)))
1037 {
1038 if (!((GET_CODE (from) == MEM
1039 && ! MEM_VOLATILE_P (from)
1040 && direct_load[(int) to_mode]
1041 && ! mode_dependent_address_p (XEXP (from, 0)))
1042 || GET_CODE (from) == REG
1043 || GET_CODE (from) == SUBREG))
1044 from = force_reg (from_mode, from);
1045 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1046 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1047 from = copy_to_reg (from);
1048 emit_move_insn (to, gen_lowpart (to_mode, from));
1049 return;
1050 }
1051
1052 /* Handle extension. */
1053 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1054 {
1055 /* Convert directly if that works. */
1056 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1057 != CODE_FOR_nothing)
1058 {
1059 emit_unop_insn (code, to, from, equiv_code);
1060 return;
1061 }
1062 else
1063 {
1064 enum machine_mode intermediate;
1065
1066 /* Search for a mode to convert via. */
1067 for (intermediate = from_mode; intermediate != VOIDmode;
1068 intermediate = GET_MODE_WIDER_MODE (intermediate))
1069 if (((can_extend_p (to_mode, intermediate, unsignedp)
1070 != CODE_FOR_nothing)
1071 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1072 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1073 && (can_extend_p (intermediate, from_mode, unsignedp)
1074 != CODE_FOR_nothing))
1075 {
1076 convert_move (to, convert_to_mode (intermediate, from,
1077 unsignedp), unsignedp);
1078 return;
1079 }
1080
1081 /* No suitable intermediate mode. */
1082 abort ();
1083 }
1084 }
1085
1086 /* Support special truncate insns for certain modes. */
1087
1088 if (from_mode == DImode && to_mode == SImode)
1089 {
1090 #ifdef HAVE_truncdisi2
1091 if (HAVE_truncdisi2)
1092 {
1093 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1094 return;
1095 }
1096 #endif
1097 convert_move (to, force_reg (from_mode, from), unsignedp);
1098 return;
1099 }
1100
1101 if (from_mode == DImode && to_mode == HImode)
1102 {
1103 #ifdef HAVE_truncdihi2
1104 if (HAVE_truncdihi2)
1105 {
1106 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1107 return;
1108 }
1109 #endif
1110 convert_move (to, force_reg (from_mode, from), unsignedp);
1111 return;
1112 }
1113
1114 if (from_mode == DImode && to_mode == QImode)
1115 {
1116 #ifdef HAVE_truncdiqi2
1117 if (HAVE_truncdiqi2)
1118 {
1119 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1120 return;
1121 }
1122 #endif
1123 convert_move (to, force_reg (from_mode, from), unsignedp);
1124 return;
1125 }
1126
1127 if (from_mode == SImode && to_mode == HImode)
1128 {
1129 #ifdef HAVE_truncsihi2
1130 if (HAVE_truncsihi2)
1131 {
1132 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1133 return;
1134 }
1135 #endif
1136 convert_move (to, force_reg (from_mode, from), unsignedp);
1137 return;
1138 }
1139
1140 if (from_mode == SImode && to_mode == QImode)
1141 {
1142 #ifdef HAVE_truncsiqi2
1143 if (HAVE_truncsiqi2)
1144 {
1145 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1146 return;
1147 }
1148 #endif
1149 convert_move (to, force_reg (from_mode, from), unsignedp);
1150 return;
1151 }
1152
1153 if (from_mode == HImode && to_mode == QImode)
1154 {
1155 #ifdef HAVE_trunchiqi2
1156 if (HAVE_trunchiqi2)
1157 {
1158 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1159 return;
1160 }
1161 #endif
1162 convert_move (to, force_reg (from_mode, from), unsignedp);
1163 return;
1164 }
1165
1166 if (from_mode == TImode && to_mode == DImode)
1167 {
1168 #ifdef HAVE_trunctidi2
1169 if (HAVE_trunctidi2)
1170 {
1171 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1172 return;
1173 }
1174 #endif
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1176 return;
1177 }
1178
1179 if (from_mode == TImode && to_mode == SImode)
1180 {
1181 #ifdef HAVE_trunctisi2
1182 if (HAVE_trunctisi2)
1183 {
1184 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1185 return;
1186 }
1187 #endif
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1189 return;
1190 }
1191
1192 if (from_mode == TImode && to_mode == HImode)
1193 {
1194 #ifdef HAVE_trunctihi2
1195 if (HAVE_trunctihi2)
1196 {
1197 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1198 return;
1199 }
1200 #endif
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1202 return;
1203 }
1204
1205 if (from_mode == TImode && to_mode == QImode)
1206 {
1207 #ifdef HAVE_trunctiqi2
1208 if (HAVE_trunctiqi2)
1209 {
1210 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1211 return;
1212 }
1213 #endif
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1215 return;
1216 }
1217
1218 /* Handle truncation of volatile memrefs, and so on;
1219 the things that couldn't be truncated directly,
1220 and for which there was no special instruction. */
1221 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1222 {
1223 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1224 emit_move_insn (to, temp);
1225 return;
1226 }
1227
1228 /* Mode combination is not recognized. */
1229 abort ();
1230 }
1231
1232 /* Return an rtx for a value that would result
1233 from converting X to mode MODE.
1234 Both X and MODE may be floating, or both integer.
1235 UNSIGNEDP is nonzero if X is an unsigned value.
1236 This can be done by referring to a part of X in place
1237 or by copying to a new temporary with conversion.
1238
1239 This function *must not* call protect_from_queue
1240 except when putting X into an insn (in which case convert_move does it). */
1241
1242 rtx
1243 convert_to_mode (mode, x, unsignedp)
1244 enum machine_mode mode;
1245 rtx x;
1246 int unsignedp;
1247 {
1248 return convert_modes (mode, VOIDmode, x, unsignedp);
1249 }
1250
1251 /* Return an rtx for a value that would result
1252 from converting X from mode OLDMODE to mode MODE.
1253 Both modes may be floating, or both integer.
1254 UNSIGNEDP is nonzero if X is an unsigned value.
1255
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1258
1259 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1260
1261 This function *must not* call protect_from_queue
1262 except when putting X into an insn (in which case convert_move does it). */
1263
1264 rtx
1265 convert_modes (mode, oldmode, x, unsignedp)
1266 enum machine_mode mode, oldmode;
1267 rtx x;
1268 int unsignedp;
1269 {
1270 register rtx temp;
1271
1272 /* If FROM is a SUBREG that indicates that we have already done at least
1273 the required extension, strip it. */
1274
1275 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1276 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1277 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1278 x = gen_lowpart (mode, x);
1279
1280 if (GET_MODE (x) != VOIDmode)
1281 oldmode = GET_MODE (x);
1282
1283 if (mode == oldmode)
1284 return x;
1285
1286 /* There is one case that we must handle specially: If we are converting
1287 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1288 we are to interpret the constant as unsigned, gen_lowpart will do
1289 the wrong if the constant appears negative. What we want to do is
1290 make the high-order word of the constant zero, not all ones. */
1291
1292 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1293 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1294 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1295 {
1296 HOST_WIDE_INT val = INTVAL (x);
1297
1298 if (oldmode != VOIDmode
1299 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1300 {
1301 int width = GET_MODE_BITSIZE (oldmode);
1302
1303 /* We need to zero extend VAL. */
1304 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1305 }
1306
1307 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1308 }
1309
1310 /* We can do this with a gen_lowpart if both desired and current modes
1311 are integer, and this is either a constant integer, a register, or a
1312 non-volatile MEM. Except for the constant case where MODE is no
1313 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1314
1315 if ((GET_CODE (x) == CONST_INT
1316 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1317 || (GET_MODE_CLASS (mode) == MODE_INT
1318 && GET_MODE_CLASS (oldmode) == MODE_INT
1319 && (GET_CODE (x) == CONST_DOUBLE
1320 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1321 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1322 && direct_load[(int) mode])
1323 || (GET_CODE (x) == REG
1324 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1325 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1326 {
1327 /* ?? If we don't know OLDMODE, we have to assume here that
1328 X does not need sign- or zero-extension. This may not be
1329 the case, but it's the best we can do. */
1330 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1331 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1332 {
1333 HOST_WIDE_INT val = INTVAL (x);
1334 int width = GET_MODE_BITSIZE (oldmode);
1335
1336 /* We must sign or zero-extend in this case. Start by
1337 zero-extending, then sign extend if we need to. */
1338 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1339 if (! unsignedp
1340 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1341 val |= (HOST_WIDE_INT) (-1) << width;
1342
1343 return GEN_INT (val);
1344 }
1345
1346 return gen_lowpart (mode, x);
1347 }
1348
1349 temp = gen_reg_rtx (mode);
1350 convert_move (temp, x, unsignedp);
1351 return temp;
1352 }
1353 \f
1354 /* Generate several move instructions to copy LEN bytes
1355 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1356 The caller must pass FROM and TO
1357 through protect_from_queue before calling.
1358 ALIGN (in bytes) is maximum alignment we can assume. */
1359
1360 static void
1361 move_by_pieces (to, from, len, align)
1362 rtx to, from;
1363 int len, align;
1364 {
1365 struct move_by_pieces data;
1366 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1367 int max_size = MOVE_MAX + 1;
1368
1369 data.offset = 0;
1370 data.to_addr = to_addr;
1371 data.from_addr = from_addr;
1372 data.to = to;
1373 data.from = from;
1374 data.autinc_to
1375 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1376 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1377 data.autinc_from
1378 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1379 || GET_CODE (from_addr) == POST_INC
1380 || GET_CODE (from_addr) == POST_DEC);
1381
1382 data.explicit_inc_from = 0;
1383 data.explicit_inc_to = 0;
1384 data.reverse
1385 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1386 if (data.reverse) data.offset = len;
1387 data.len = len;
1388
1389 data.to_struct = MEM_IN_STRUCT_P (to);
1390 data.from_struct = MEM_IN_STRUCT_P (from);
1391
1392 /* If copying requires more than two move insns,
1393 copy addresses to registers (to make displacements shorter)
1394 and use post-increment if available. */
1395 if (!(data.autinc_from && data.autinc_to)
1396 && move_by_pieces_ninsns (len, align) > 2)
1397 {
1398 #ifdef HAVE_PRE_DECREMENT
1399 if (data.reverse && ! data.autinc_from)
1400 {
1401 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1402 data.autinc_from = 1;
1403 data.explicit_inc_from = -1;
1404 }
1405 #endif
1406 #ifdef HAVE_POST_INCREMENT
1407 if (! data.autinc_from)
1408 {
1409 data.from_addr = copy_addr_to_reg (from_addr);
1410 data.autinc_from = 1;
1411 data.explicit_inc_from = 1;
1412 }
1413 #endif
1414 if (!data.autinc_from && CONSTANT_P (from_addr))
1415 data.from_addr = copy_addr_to_reg (from_addr);
1416 #ifdef HAVE_PRE_DECREMENT
1417 if (data.reverse && ! data.autinc_to)
1418 {
1419 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1420 data.autinc_to = 1;
1421 data.explicit_inc_to = -1;
1422 }
1423 #endif
1424 #ifdef HAVE_POST_INCREMENT
1425 if (! data.reverse && ! data.autinc_to)
1426 {
1427 data.to_addr = copy_addr_to_reg (to_addr);
1428 data.autinc_to = 1;
1429 data.explicit_inc_to = 1;
1430 }
1431 #endif
1432 if (!data.autinc_to && CONSTANT_P (to_addr))
1433 data.to_addr = copy_addr_to_reg (to_addr);
1434 }
1435
1436 if (! SLOW_UNALIGNED_ACCESS
1437 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1438 align = MOVE_MAX;
1439
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes. */
1442
1443 while (max_size > 1)
1444 {
1445 enum machine_mode mode = VOIDmode, tmode;
1446 enum insn_code icode;
1447
1448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1449 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1450 if (GET_MODE_SIZE (tmode) < max_size)
1451 mode = tmode;
1452
1453 if (mode == VOIDmode)
1454 break;
1455
1456 icode = mov_optab->handlers[(int) mode].insn_code;
1457 if (icode != CODE_FOR_nothing
1458 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1459 GET_MODE_SIZE (mode)))
1460 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1461
1462 max_size = GET_MODE_SIZE (mode);
1463 }
1464
1465 /* The code above should have handled everything. */
1466 if (data.len != 0)
1467 abort ();
1468 }
1469
1470 /* Return number of insns required to move L bytes by pieces.
1471 ALIGN (in bytes) is maximum alignment we can assume. */
1472
1473 static int
1474 move_by_pieces_ninsns (l, align)
1475 unsigned int l;
1476 int align;
1477 {
1478 register int n_insns = 0;
1479 int max_size = MOVE_MAX + 1;
1480
1481 if (! SLOW_UNALIGNED_ACCESS
1482 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1483 align = MOVE_MAX;
1484
1485 while (max_size > 1)
1486 {
1487 enum machine_mode mode = VOIDmode, tmode;
1488 enum insn_code icode;
1489
1490 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1491 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1492 if (GET_MODE_SIZE (tmode) < max_size)
1493 mode = tmode;
1494
1495 if (mode == VOIDmode)
1496 break;
1497
1498 icode = mov_optab->handlers[(int) mode].insn_code;
1499 if (icode != CODE_FOR_nothing
1500 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1501 GET_MODE_SIZE (mode)))
1502 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1503
1504 max_size = GET_MODE_SIZE (mode);
1505 }
1506
1507 return n_insns;
1508 }
1509
1510 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1511 with move instructions for mode MODE. GENFUN is the gen_... function
1512 to make a move insn for that mode. DATA has all the other info. */
1513
1514 static void
1515 move_by_pieces_1 (genfun, mode, data)
1516 rtx (*genfun) ();
1517 enum machine_mode mode;
1518 struct move_by_pieces *data;
1519 {
1520 register int size = GET_MODE_SIZE (mode);
1521 register rtx to1, from1;
1522
1523 while (data->len >= size)
1524 {
1525 if (data->reverse) data->offset -= size;
1526
1527 to1 = (data->autinc_to
1528 ? gen_rtx (MEM, mode, data->to_addr)
1529 : change_address (data->to, mode,
1530 plus_constant (data->to_addr, data->offset)));
1531 MEM_IN_STRUCT_P (to1) = data->to_struct;
1532 from1 =
1533 (data->autinc_from
1534 ? gen_rtx (MEM, mode, data->from_addr)
1535 : change_address (data->from, mode,
1536 plus_constant (data->from_addr, data->offset)));
1537 MEM_IN_STRUCT_P (from1) = data->from_struct;
1538
1539 #ifdef HAVE_PRE_DECREMENT
1540 if (data->explicit_inc_to < 0)
1541 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1542 if (data->explicit_inc_from < 0)
1543 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1544 #endif
1545
1546 emit_insn ((*genfun) (to1, from1));
1547 #ifdef HAVE_POST_INCREMENT
1548 if (data->explicit_inc_to > 0)
1549 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1550 if (data->explicit_inc_from > 0)
1551 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1552 #endif
1553
1554 if (! data->reverse) data->offset += size;
1555
1556 data->len -= size;
1557 }
1558 }
1559 \f
1560 /* Emit code to move a block Y to a block X.
1561 This may be done with string-move instructions,
1562 with multiple scalar move instructions, or with a library call.
1563
1564 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1565 with mode BLKmode.
1566 SIZE is an rtx that says how long they are.
1567 ALIGN is the maximum alignment we can assume they have,
1568 measured in bytes. */
1569
1570 void
1571 emit_block_move (x, y, size, align)
1572 rtx x, y;
1573 rtx size;
1574 int align;
1575 {
1576 if (GET_MODE (x) != BLKmode)
1577 abort ();
1578
1579 if (GET_MODE (y) != BLKmode)
1580 abort ();
1581
1582 x = protect_from_queue (x, 1);
1583 y = protect_from_queue (y, 0);
1584 size = protect_from_queue (size, 0);
1585
1586 if (GET_CODE (x) != MEM)
1587 abort ();
1588 if (GET_CODE (y) != MEM)
1589 abort ();
1590 if (size == 0)
1591 abort ();
1592
1593 if (GET_CODE (size) == CONST_INT
1594 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1595 move_by_pieces (x, y, INTVAL (size), align);
1596 else
1597 {
1598 /* Try the most limited insn first, because there's no point
1599 including more than one in the machine description unless
1600 the more limited one has some advantage. */
1601
1602 rtx opalign = GEN_INT (align);
1603 enum machine_mode mode;
1604
1605 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1606 mode = GET_MODE_WIDER_MODE (mode))
1607 {
1608 enum insn_code code = movstr_optab[(int) mode];
1609
1610 if (code != CODE_FOR_nothing
1611 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1612 here because if SIZE is less than the mode mask, as it is
1613 returned by the macro, it will definitely be less than the
1614 actual mode mask. */
1615 && ((GET_CODE (size) == CONST_INT
1616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1617 <= GET_MODE_MASK (mode)))
1618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1619 && (insn_operand_predicate[(int) code][0] == 0
1620 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1621 && (insn_operand_predicate[(int) code][1] == 0
1622 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1623 && (insn_operand_predicate[(int) code][3] == 0
1624 || (*insn_operand_predicate[(int) code][3]) (opalign,
1625 VOIDmode)))
1626 {
1627 rtx op2;
1628 rtx last = get_last_insn ();
1629 rtx pat;
1630
1631 op2 = convert_to_mode (mode, size, 1);
1632 if (insn_operand_predicate[(int) code][2] != 0
1633 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1634 op2 = copy_to_mode_reg (mode, op2);
1635
1636 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1637 if (pat)
1638 {
1639 emit_insn (pat);
1640 return;
1641 }
1642 else
1643 delete_insns_since (last);
1644 }
1645 }
1646
1647 #ifdef TARGET_MEM_FUNCTIONS
1648 emit_library_call (memcpy_libfunc, 0,
1649 VOIDmode, 3, XEXP (x, 0), Pmode,
1650 XEXP (y, 0), Pmode,
1651 convert_to_mode (TYPE_MODE (sizetype), size,
1652 TREE_UNSIGNED (sizetype)),
1653 TYPE_MODE (sizetype));
1654 #else
1655 emit_library_call (bcopy_libfunc, 0,
1656 VOIDmode, 3, XEXP (y, 0), Pmode,
1657 XEXP (x, 0), Pmode,
1658 convert_to_mode (TYPE_MODE (integer_type_node), size,
1659 TREE_UNSIGNED (integer_type_node)),
1660 TYPE_MODE (integer_type_node));
1661 #endif
1662 }
1663 }
1664 \f
1665 /* Copy all or part of a value X into registers starting at REGNO.
1666 The number of registers to be filled is NREGS. */
1667
1668 void
1669 move_block_to_reg (regno, x, nregs, mode)
1670 int regno;
1671 rtx x;
1672 int nregs;
1673 enum machine_mode mode;
1674 {
1675 int i;
1676 rtx pat, last;
1677
1678 if (nregs == 0)
1679 return;
1680
1681 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1682 x = validize_mem (force_const_mem (mode, x));
1683
1684 /* See if the machine can do this with a load multiple insn. */
1685 #ifdef HAVE_load_multiple
1686 if (HAVE_load_multiple)
1687 {
1688 last = get_last_insn ();
1689 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1690 GEN_INT (nregs));
1691 if (pat)
1692 {
1693 emit_insn (pat);
1694 return;
1695 }
1696 else
1697 delete_insns_since (last);
1698 }
1699 #endif
1700
1701 for (i = 0; i < nregs; i++)
1702 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1703 operand_subword_force (x, i, mode));
1704 }
1705
1706 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1707 The number of registers to be filled is NREGS. SIZE indicates the number
1708 of bytes in the object X. */
1709
1710
1711 void
1712 move_block_from_reg (regno, x, nregs, size)
1713 int regno;
1714 rtx x;
1715 int nregs;
1716 int size;
1717 {
1718 int i;
1719 rtx pat, last;
1720
1721 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1722 to the left before storing to memory. */
1723 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1724 {
1725 rtx tem = operand_subword (x, 0, 1, BLKmode);
1726 rtx shift;
1727
1728 if (tem == 0)
1729 abort ();
1730
1731 shift = expand_shift (LSHIFT_EXPR, word_mode,
1732 gen_rtx (REG, word_mode, regno),
1733 build_int_2 ((UNITS_PER_WORD - size)
1734 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1735 emit_move_insn (tem, shift);
1736 return;
1737 }
1738
1739 /* See if the machine can do this with a store multiple insn. */
1740 #ifdef HAVE_store_multiple
1741 if (HAVE_store_multiple)
1742 {
1743 last = get_last_insn ();
1744 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1745 GEN_INT (nregs));
1746 if (pat)
1747 {
1748 emit_insn (pat);
1749 return;
1750 }
1751 else
1752 delete_insns_since (last);
1753 }
1754 #endif
1755
1756 for (i = 0; i < nregs; i++)
1757 {
1758 rtx tem = operand_subword (x, i, 1, BLKmode);
1759
1760 if (tem == 0)
1761 abort ();
1762
1763 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1764 }
1765 }
1766
1767 /* Emit code to move a block Y to a block X, where X is non-consecutive
1768 registers represented by a PARALLEL. */
1769
1770 void
1771 emit_group_load (x, y)
1772 rtx x, y;
1773 {
1774 rtx target_reg, source;
1775 int i;
1776
1777 if (GET_CODE (x) != PARALLEL)
1778 abort ();
1779
1780 /* Check for a NULL entry, used to indicate that the parameter goes
1781 both on the stack and in registers. */
1782 if (XEXP (XVECEXP (x, 0, 0), 0))
1783 i = 0;
1784 else
1785 i = 1;
1786
1787 for (; i < XVECLEN (x, 0); i++)
1788 {
1789 rtx element = XVECEXP (x, 0, i);
1790
1791 target_reg = XEXP (element, 0);
1792
1793 if (GET_CODE (y) == MEM)
1794 source = change_address (y, GET_MODE (target_reg),
1795 plus_constant (XEXP (y, 0),
1796 INTVAL (XEXP (element, 1))));
1797 else if (XEXP (element, 1) == const0_rtx)
1798 {
1799 if (GET_MODE (target_reg) == GET_MODE (y))
1800 source = y;
1801 /* Allow for the target_reg to be smaller than the input register
1802 to allow for AIX with 4 DF arguments after a single SI arg. The
1803 last DF argument will only load 1 word into the integer registers,
1804 but load a DF value into the float registers. */
1805 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1806 <= GET_MODE_SIZE (GET_MODE (y)))
1807 source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
1808 else
1809 abort ();
1810 }
1811 else
1812 abort ();
1813
1814 emit_move_insn (target_reg, source);
1815 }
1816 }
1817
1818 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1819 registers represented by a PARALLEL. */
1820
1821 void
1822 emit_group_store (x, y)
1823 rtx x, y;
1824 {
1825 rtx source_reg, target;
1826 int i;
1827
1828 if (GET_CODE (y) != PARALLEL)
1829 abort ();
1830
1831 /* Check for a NULL entry, used to indicate that the parameter goes
1832 both on the stack and in registers. */
1833 if (XEXP (XVECEXP (y, 0, 0), 0))
1834 i = 0;
1835 else
1836 i = 1;
1837
1838 for (; i < XVECLEN (y, 0); i++)
1839 {
1840 rtx element = XVECEXP (y, 0, i);
1841
1842 source_reg = XEXP (element, 0);
1843
1844 if (GET_CODE (x) == MEM)
1845 target = change_address (x, GET_MODE (source_reg),
1846 plus_constant (XEXP (x, 0),
1847 INTVAL (XEXP (element, 1))));
1848 else if (XEXP (element, 1) == const0_rtx)
1849 target = x;
1850 else
1851 abort ();
1852
1853 emit_move_insn (target, source_reg);
1854 }
1855 }
1856
1857 /* Add a USE expression for REG to the (possibly empty) list pointed
1858 to by CALL_FUSAGE. REG must denote a hard register. */
1859
1860 void
1861 use_reg (call_fusage, reg)
1862 rtx *call_fusage, reg;
1863 {
1864 if (GET_CODE (reg) != REG
1865 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1866 abort();
1867
1868 *call_fusage
1869 = gen_rtx (EXPR_LIST, VOIDmode,
1870 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1871 }
1872
1873 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1874 starting at REGNO. All of these registers must be hard registers. */
1875
1876 void
1877 use_regs (call_fusage, regno, nregs)
1878 rtx *call_fusage;
1879 int regno;
1880 int nregs;
1881 {
1882 int i;
1883
1884 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1885 abort ();
1886
1887 for (i = 0; i < nregs; i++)
1888 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1889 }
1890
1891 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1892 PARALLEL REGS. This is for calls that pass values in multiple
1893 non-contiguous locations. The Irix 6 ABI has examples of this. */
1894
1895 void
1896 use_group_regs (call_fusage, regs)
1897 rtx *call_fusage;
1898 rtx regs;
1899 {
1900 int i;
1901
1902 /* Check for a NULL entry, used to indicate that the parameter goes
1903 both on the stack and in registers. */
1904 if (XEXP (XVECEXP (regs, 0, 0), 0))
1905 i = 0;
1906 else
1907 i = 1;
1908
1909 for (; i < XVECLEN (regs, 0); i++)
1910 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1911 }
1912 \f
1913 /* Generate several move instructions to clear LEN bytes of block TO.
1914 (A MEM rtx with BLKmode). The caller must pass TO through
1915 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1916 we can assume. */
1917
1918 static void
1919 clear_by_pieces (to, len, align)
1920 rtx to;
1921 int len, align;
1922 {
1923 struct clear_by_pieces data;
1924 rtx to_addr = XEXP (to, 0);
1925 int max_size = MOVE_MAX + 1;
1926
1927 data.offset = 0;
1928 data.to_addr = to_addr;
1929 data.to = to;
1930 data.autinc_to
1931 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1932 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1933
1934 data.explicit_inc_to = 0;
1935 data.reverse
1936 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1937 if (data.reverse) data.offset = len;
1938 data.len = len;
1939
1940 data.to_struct = MEM_IN_STRUCT_P (to);
1941
1942 /* If copying requires more than two move insns,
1943 copy addresses to registers (to make displacements shorter)
1944 and use post-increment if available. */
1945 if (!data.autinc_to
1946 && move_by_pieces_ninsns (len, align) > 2)
1947 {
1948 #ifdef HAVE_PRE_DECREMENT
1949 if (data.reverse && ! data.autinc_to)
1950 {
1951 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1952 data.autinc_to = 1;
1953 data.explicit_inc_to = -1;
1954 }
1955 #endif
1956 #ifdef HAVE_POST_INCREMENT
1957 if (! data.reverse && ! data.autinc_to)
1958 {
1959 data.to_addr = copy_addr_to_reg (to_addr);
1960 data.autinc_to = 1;
1961 data.explicit_inc_to = 1;
1962 }
1963 #endif
1964 if (!data.autinc_to && CONSTANT_P (to_addr))
1965 data.to_addr = copy_addr_to_reg (to_addr);
1966 }
1967
1968 if (! SLOW_UNALIGNED_ACCESS
1969 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1970 align = MOVE_MAX;
1971
1972 /* First move what we can in the largest integer mode, then go to
1973 successively smaller modes. */
1974
1975 while (max_size > 1)
1976 {
1977 enum machine_mode mode = VOIDmode, tmode;
1978 enum insn_code icode;
1979
1980 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1981 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1982 if (GET_MODE_SIZE (tmode) < max_size)
1983 mode = tmode;
1984
1985 if (mode == VOIDmode)
1986 break;
1987
1988 icode = mov_optab->handlers[(int) mode].insn_code;
1989 if (icode != CODE_FOR_nothing
1990 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1991 GET_MODE_SIZE (mode)))
1992 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
1993
1994 max_size = GET_MODE_SIZE (mode);
1995 }
1996
1997 /* The code above should have handled everything. */
1998 if (data.len != 0)
1999 abort ();
2000 }
2001
2002 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2003 with move instructions for mode MODE. GENFUN is the gen_... function
2004 to make a move insn for that mode. DATA has all the other info. */
2005
2006 static void
2007 clear_by_pieces_1 (genfun, mode, data)
2008 rtx (*genfun) ();
2009 enum machine_mode mode;
2010 struct clear_by_pieces *data;
2011 {
2012 register int size = GET_MODE_SIZE (mode);
2013 register rtx to1;
2014
2015 while (data->len >= size)
2016 {
2017 if (data->reverse) data->offset -= size;
2018
2019 to1 = (data->autinc_to
2020 ? gen_rtx (MEM, mode, data->to_addr)
2021 : change_address (data->to, mode,
2022 plus_constant (data->to_addr, data->offset)));
2023 MEM_IN_STRUCT_P (to1) = data->to_struct;
2024
2025 #ifdef HAVE_PRE_DECREMENT
2026 if (data->explicit_inc_to < 0)
2027 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2028 #endif
2029
2030 emit_insn ((*genfun) (to1, const0_rtx));
2031 #ifdef HAVE_POST_INCREMENT
2032 if (data->explicit_inc_to > 0)
2033 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2034 #endif
2035
2036 if (! data->reverse) data->offset += size;
2037
2038 data->len -= size;
2039 }
2040 }
2041 \f
2042 /* Write zeros through the storage of OBJECT.
2043 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2044 the maximum alignment we can is has, measured in bytes. */
2045
2046 void
2047 clear_storage (object, size, align)
2048 rtx object;
2049 rtx size;
2050 int align;
2051 {
2052 if (GET_MODE (object) == BLKmode)
2053 {
2054 object = protect_from_queue (object, 1);
2055 size = protect_from_queue (size, 0);
2056
2057 if (GET_CODE (size) == CONST_INT
2058 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2059 clear_by_pieces (object, INTVAL (size), align);
2060
2061 else
2062 {
2063 /* Try the most limited insn first, because there's no point
2064 including more than one in the machine description unless
2065 the more limited one has some advantage. */
2066
2067 rtx opalign = GEN_INT (align);
2068 enum machine_mode mode;
2069
2070 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2071 mode = GET_MODE_WIDER_MODE (mode))
2072 {
2073 enum insn_code code = clrstr_optab[(int) mode];
2074
2075 if (code != CODE_FOR_nothing
2076 /* We don't need MODE to be narrower than
2077 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2078 the mode mask, as it is returned by the macro, it will
2079 definitely be less than the actual mode mask. */
2080 && ((GET_CODE (size) == CONST_INT
2081 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2082 <= GET_MODE_MASK (mode)))
2083 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2084 && (insn_operand_predicate[(int) code][0] == 0
2085 || (*insn_operand_predicate[(int) code][0]) (object,
2086 BLKmode))
2087 && (insn_operand_predicate[(int) code][2] == 0
2088 || (*insn_operand_predicate[(int) code][2]) (opalign,
2089 VOIDmode)))
2090 {
2091 rtx op1;
2092 rtx last = get_last_insn ();
2093 rtx pat;
2094
2095 op1 = convert_to_mode (mode, size, 1);
2096 if (insn_operand_predicate[(int) code][1] != 0
2097 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2098 mode))
2099 op1 = copy_to_mode_reg (mode, op1);
2100
2101 pat = GEN_FCN ((int) code) (object, op1, opalign);
2102 if (pat)
2103 {
2104 emit_insn (pat);
2105 return;
2106 }
2107 else
2108 delete_insns_since (last);
2109 }
2110 }
2111
2112
2113 #ifdef TARGET_MEM_FUNCTIONS
2114 emit_library_call (memset_libfunc, 0,
2115 VOIDmode, 3,
2116 XEXP (object, 0), Pmode,
2117 const0_rtx, TYPE_MODE (integer_type_node),
2118 convert_to_mode (TYPE_MODE (sizetype),
2119 size, TREE_UNSIGNED (sizetype)),
2120 TYPE_MODE (sizetype));
2121 #else
2122 emit_library_call (bzero_libfunc, 0,
2123 VOIDmode, 2,
2124 XEXP (object, 0), Pmode,
2125 convert_to_mode (TYPE_MODE (integer_type_node),
2126 size,
2127 TREE_UNSIGNED (integer_type_node)),
2128 TYPE_MODE (integer_type_node));
2129 #endif
2130 }
2131 }
2132 else
2133 emit_move_insn (object, const0_rtx);
2134 }
2135
2136 /* Generate code to copy Y into X.
2137 Both Y and X must have the same mode, except that
2138 Y can be a constant with VOIDmode.
2139 This mode cannot be BLKmode; use emit_block_move for that.
2140
2141 Return the last instruction emitted. */
2142
2143 rtx
2144 emit_move_insn (x, y)
2145 rtx x, y;
2146 {
2147 enum machine_mode mode = GET_MODE (x);
2148
2149 x = protect_from_queue (x, 1);
2150 y = protect_from_queue (y, 0);
2151
2152 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2153 abort ();
2154
2155 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2156 y = force_const_mem (mode, y);
2157
2158 /* If X or Y are memory references, verify that their addresses are valid
2159 for the machine. */
2160 if (GET_CODE (x) == MEM
2161 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2162 && ! push_operand (x, GET_MODE (x)))
2163 || (flag_force_addr
2164 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2165 x = change_address (x, VOIDmode, XEXP (x, 0));
2166
2167 if (GET_CODE (y) == MEM
2168 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2169 || (flag_force_addr
2170 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2171 y = change_address (y, VOIDmode, XEXP (y, 0));
2172
2173 if (mode == BLKmode)
2174 abort ();
2175
2176 return emit_move_insn_1 (x, y);
2177 }
2178
2179 /* Low level part of emit_move_insn.
2180 Called just like emit_move_insn, but assumes X and Y
2181 are basically valid. */
2182
2183 rtx
2184 emit_move_insn_1 (x, y)
2185 rtx x, y;
2186 {
2187 enum machine_mode mode = GET_MODE (x);
2188 enum machine_mode submode;
2189 enum mode_class class = GET_MODE_CLASS (mode);
2190 int i;
2191
2192 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2193 return
2194 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2195
2196 /* Expand complex moves by moving real part and imag part, if possible. */
2197 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2198 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2199 * BITS_PER_UNIT),
2200 (class == MODE_COMPLEX_INT
2201 ? MODE_INT : MODE_FLOAT),
2202 0))
2203 && (mov_optab->handlers[(int) submode].insn_code
2204 != CODE_FOR_nothing))
2205 {
2206 /* Don't split destination if it is a stack push. */
2207 int stack = push_operand (x, GET_MODE (x));
2208 rtx insns;
2209
2210 /* If this is a stack, push the highpart first, so it
2211 will be in the argument order.
2212
2213 In that case, change_address is used only to convert
2214 the mode, not to change the address. */
2215 if (stack)
2216 {
2217 /* Note that the real part always precedes the imag part in memory
2218 regardless of machine's endianness. */
2219 #ifdef STACK_GROWS_DOWNWARD
2220 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2221 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2222 gen_imagpart (submode, y)));
2223 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2224 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2225 gen_realpart (submode, y)));
2226 #else
2227 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2228 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2229 gen_realpart (submode, y)));
2230 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2231 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2232 gen_imagpart (submode, y)));
2233 #endif
2234 }
2235 else
2236 {
2237 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2238 (gen_realpart (submode, x), gen_realpart (submode, y)));
2239 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2240 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2241 }
2242
2243 return get_last_insn ();
2244 }
2245
2246 /* This will handle any multi-word mode that lacks a move_insn pattern.
2247 However, you will get better code if you define such patterns,
2248 even if they must turn into multiple assembler instructions. */
2249 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2250 {
2251 rtx last_insn = 0;
2252 rtx insns;
2253
2254 #ifdef PUSH_ROUNDING
2255
2256 /* If X is a push on the stack, do the push now and replace
2257 X with a reference to the stack pointer. */
2258 if (push_operand (x, GET_MODE (x)))
2259 {
2260 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2261 x = change_address (x, VOIDmode, stack_pointer_rtx);
2262 }
2263 #endif
2264
2265 /* Show the output dies here. */
2266 if (x != y)
2267 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2268
2269 for (i = 0;
2270 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2271 i++)
2272 {
2273 rtx xpart = operand_subword (x, i, 1, mode);
2274 rtx ypart = operand_subword (y, i, 1, mode);
2275
2276 /* If we can't get a part of Y, put Y into memory if it is a
2277 constant. Otherwise, force it into a register. If we still
2278 can't get a part of Y, abort. */
2279 if (ypart == 0 && CONSTANT_P (y))
2280 {
2281 y = force_const_mem (mode, y);
2282 ypart = operand_subword (y, i, 1, mode);
2283 }
2284 else if (ypart == 0)
2285 ypart = operand_subword_force (y, i, mode);
2286
2287 if (xpart == 0 || ypart == 0)
2288 abort ();
2289
2290 last_insn = emit_move_insn (xpart, ypart);
2291 }
2292
2293 return last_insn;
2294 }
2295 else
2296 abort ();
2297 }
2298 \f
2299 /* Pushing data onto the stack. */
2300
2301 /* Push a block of length SIZE (perhaps variable)
2302 and return an rtx to address the beginning of the block.
2303 Note that it is not possible for the value returned to be a QUEUED.
2304 The value may be virtual_outgoing_args_rtx.
2305
2306 EXTRA is the number of bytes of padding to push in addition to SIZE.
2307 BELOW nonzero means this padding comes at low addresses;
2308 otherwise, the padding comes at high addresses. */
2309
2310 rtx
2311 push_block (size, extra, below)
2312 rtx size;
2313 int extra, below;
2314 {
2315 register rtx temp;
2316
2317 size = convert_modes (Pmode, ptr_mode, size, 1);
2318 if (CONSTANT_P (size))
2319 anti_adjust_stack (plus_constant (size, extra));
2320 else if (GET_CODE (size) == REG && extra == 0)
2321 anti_adjust_stack (size);
2322 else
2323 {
2324 rtx temp = copy_to_mode_reg (Pmode, size);
2325 if (extra != 0)
2326 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2327 temp, 0, OPTAB_LIB_WIDEN);
2328 anti_adjust_stack (temp);
2329 }
2330
2331 #ifdef STACK_GROWS_DOWNWARD
2332 temp = virtual_outgoing_args_rtx;
2333 if (extra != 0 && below)
2334 temp = plus_constant (temp, extra);
2335 #else
2336 if (GET_CODE (size) == CONST_INT)
2337 temp = plus_constant (virtual_outgoing_args_rtx,
2338 - INTVAL (size) - (below ? 0 : extra));
2339 else if (extra != 0 && !below)
2340 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2341 negate_rtx (Pmode, plus_constant (size, extra)));
2342 else
2343 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2344 negate_rtx (Pmode, size));
2345 #endif
2346
2347 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2348 }
2349
2350 rtx
2351 gen_push_operand ()
2352 {
2353 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2354 }
2355
2356 /* Generate code to push X onto the stack, assuming it has mode MODE and
2357 type TYPE.
2358 MODE is redundant except when X is a CONST_INT (since they don't
2359 carry mode info).
2360 SIZE is an rtx for the size of data to be copied (in bytes),
2361 needed only if X is BLKmode.
2362
2363 ALIGN (in bytes) is maximum alignment we can assume.
2364
2365 If PARTIAL and REG are both nonzero, then copy that many of the first
2366 words of X into registers starting with REG, and push the rest of X.
2367 The amount of space pushed is decreased by PARTIAL words,
2368 rounded *down* to a multiple of PARM_BOUNDARY.
2369 REG must be a hard register in this case.
2370 If REG is zero but PARTIAL is not, take any all others actions for an
2371 argument partially in registers, but do not actually load any
2372 registers.
2373
2374 EXTRA is the amount in bytes of extra space to leave next to this arg.
2375 This is ignored if an argument block has already been allocated.
2376
2377 On a machine that lacks real push insns, ARGS_ADDR is the address of
2378 the bottom of the argument block for this call. We use indexing off there
2379 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2380 argument block has not been preallocated.
2381
2382 ARGS_SO_FAR is the size of args previously pushed for this call. */
2383
2384 void
2385 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2386 args_addr, args_so_far)
2387 register rtx x;
2388 enum machine_mode mode;
2389 tree type;
2390 rtx size;
2391 int align;
2392 int partial;
2393 rtx reg;
2394 int extra;
2395 rtx args_addr;
2396 rtx args_so_far;
2397 {
2398 rtx xinner;
2399 enum direction stack_direction
2400 #ifdef STACK_GROWS_DOWNWARD
2401 = downward;
2402 #else
2403 = upward;
2404 #endif
2405
2406 /* Decide where to pad the argument: `downward' for below,
2407 `upward' for above, or `none' for don't pad it.
2408 Default is below for small data on big-endian machines; else above. */
2409 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2410
2411 /* If we're placing part of X into a register and part of X onto
2412 the stack, indicate that the entire register is clobbered to
2413 keep flow from thinking the unused part of the register is live. */
2414 if (partial > 0 && reg != 0)
2415 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2416
2417 /* Invert direction if stack is post-update. */
2418 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2419 if (where_pad != none)
2420 where_pad = (where_pad == downward ? upward : downward);
2421
2422 xinner = x = protect_from_queue (x, 0);
2423
2424 if (mode == BLKmode)
2425 {
2426 /* Copy a block into the stack, entirely or partially. */
2427
2428 register rtx temp;
2429 int used = partial * UNITS_PER_WORD;
2430 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2431 int skip;
2432
2433 if (size == 0)
2434 abort ();
2435
2436 used -= offset;
2437
2438 /* USED is now the # of bytes we need not copy to the stack
2439 because registers will take care of them. */
2440
2441 if (partial != 0)
2442 xinner = change_address (xinner, BLKmode,
2443 plus_constant (XEXP (xinner, 0), used));
2444
2445 /* If the partial register-part of the arg counts in its stack size,
2446 skip the part of stack space corresponding to the registers.
2447 Otherwise, start copying to the beginning of the stack space,
2448 by setting SKIP to 0. */
2449 #ifndef REG_PARM_STACK_SPACE
2450 skip = 0;
2451 #else
2452 skip = used;
2453 #endif
2454
2455 #ifdef PUSH_ROUNDING
2456 /* Do it with several push insns if that doesn't take lots of insns
2457 and if there is no difficulty with push insns that skip bytes
2458 on the stack for alignment purposes. */
2459 if (args_addr == 0
2460 && GET_CODE (size) == CONST_INT
2461 && skip == 0
2462 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2463 < MOVE_RATIO)
2464 /* Here we avoid the case of a structure whose weak alignment
2465 forces many pushes of a small amount of data,
2466 and such small pushes do rounding that causes trouble. */
2467 && ((! SLOW_UNALIGNED_ACCESS)
2468 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2469 || PUSH_ROUNDING (align) == align)
2470 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2471 {
2472 /* Push padding now if padding above and stack grows down,
2473 or if padding below and stack grows up.
2474 But if space already allocated, this has already been done. */
2475 if (extra && args_addr == 0
2476 && where_pad != none && where_pad != stack_direction)
2477 anti_adjust_stack (GEN_INT (extra));
2478
2479 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2480 INTVAL (size) - used, align);
2481 }
2482 else
2483 #endif /* PUSH_ROUNDING */
2484 {
2485 /* Otherwise make space on the stack and copy the data
2486 to the address of that space. */
2487
2488 /* Deduct words put into registers from the size we must copy. */
2489 if (partial != 0)
2490 {
2491 if (GET_CODE (size) == CONST_INT)
2492 size = GEN_INT (INTVAL (size) - used);
2493 else
2494 size = expand_binop (GET_MODE (size), sub_optab, size,
2495 GEN_INT (used), NULL_RTX, 0,
2496 OPTAB_LIB_WIDEN);
2497 }
2498
2499 /* Get the address of the stack space.
2500 In this case, we do not deal with EXTRA separately.
2501 A single stack adjust will do. */
2502 if (! args_addr)
2503 {
2504 temp = push_block (size, extra, where_pad == downward);
2505 extra = 0;
2506 }
2507 else if (GET_CODE (args_so_far) == CONST_INT)
2508 temp = memory_address (BLKmode,
2509 plus_constant (args_addr,
2510 skip + INTVAL (args_so_far)));
2511 else
2512 temp = memory_address (BLKmode,
2513 plus_constant (gen_rtx (PLUS, Pmode,
2514 args_addr, args_so_far),
2515 skip));
2516
2517 /* TEMP is the address of the block. Copy the data there. */
2518 if (GET_CODE (size) == CONST_INT
2519 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2520 < MOVE_RATIO))
2521 {
2522 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2523 INTVAL (size), align);
2524 goto ret;
2525 }
2526 /* Try the most limited insn first, because there's no point
2527 including more than one in the machine description unless
2528 the more limited one has some advantage. */
2529 #ifdef HAVE_movstrqi
2530 if (HAVE_movstrqi
2531 && GET_CODE (size) == CONST_INT
2532 && ((unsigned) INTVAL (size)
2533 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2534 {
2535 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2536 xinner, size, GEN_INT (align));
2537 if (pat != 0)
2538 {
2539 emit_insn (pat);
2540 goto ret;
2541 }
2542 }
2543 #endif
2544 #ifdef HAVE_movstrhi
2545 if (HAVE_movstrhi
2546 && GET_CODE (size) == CONST_INT
2547 && ((unsigned) INTVAL (size)
2548 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2549 {
2550 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2551 xinner, size, GEN_INT (align));
2552 if (pat != 0)
2553 {
2554 emit_insn (pat);
2555 goto ret;
2556 }
2557 }
2558 #endif
2559 #ifdef HAVE_movstrsi
2560 if (HAVE_movstrsi)
2561 {
2562 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2563 xinner, size, GEN_INT (align));
2564 if (pat != 0)
2565 {
2566 emit_insn (pat);
2567 goto ret;
2568 }
2569 }
2570 #endif
2571 #ifdef HAVE_movstrdi
2572 if (HAVE_movstrdi)
2573 {
2574 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2575 xinner, size, GEN_INT (align));
2576 if (pat != 0)
2577 {
2578 emit_insn (pat);
2579 goto ret;
2580 }
2581 }
2582 #endif
2583
2584 #ifndef ACCUMULATE_OUTGOING_ARGS
2585 /* If the source is referenced relative to the stack pointer,
2586 copy it to another register to stabilize it. We do not need
2587 to do this if we know that we won't be changing sp. */
2588
2589 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2590 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2591 temp = copy_to_reg (temp);
2592 #endif
2593
2594 /* Make inhibit_defer_pop nonzero around the library call
2595 to force it to pop the bcopy-arguments right away. */
2596 NO_DEFER_POP;
2597 #ifdef TARGET_MEM_FUNCTIONS
2598 emit_library_call (memcpy_libfunc, 0,
2599 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2600 convert_to_mode (TYPE_MODE (sizetype),
2601 size, TREE_UNSIGNED (sizetype)),
2602 TYPE_MODE (sizetype));
2603 #else
2604 emit_library_call (bcopy_libfunc, 0,
2605 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2606 convert_to_mode (TYPE_MODE (integer_type_node),
2607 size,
2608 TREE_UNSIGNED (integer_type_node)),
2609 TYPE_MODE (integer_type_node));
2610 #endif
2611 OK_DEFER_POP;
2612 }
2613 }
2614 else if (partial > 0)
2615 {
2616 /* Scalar partly in registers. */
2617
2618 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2619 int i;
2620 int not_stack;
2621 /* # words of start of argument
2622 that we must make space for but need not store. */
2623 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2624 int args_offset = INTVAL (args_so_far);
2625 int skip;
2626
2627 /* Push padding now if padding above and stack grows down,
2628 or if padding below and stack grows up.
2629 But if space already allocated, this has already been done. */
2630 if (extra && args_addr == 0
2631 && where_pad != none && where_pad != stack_direction)
2632 anti_adjust_stack (GEN_INT (extra));
2633
2634 /* If we make space by pushing it, we might as well push
2635 the real data. Otherwise, we can leave OFFSET nonzero
2636 and leave the space uninitialized. */
2637 if (args_addr == 0)
2638 offset = 0;
2639
2640 /* Now NOT_STACK gets the number of words that we don't need to
2641 allocate on the stack. */
2642 not_stack = partial - offset;
2643
2644 /* If the partial register-part of the arg counts in its stack size,
2645 skip the part of stack space corresponding to the registers.
2646 Otherwise, start copying to the beginning of the stack space,
2647 by setting SKIP to 0. */
2648 #ifndef REG_PARM_STACK_SPACE
2649 skip = 0;
2650 #else
2651 skip = not_stack;
2652 #endif
2653
2654 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2655 x = validize_mem (force_const_mem (mode, x));
2656
2657 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2658 SUBREGs of such registers are not allowed. */
2659 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2660 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2661 x = copy_to_reg (x);
2662
2663 /* Loop over all the words allocated on the stack for this arg. */
2664 /* We can do it by words, because any scalar bigger than a word
2665 has a size a multiple of a word. */
2666 #ifndef PUSH_ARGS_REVERSED
2667 for (i = not_stack; i < size; i++)
2668 #else
2669 for (i = size - 1; i >= not_stack; i--)
2670 #endif
2671 if (i >= not_stack + offset)
2672 emit_push_insn (operand_subword_force (x, i, mode),
2673 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2674 0, args_addr,
2675 GEN_INT (args_offset + ((i - not_stack + skip)
2676 * UNITS_PER_WORD)));
2677 }
2678 else
2679 {
2680 rtx addr;
2681
2682 /* Push padding now if padding above and stack grows down,
2683 or if padding below and stack grows up.
2684 But if space already allocated, this has already been done. */
2685 if (extra && args_addr == 0
2686 && where_pad != none && where_pad != stack_direction)
2687 anti_adjust_stack (GEN_INT (extra));
2688
2689 #ifdef PUSH_ROUNDING
2690 if (args_addr == 0)
2691 addr = gen_push_operand ();
2692 else
2693 #endif
2694 if (GET_CODE (args_so_far) == CONST_INT)
2695 addr
2696 = memory_address (mode,
2697 plus_constant (args_addr, INTVAL (args_so_far)));
2698 else
2699 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2700 args_so_far));
2701
2702 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2703 }
2704
2705 ret:
2706 /* If part should go in registers, copy that part
2707 into the appropriate registers. Do this now, at the end,
2708 since mem-to-mem copies above may do function calls. */
2709 if (partial > 0 && reg != 0)
2710 {
2711 /* Handle calls that pass values in multiple non-contiguous locations.
2712 The Irix 6 ABI has examples of this. */
2713 if (GET_CODE (reg) == PARALLEL)
2714 emit_group_load (reg, x);
2715 else
2716 move_block_to_reg (REGNO (reg), x, partial, mode);
2717 }
2718
2719 if (extra && args_addr == 0 && where_pad == stack_direction)
2720 anti_adjust_stack (GEN_INT (extra));
2721 }
2722 \f
2723 /* Expand an assignment that stores the value of FROM into TO.
2724 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2725 (This may contain a QUEUED rtx;
2726 if the value is constant, this rtx is a constant.)
2727 Otherwise, the returned value is NULL_RTX.
2728
2729 SUGGEST_REG is no longer actually used.
2730 It used to mean, copy the value through a register
2731 and return that register, if that is possible.
2732 We now use WANT_VALUE to decide whether to do this. */
2733
2734 rtx
2735 expand_assignment (to, from, want_value, suggest_reg)
2736 tree to, from;
2737 int want_value;
2738 int suggest_reg;
2739 {
2740 register rtx to_rtx = 0;
2741 rtx result;
2742
2743 /* Don't crash if the lhs of the assignment was erroneous. */
2744
2745 if (TREE_CODE (to) == ERROR_MARK)
2746 {
2747 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2748 return want_value ? result : NULL_RTX;
2749 }
2750
2751 if (output_bytecode)
2752 {
2753 tree dest_innermost;
2754
2755 bc_expand_expr (from);
2756 bc_emit_instruction (duplicate);
2757
2758 dest_innermost = bc_expand_address (to);
2759
2760 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2761 take care of it here. */
2762
2763 bc_store_memory (TREE_TYPE (to), dest_innermost);
2764 return NULL;
2765 }
2766
2767 /* Assignment of a structure component needs special treatment
2768 if the structure component's rtx is not simply a MEM.
2769 Assignment of an array element at a constant index, and assignment of
2770 an array element in an unaligned packed structure field, has the same
2771 problem. */
2772
2773 if (TREE_CODE (to) == COMPONENT_REF
2774 || TREE_CODE (to) == BIT_FIELD_REF
2775 || (TREE_CODE (to) == ARRAY_REF
2776 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2777 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2778 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2779 {
2780 enum machine_mode mode1;
2781 int bitsize;
2782 int bitpos;
2783 tree offset;
2784 int unsignedp;
2785 int volatilep = 0;
2786 tree tem;
2787 int alignment;
2788
2789 push_temp_slots ();
2790 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2791 &mode1, &unsignedp, &volatilep);
2792
2793 /* If we are going to use store_bit_field and extract_bit_field,
2794 make sure to_rtx will be safe for multiple use. */
2795
2796 if (mode1 == VOIDmode && want_value)
2797 tem = stabilize_reference (tem);
2798
2799 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2800 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2801 if (offset != 0)
2802 {
2803 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2804
2805 if (GET_CODE (to_rtx) != MEM)
2806 abort ();
2807 to_rtx = change_address (to_rtx, VOIDmode,
2808 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2809 force_reg (ptr_mode, offset_rtx)));
2810 /* If we have a variable offset, the known alignment
2811 is only that of the innermost structure containing the field.
2812 (Actually, we could sometimes do better by using the
2813 align of an element of the innermost array, but no need.) */
2814 if (TREE_CODE (to) == COMPONENT_REF
2815 || TREE_CODE (to) == BIT_FIELD_REF)
2816 alignment
2817 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2818 }
2819 if (volatilep)
2820 {
2821 if (GET_CODE (to_rtx) == MEM)
2822 {
2823 /* When the offset is zero, to_rtx is the address of the
2824 structure we are storing into, and hence may be shared.
2825 We must make a new MEM before setting the volatile bit. */
2826 if (offset == 0)
2827 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2828 MEM_VOLATILE_P (to_rtx) = 1;
2829 }
2830 #if 0 /* This was turned off because, when a field is volatile
2831 in an object which is not volatile, the object may be in a register,
2832 and then we would abort over here. */
2833 else
2834 abort ();
2835 #endif
2836 }
2837
2838 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2839 (want_value
2840 /* Spurious cast makes HPUX compiler happy. */
2841 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2842 : VOIDmode),
2843 unsignedp,
2844 /* Required alignment of containing datum. */
2845 alignment,
2846 int_size_in_bytes (TREE_TYPE (tem)));
2847 preserve_temp_slots (result);
2848 free_temp_slots ();
2849 pop_temp_slots ();
2850
2851 /* If the value is meaningful, convert RESULT to the proper mode.
2852 Otherwise, return nothing. */
2853 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2854 TYPE_MODE (TREE_TYPE (from)),
2855 result,
2856 TREE_UNSIGNED (TREE_TYPE (to)))
2857 : NULL_RTX);
2858 }
2859
2860 /* If the rhs is a function call and its value is not an aggregate,
2861 call the function before we start to compute the lhs.
2862 This is needed for correct code for cases such as
2863 val = setjmp (buf) on machines where reference to val
2864 requires loading up part of an address in a separate insn.
2865
2866 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2867 a promoted variable where the zero- or sign- extension needs to be done.
2868 Handling this in the normal way is safe because no computation is done
2869 before the call. */
2870 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2871 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2872 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2873 {
2874 rtx value;
2875
2876 push_temp_slots ();
2877 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2878 if (to_rtx == 0)
2879 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2880
2881 /* Handle calls that return values in multiple non-contiguous locations.
2882 The Irix 6 ABI has examples of this. */
2883 if (GET_CODE (to_rtx) == PARALLEL)
2884 emit_group_load (to_rtx, value);
2885 else if (GET_MODE (to_rtx) == BLKmode)
2886 emit_block_move (to_rtx, value, expr_size (from),
2887 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2888 else
2889 emit_move_insn (to_rtx, value);
2890 preserve_temp_slots (to_rtx);
2891 free_temp_slots ();
2892 pop_temp_slots ();
2893 return want_value ? to_rtx : NULL_RTX;
2894 }
2895
2896 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2897 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2898
2899 if (to_rtx == 0)
2900 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2901
2902 /* Don't move directly into a return register. */
2903 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2904 {
2905 rtx temp;
2906
2907 push_temp_slots ();
2908 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2909 emit_move_insn (to_rtx, temp);
2910 preserve_temp_slots (to_rtx);
2911 free_temp_slots ();
2912 pop_temp_slots ();
2913 return want_value ? to_rtx : NULL_RTX;
2914 }
2915
2916 /* In case we are returning the contents of an object which overlaps
2917 the place the value is being stored, use a safe function when copying
2918 a value through a pointer into a structure value return block. */
2919 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2920 && current_function_returns_struct
2921 && !current_function_returns_pcc_struct)
2922 {
2923 rtx from_rtx, size;
2924
2925 push_temp_slots ();
2926 size = expr_size (from);
2927 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2928
2929 #ifdef TARGET_MEM_FUNCTIONS
2930 emit_library_call (memcpy_libfunc, 0,
2931 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2932 XEXP (from_rtx, 0), Pmode,
2933 convert_to_mode (TYPE_MODE (sizetype),
2934 size, TREE_UNSIGNED (sizetype)),
2935 TYPE_MODE (sizetype));
2936 #else
2937 emit_library_call (bcopy_libfunc, 0,
2938 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2939 XEXP (to_rtx, 0), Pmode,
2940 convert_to_mode (TYPE_MODE (integer_type_node),
2941 size, TREE_UNSIGNED (integer_type_node)),
2942 TYPE_MODE (integer_type_node));
2943 #endif
2944
2945 preserve_temp_slots (to_rtx);
2946 free_temp_slots ();
2947 pop_temp_slots ();
2948 return want_value ? to_rtx : NULL_RTX;
2949 }
2950
2951 /* Compute FROM and store the value in the rtx we got. */
2952
2953 push_temp_slots ();
2954 result = store_expr (from, to_rtx, want_value);
2955 preserve_temp_slots (result);
2956 free_temp_slots ();
2957 pop_temp_slots ();
2958 return want_value ? result : NULL_RTX;
2959 }
2960
2961 /* Generate code for computing expression EXP,
2962 and storing the value into TARGET.
2963 TARGET may contain a QUEUED rtx.
2964
2965 If WANT_VALUE is nonzero, return a copy of the value
2966 not in TARGET, so that we can be sure to use the proper
2967 value in a containing expression even if TARGET has something
2968 else stored in it. If possible, we copy the value through a pseudo
2969 and return that pseudo. Or, if the value is constant, we try to
2970 return the constant. In some cases, we return a pseudo
2971 copied *from* TARGET.
2972
2973 If the mode is BLKmode then we may return TARGET itself.
2974 It turns out that in BLKmode it doesn't cause a problem.
2975 because C has no operators that could combine two different
2976 assignments into the same BLKmode object with different values
2977 with no sequence point. Will other languages need this to
2978 be more thorough?
2979
2980 If WANT_VALUE is 0, we return NULL, to make sure
2981 to catch quickly any cases where the caller uses the value
2982 and fails to set WANT_VALUE. */
2983
2984 rtx
2985 store_expr (exp, target, want_value)
2986 register tree exp;
2987 register rtx target;
2988 int want_value;
2989 {
2990 register rtx temp;
2991 int dont_return_target = 0;
2992
2993 if (TREE_CODE (exp) == COMPOUND_EXPR)
2994 {
2995 /* Perform first part of compound expression, then assign from second
2996 part. */
2997 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2998 emit_queue ();
2999 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3000 }
3001 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3002 {
3003 /* For conditional expression, get safe form of the target. Then
3004 test the condition, doing the appropriate assignment on either
3005 side. This avoids the creation of unnecessary temporaries.
3006 For non-BLKmode, it is more efficient not to do this. */
3007
3008 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3009 rtx flag = NULL_RTX;
3010 tree left_cleanups = NULL_TREE;
3011 tree right_cleanups = NULL_TREE;
3012 tree old_cleanups = cleanups_this_call;
3013
3014 /* Used to save a pointer to the place to put the setting of
3015 the flag that indicates if this side of the conditional was
3016 taken. We backpatch the code, if we find out later that we
3017 have any conditional cleanups that need to be performed. */
3018 rtx dest_right_flag = NULL_RTX;
3019 rtx dest_left_flag = NULL_RTX;
3020
3021 emit_queue ();
3022 target = protect_from_queue (target, 1);
3023
3024 do_pending_stack_adjust ();
3025 NO_DEFER_POP;
3026 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3027 store_expr (TREE_OPERAND (exp, 1), target, 0);
3028 dest_left_flag = get_last_insn ();
3029 /* Handle conditional cleanups, if any. */
3030 left_cleanups = defer_cleanups_to (old_cleanups);
3031 emit_queue ();
3032 emit_jump_insn (gen_jump (lab2));
3033 emit_barrier ();
3034 emit_label (lab1);
3035 store_expr (TREE_OPERAND (exp, 2), target, 0);
3036 dest_right_flag = get_last_insn ();
3037 /* Handle conditional cleanups, if any. */
3038 right_cleanups = defer_cleanups_to (old_cleanups);
3039 emit_queue ();
3040 emit_label (lab2);
3041 OK_DEFER_POP;
3042
3043 /* Add back in any conditional cleanups. */
3044 if (left_cleanups || right_cleanups)
3045 {
3046 tree new_cleanups;
3047 tree cond;
3048 rtx last;
3049
3050 /* Now that we know that a flag is needed, go back and add in the
3051 setting of the flag. */
3052
3053 flag = gen_reg_rtx (word_mode);
3054
3055 /* Do the left side flag. */
3056 last = get_last_insn ();
3057 /* Flag left cleanups as needed. */
3058 emit_move_insn (flag, const1_rtx);
3059 /* ??? deprecated, use sequences instead. */
3060 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
3061
3062 /* Do the right side flag. */
3063 last = get_last_insn ();
3064 /* Flag left cleanups as needed. */
3065 emit_move_insn (flag, const0_rtx);
3066 /* ??? deprecated, use sequences instead. */
3067 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
3068
3069 /* All cleanups must be on the function_obstack. */
3070 push_obstacks_nochange ();
3071 resume_temporary_allocation ();
3072
3073 /* convert flag, which is an rtx, into a tree. */
3074 cond = make_node (RTL_EXPR);
3075 TREE_TYPE (cond) = integer_type_node;
3076 RTL_EXPR_RTL (cond) = flag;
3077 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
3078 cond = save_expr (cond);
3079
3080 if (! left_cleanups)
3081 left_cleanups = integer_zero_node;
3082 if (! right_cleanups)
3083 right_cleanups = integer_zero_node;
3084 new_cleanups = build (COND_EXPR, void_type_node,
3085 truthvalue_conversion (cond),
3086 left_cleanups, right_cleanups);
3087 new_cleanups = fold (new_cleanups);
3088
3089 pop_obstacks ();
3090
3091 /* Now add in the conditionalized cleanups. */
3092 cleanups_this_call
3093 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3094 expand_eh_region_start ();
3095 }
3096 return want_value ? target : NULL_RTX;
3097 }
3098 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3099 && GET_MODE (target) != BLKmode)
3100 /* If target is in memory and caller wants value in a register instead,
3101 arrange that. Pass TARGET as target for expand_expr so that,
3102 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3103 We know expand_expr will not use the target in that case.
3104 Don't do this if TARGET is volatile because we are supposed
3105 to write it and then read it. */
3106 {
3107 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3108 GET_MODE (target), 0);
3109 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3110 temp = copy_to_reg (temp);
3111 dont_return_target = 1;
3112 }
3113 else if (queued_subexp_p (target))
3114 /* If target contains a postincrement, let's not risk
3115 using it as the place to generate the rhs. */
3116 {
3117 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3118 {
3119 /* Expand EXP into a new pseudo. */
3120 temp = gen_reg_rtx (GET_MODE (target));
3121 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3122 }
3123 else
3124 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3125
3126 /* If target is volatile, ANSI requires accessing the value
3127 *from* the target, if it is accessed. So make that happen.
3128 In no case return the target itself. */
3129 if (! MEM_VOLATILE_P (target) && want_value)
3130 dont_return_target = 1;
3131 }
3132 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3133 /* If this is an scalar in a register that is stored in a wider mode
3134 than the declared mode, compute the result into its declared mode
3135 and then convert to the wider mode. Our value is the computed
3136 expression. */
3137 {
3138 /* If we don't want a value, we can do the conversion inside EXP,
3139 which will often result in some optimizations. Do the conversion
3140 in two steps: first change the signedness, if needed, then
3141 the extend. But don't do this if the type of EXP is a subtype
3142 of something else since then the conversion might involve
3143 more than just converting modes. */
3144 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3145 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3146 {
3147 if (TREE_UNSIGNED (TREE_TYPE (exp))
3148 != SUBREG_PROMOTED_UNSIGNED_P (target))
3149 exp
3150 = convert
3151 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3152 TREE_TYPE (exp)),
3153 exp);
3154
3155 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3156 SUBREG_PROMOTED_UNSIGNED_P (target)),
3157 exp);
3158 }
3159
3160 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3161
3162 /* If TEMP is a volatile MEM and we want a result value, make
3163 the access now so it gets done only once. Likewise if
3164 it contains TARGET. */
3165 if (GET_CODE (temp) == MEM && want_value
3166 && (MEM_VOLATILE_P (temp)
3167 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3168 temp = copy_to_reg (temp);
3169
3170 /* If TEMP is a VOIDmode constant, use convert_modes to make
3171 sure that we properly convert it. */
3172 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3173 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3174 TYPE_MODE (TREE_TYPE (exp)), temp,
3175 SUBREG_PROMOTED_UNSIGNED_P (target));
3176
3177 convert_move (SUBREG_REG (target), temp,
3178 SUBREG_PROMOTED_UNSIGNED_P (target));
3179 return want_value ? temp : NULL_RTX;
3180 }
3181 else
3182 {
3183 temp = expand_expr (exp, target, GET_MODE (target), 0);
3184 /* Return TARGET if it's a specified hardware register.
3185 If TARGET is a volatile mem ref, either return TARGET
3186 or return a reg copied *from* TARGET; ANSI requires this.
3187
3188 Otherwise, if TEMP is not TARGET, return TEMP
3189 if it is constant (for efficiency),
3190 or if we really want the correct value. */
3191 if (!(target && GET_CODE (target) == REG
3192 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3193 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3194 && temp != target
3195 && (CONSTANT_P (temp) || want_value))
3196 dont_return_target = 1;
3197 }
3198
3199 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3200 the same as that of TARGET, adjust the constant. This is needed, for
3201 example, in case it is a CONST_DOUBLE and we want only a word-sized
3202 value. */
3203 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3204 && TREE_CODE (exp) != ERROR_MARK
3205 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3206 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3207 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3208
3209 /* If value was not generated in the target, store it there.
3210 Convert the value to TARGET's type first if nec. */
3211
3212 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3213 {
3214 target = protect_from_queue (target, 1);
3215 if (GET_MODE (temp) != GET_MODE (target)
3216 && GET_MODE (temp) != VOIDmode)
3217 {
3218 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3219 if (dont_return_target)
3220 {
3221 /* In this case, we will return TEMP,
3222 so make sure it has the proper mode.
3223 But don't forget to store the value into TARGET. */
3224 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3225 emit_move_insn (target, temp);
3226 }
3227 else
3228 convert_move (target, temp, unsignedp);
3229 }
3230
3231 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3232 {
3233 /* Handle copying a string constant into an array.
3234 The string constant may be shorter than the array.
3235 So copy just the string's actual length, and clear the rest. */
3236 rtx size;
3237 rtx addr;
3238
3239 /* Get the size of the data type of the string,
3240 which is actually the size of the target. */
3241 size = expr_size (exp);
3242 if (GET_CODE (size) == CONST_INT
3243 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3244 emit_block_move (target, temp, size,
3245 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3246 else
3247 {
3248 /* Compute the size of the data to copy from the string. */
3249 tree copy_size
3250 = size_binop (MIN_EXPR,
3251 make_tree (sizetype, size),
3252 convert (sizetype,
3253 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3254 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3255 VOIDmode, 0);
3256 rtx label = 0;
3257
3258 /* Copy that much. */
3259 emit_block_move (target, temp, copy_size_rtx,
3260 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3261
3262 /* Figure out how much is left in TARGET that we have to clear.
3263 Do all calculations in ptr_mode. */
3264
3265 addr = XEXP (target, 0);
3266 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3267
3268 if (GET_CODE (copy_size_rtx) == CONST_INT)
3269 {
3270 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3271 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3272 }
3273 else
3274 {
3275 addr = force_reg (ptr_mode, addr);
3276 addr = expand_binop (ptr_mode, add_optab, addr,
3277 copy_size_rtx, NULL_RTX, 0,
3278 OPTAB_LIB_WIDEN);
3279
3280 size = expand_binop (ptr_mode, sub_optab, size,
3281 copy_size_rtx, NULL_RTX, 0,
3282 OPTAB_LIB_WIDEN);
3283
3284 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3285 GET_MODE (size), 0, 0);
3286 label = gen_label_rtx ();
3287 emit_jump_insn (gen_blt (label));
3288 }
3289
3290 if (size != const0_rtx)
3291 {
3292 #ifdef TARGET_MEM_FUNCTIONS
3293 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3294 addr, ptr_mode,
3295 const0_rtx, TYPE_MODE (integer_type_node),
3296 convert_to_mode (TYPE_MODE (sizetype),
3297 size,
3298 TREE_UNSIGNED (sizetype)),
3299 TYPE_MODE (sizetype));
3300 #else
3301 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3302 addr, ptr_mode,
3303 convert_to_mode (TYPE_MODE (integer_type_node),
3304 size,
3305 TREE_UNSIGNED (integer_type_node)),
3306 TYPE_MODE (integer_type_node));
3307 #endif
3308 }
3309
3310 if (label)
3311 emit_label (label);
3312 }
3313 }
3314 /* Handle calls that return values in multiple non-contiguous locations.
3315 The Irix 6 ABI has examples of this. */
3316 else if (GET_CODE (target) == PARALLEL)
3317 emit_group_load (target, temp);
3318 else if (GET_MODE (temp) == BLKmode)
3319 emit_block_move (target, temp, expr_size (exp),
3320 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3321 else
3322 emit_move_insn (target, temp);
3323 }
3324
3325 /* If we don't want a value, return NULL_RTX. */
3326 if (! want_value)
3327 return NULL_RTX;
3328
3329 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3330 ??? The latter test doesn't seem to make sense. */
3331 else if (dont_return_target && GET_CODE (temp) != MEM)
3332 return temp;
3333
3334 /* Return TARGET itself if it is a hard register. */
3335 else if (want_value && GET_MODE (target) != BLKmode
3336 && ! (GET_CODE (target) == REG
3337 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3338 return copy_to_reg (target);
3339
3340 else
3341 return target;
3342 }
3343 \f
3344 /* Return 1 if EXP just contains zeros. */
3345
3346 static int
3347 is_zeros_p (exp)
3348 tree exp;
3349 {
3350 tree elt;
3351
3352 switch (TREE_CODE (exp))
3353 {
3354 case CONVERT_EXPR:
3355 case NOP_EXPR:
3356 case NON_LVALUE_EXPR:
3357 return is_zeros_p (TREE_OPERAND (exp, 0));
3358
3359 case INTEGER_CST:
3360 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3361
3362 case COMPLEX_CST:
3363 return
3364 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3365
3366 case REAL_CST:
3367 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3368
3369 case CONSTRUCTOR:
3370 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3371 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3372 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3373 if (! is_zeros_p (TREE_VALUE (elt)))
3374 return 0;
3375
3376 return 1;
3377 }
3378
3379 return 0;
3380 }
3381
3382 /* Return 1 if EXP contains mostly (3/4) zeros. */
3383
3384 static int
3385 mostly_zeros_p (exp)
3386 tree exp;
3387 {
3388 if (TREE_CODE (exp) == CONSTRUCTOR)
3389 {
3390 int elts = 0, zeros = 0;
3391 tree elt = CONSTRUCTOR_ELTS (exp);
3392 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3393 {
3394 /* If there are no ranges of true bits, it is all zero. */
3395 return elt == NULL_TREE;
3396 }
3397 for (; elt; elt = TREE_CHAIN (elt))
3398 {
3399 /* We do not handle the case where the index is a RANGE_EXPR,
3400 so the statistic will be somewhat inaccurate.
3401 We do make a more accurate count in store_constructor itself,
3402 so since this function is only used for nested array elements,
3403 this should be close enough. */
3404 if (mostly_zeros_p (TREE_VALUE (elt)))
3405 zeros++;
3406 elts++;
3407 }
3408
3409 return 4 * zeros >= 3 * elts;
3410 }
3411
3412 return is_zeros_p (exp);
3413 }
3414 \f
3415 /* Helper function for store_constructor.
3416 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3417 TYPE is the type of the CONSTRUCTOR, not the element type.
3418 CLEARED is as for store_constructor.
3419
3420 This provides a recursive shortcut back to store_constructor when it isn't
3421 necessary to go through store_field. This is so that we can pass through
3422 the cleared field to let store_constructor know that we may not have to
3423 clear a substructure if the outer structure has already been cleared. */
3424
3425 static void
3426 store_constructor_field (target, bitsize, bitpos,
3427 mode, exp, type, cleared)
3428 rtx target;
3429 int bitsize, bitpos;
3430 enum machine_mode mode;
3431 tree exp, type;
3432 int cleared;
3433 {
3434 if (TREE_CODE (exp) == CONSTRUCTOR
3435 && bitpos % BITS_PER_UNIT == 0
3436 /* If we have a non-zero bitpos for a register target, then we just
3437 let store_field do the bitfield handling. This is unlikely to
3438 generate unnecessary clear instructions anyways. */
3439 && (bitpos == 0 || GET_CODE (target) == MEM))
3440 {
3441 if (bitpos != 0)
3442 target = change_address (target, VOIDmode,
3443 plus_constant (XEXP (target, 0),
3444 bitpos / BITS_PER_UNIT));
3445 store_constructor (exp, target, cleared);
3446 }
3447 else
3448 store_field (target, bitsize, bitpos, mode, exp,
3449 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3450 int_size_in_bytes (type));
3451 }
3452
3453 /* Store the value of constructor EXP into the rtx TARGET.
3454 TARGET is either a REG or a MEM.
3455 CLEARED is true if TARGET is known to have been zero'd. */
3456
3457 static void
3458 store_constructor (exp, target, cleared)
3459 tree exp;
3460 rtx target;
3461 int cleared;
3462 {
3463 tree type = TREE_TYPE (exp);
3464
3465 /* We know our target cannot conflict, since safe_from_p has been called. */
3466 #if 0
3467 /* Don't try copying piece by piece into a hard register
3468 since that is vulnerable to being clobbered by EXP.
3469 Instead, construct in a pseudo register and then copy it all. */
3470 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3471 {
3472 rtx temp = gen_reg_rtx (GET_MODE (target));
3473 store_constructor (exp, temp, 0);
3474 emit_move_insn (target, temp);
3475 return;
3476 }
3477 #endif
3478
3479 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3480 || TREE_CODE (type) == QUAL_UNION_TYPE)
3481 {
3482 register tree elt;
3483
3484 /* Inform later passes that the whole union value is dead. */
3485 if (TREE_CODE (type) == UNION_TYPE
3486 || TREE_CODE (type) == QUAL_UNION_TYPE)
3487 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3488
3489 /* If we are building a static constructor into a register,
3490 set the initial value as zero so we can fold the value into
3491 a constant. But if more than one register is involved,
3492 this probably loses. */
3493 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3494 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3495 {
3496 if (! cleared)
3497 emit_move_insn (target, const0_rtx);
3498
3499 cleared = 1;
3500 }
3501
3502 /* If the constructor has fewer fields than the structure
3503 or if we are initializing the structure to mostly zeros,
3504 clear the whole structure first. */
3505 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3506 != list_length (TYPE_FIELDS (type)))
3507 || mostly_zeros_p (exp))
3508 {
3509 if (! cleared)
3510 clear_storage (target, expr_size (exp),
3511 TYPE_ALIGN (type) / BITS_PER_UNIT);
3512
3513 cleared = 1;
3514 }
3515 else
3516 /* Inform later passes that the old value is dead. */
3517 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3518
3519 /* Store each element of the constructor into
3520 the corresponding field of TARGET. */
3521
3522 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3523 {
3524 register tree field = TREE_PURPOSE (elt);
3525 register enum machine_mode mode;
3526 int bitsize;
3527 int bitpos = 0;
3528 int unsignedp;
3529 tree pos, constant = 0, offset = 0;
3530 rtx to_rtx = target;
3531
3532 /* Just ignore missing fields.
3533 We cleared the whole structure, above,
3534 if any fields are missing. */
3535 if (field == 0)
3536 continue;
3537
3538 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3539 continue;
3540
3541 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3542 unsignedp = TREE_UNSIGNED (field);
3543 mode = DECL_MODE (field);
3544 if (DECL_BIT_FIELD (field))
3545 mode = VOIDmode;
3546
3547 pos = DECL_FIELD_BITPOS (field);
3548 if (TREE_CODE (pos) == INTEGER_CST)
3549 constant = pos;
3550 else if (TREE_CODE (pos) == PLUS_EXPR
3551 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3552 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3553 else
3554 offset = pos;
3555
3556 if (constant)
3557 bitpos = TREE_INT_CST_LOW (constant);
3558
3559 if (offset)
3560 {
3561 rtx offset_rtx;
3562
3563 if (contains_placeholder_p (offset))
3564 offset = build (WITH_RECORD_EXPR, sizetype,
3565 offset, exp);
3566
3567 offset = size_binop (FLOOR_DIV_EXPR, offset,
3568 size_int (BITS_PER_UNIT));
3569
3570 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3571 if (GET_CODE (to_rtx) != MEM)
3572 abort ();
3573
3574 to_rtx
3575 = change_address (to_rtx, VOIDmode,
3576 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3577 force_reg (ptr_mode, offset_rtx)));
3578 }
3579 if (TREE_READONLY (field))
3580 {
3581 if (GET_CODE (to_rtx) == MEM)
3582 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3583 XEXP (to_rtx, 0));
3584 RTX_UNCHANGING_P (to_rtx) = 1;
3585 }
3586
3587 store_constructor_field (to_rtx, bitsize, bitpos,
3588 mode, TREE_VALUE (elt), type, cleared);
3589 }
3590 }
3591 else if (TREE_CODE (type) == ARRAY_TYPE)
3592 {
3593 register tree elt;
3594 register int i;
3595 int need_to_clear;
3596 tree domain = TYPE_DOMAIN (type);
3597 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3598 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3599 tree elttype = TREE_TYPE (type);
3600
3601 /* If the constructor has fewer elements than the array,
3602 clear the whole array first. Similarly if this this is
3603 static constructor of a non-BLKmode object. */
3604 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3605 need_to_clear = 1;
3606 else
3607 {
3608 HOST_WIDE_INT count = 0, zero_count = 0;
3609 need_to_clear = 0;
3610 /* This loop is a more accurate version of the loop in
3611 mostly_zeros_p (it handles RANGE_EXPR in an index).
3612 It is also needed to check for missing elements. */
3613 for (elt = CONSTRUCTOR_ELTS (exp);
3614 elt != NULL_TREE;
3615 elt = TREE_CHAIN (elt))
3616 {
3617 tree index = TREE_PURPOSE (elt);
3618 HOST_WIDE_INT this_node_count;
3619 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3620 {
3621 tree lo_index = TREE_OPERAND (index, 0);
3622 tree hi_index = TREE_OPERAND (index, 1);
3623 if (TREE_CODE (lo_index) != INTEGER_CST
3624 || TREE_CODE (hi_index) != INTEGER_CST)
3625 {
3626 need_to_clear = 1;
3627 break;
3628 }
3629 this_node_count = TREE_INT_CST_LOW (hi_index)
3630 - TREE_INT_CST_LOW (lo_index) + 1;
3631 }
3632 else
3633 this_node_count = 1;
3634 count += this_node_count;
3635 if (mostly_zeros_p (TREE_VALUE (elt)))
3636 zero_count += this_node_count;
3637 }
3638 /* Clear the entire array first if there are any missing elements,
3639 or if the incidence of zero elements is >= 75%. */
3640 if (count < maxelt - minelt + 1
3641 || 4 * zero_count >= 3 * count)
3642 need_to_clear = 1;
3643 }
3644 if (need_to_clear)
3645 {
3646 if (! cleared)
3647 clear_storage (target, expr_size (exp),
3648 TYPE_ALIGN (type) / BITS_PER_UNIT);
3649 cleared = 1;
3650 }
3651 else
3652 /* Inform later passes that the old value is dead. */
3653 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3654
3655 /* Store each element of the constructor into
3656 the corresponding element of TARGET, determined
3657 by counting the elements. */
3658 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3659 elt;
3660 elt = TREE_CHAIN (elt), i++)
3661 {
3662 register enum machine_mode mode;
3663 int bitsize;
3664 int bitpos;
3665 int unsignedp;
3666 tree value = TREE_VALUE (elt);
3667 tree index = TREE_PURPOSE (elt);
3668 rtx xtarget = target;
3669
3670 if (cleared && is_zeros_p (value))
3671 continue;
3672
3673 mode = TYPE_MODE (elttype);
3674 bitsize = GET_MODE_BITSIZE (mode);
3675 unsignedp = TREE_UNSIGNED (elttype);
3676
3677 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3678 {
3679 tree lo_index = TREE_OPERAND (index, 0);
3680 tree hi_index = TREE_OPERAND (index, 1);
3681 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3682 struct nesting *loop;
3683 HOST_WIDE_INT lo, hi, count;
3684 tree position;
3685
3686 /* If the range is constant and "small", unroll the loop. */
3687 if (TREE_CODE (lo_index) == INTEGER_CST
3688 && TREE_CODE (hi_index) == INTEGER_CST
3689 && (lo = TREE_INT_CST_LOW (lo_index),
3690 hi = TREE_INT_CST_LOW (hi_index),
3691 count = hi - lo + 1,
3692 (GET_CODE (target) != MEM
3693 || count <= 2
3694 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3695 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3696 <= 40 * 8))))
3697 {
3698 lo -= minelt; hi -= minelt;
3699 for (; lo <= hi; lo++)
3700 {
3701 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3702 store_constructor_field (target, bitsize, bitpos,
3703 mode, value, type, cleared);
3704 }
3705 }
3706 else
3707 {
3708 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3709 loop_top = gen_label_rtx ();
3710 loop_end = gen_label_rtx ();
3711
3712 unsignedp = TREE_UNSIGNED (domain);
3713
3714 index = build_decl (VAR_DECL, NULL_TREE, domain);
3715
3716 DECL_RTL (index) = index_r
3717 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3718 &unsignedp, 0));
3719
3720 if (TREE_CODE (value) == SAVE_EXPR
3721 && SAVE_EXPR_RTL (value) == 0)
3722 {
3723 /* Make sure value gets expanded once before the
3724 loop. */
3725 expand_expr (value, const0_rtx, VOIDmode, 0);
3726 emit_queue ();
3727 }
3728 store_expr (lo_index, index_r, 0);
3729 loop = expand_start_loop (0);
3730
3731 /* Assign value to element index. */
3732 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3733 size_int (BITS_PER_UNIT));
3734 position = size_binop (MULT_EXPR,
3735 size_binop (MINUS_EXPR, index,
3736 TYPE_MIN_VALUE (domain)),
3737 position);
3738 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3739 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3740 xtarget = change_address (target, mode, addr);
3741 if (TREE_CODE (value) == CONSTRUCTOR)
3742 store_constructor (value, xtarget, cleared);
3743 else
3744 store_expr (value, xtarget, 0);
3745
3746 expand_exit_loop_if_false (loop,
3747 build (LT_EXPR, integer_type_node,
3748 index, hi_index));
3749
3750 expand_increment (build (PREINCREMENT_EXPR,
3751 TREE_TYPE (index),
3752 index, integer_one_node), 0, 0);
3753 expand_end_loop ();
3754 emit_label (loop_end);
3755
3756 /* Needed by stupid register allocation. to extend the
3757 lifetime of pseudo-regs used by target past the end
3758 of the loop. */
3759 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3760 }
3761 }
3762 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3763 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3764 {
3765 rtx pos_rtx, addr;
3766 tree position;
3767
3768 if (index == 0)
3769 index = size_int (i);
3770
3771 if (minelt)
3772 index = size_binop (MINUS_EXPR, index,
3773 TYPE_MIN_VALUE (domain));
3774 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3775 size_int (BITS_PER_UNIT));
3776 position = size_binop (MULT_EXPR, index, position);
3777 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3778 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3779 xtarget = change_address (target, mode, addr);
3780 store_expr (value, xtarget, 0);
3781 }
3782 else
3783 {
3784 if (index != 0)
3785 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3786 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3787 else
3788 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3789 store_constructor_field (target, bitsize, bitpos,
3790 mode, value, type, cleared);
3791 }
3792 }
3793 }
3794 /* set constructor assignments */
3795 else if (TREE_CODE (type) == SET_TYPE)
3796 {
3797 tree elt = CONSTRUCTOR_ELTS (exp);
3798 rtx xtarget = XEXP (target, 0);
3799 int set_word_size = TYPE_ALIGN (type);
3800 int nbytes = int_size_in_bytes (type), nbits;
3801 tree domain = TYPE_DOMAIN (type);
3802 tree domain_min, domain_max, bitlength;
3803
3804 /* The default implementation strategy is to extract the constant
3805 parts of the constructor, use that to initialize the target,
3806 and then "or" in whatever non-constant ranges we need in addition.
3807
3808 If a large set is all zero or all ones, it is
3809 probably better to set it using memset (if available) or bzero.
3810 Also, if a large set has just a single range, it may also be
3811 better to first clear all the first clear the set (using
3812 bzero/memset), and set the bits we want. */
3813
3814 /* Check for all zeros. */
3815 if (elt == NULL_TREE)
3816 {
3817 if (!cleared)
3818 clear_storage (target, expr_size (exp),
3819 TYPE_ALIGN (type) / BITS_PER_UNIT);
3820 return;
3821 }
3822
3823 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3824 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3825 bitlength = size_binop (PLUS_EXPR,
3826 size_binop (MINUS_EXPR, domain_max, domain_min),
3827 size_one_node);
3828
3829 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3830 abort ();
3831 nbits = TREE_INT_CST_LOW (bitlength);
3832
3833 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3834 are "complicated" (more than one range), initialize (the
3835 constant parts) by copying from a constant. */
3836 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3837 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3838 {
3839 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3840 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3841 char *bit_buffer = (char *) alloca (nbits);
3842 HOST_WIDE_INT word = 0;
3843 int bit_pos = 0;
3844 int ibit = 0;
3845 int offset = 0; /* In bytes from beginning of set. */
3846 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3847 for (;;)
3848 {
3849 if (bit_buffer[ibit])
3850 {
3851 if (BYTES_BIG_ENDIAN)
3852 word |= (1 << (set_word_size - 1 - bit_pos));
3853 else
3854 word |= 1 << bit_pos;
3855 }
3856 bit_pos++; ibit++;
3857 if (bit_pos >= set_word_size || ibit == nbits)
3858 {
3859 if (word != 0 || ! cleared)
3860 {
3861 rtx datum = GEN_INT (word);
3862 rtx to_rtx;
3863 /* The assumption here is that it is safe to use
3864 XEXP if the set is multi-word, but not if
3865 it's single-word. */
3866 if (GET_CODE (target) == MEM)
3867 {
3868 to_rtx = plus_constant (XEXP (target, 0), offset);
3869 to_rtx = change_address (target, mode, to_rtx);
3870 }
3871 else if (offset == 0)
3872 to_rtx = target;
3873 else
3874 abort ();
3875 emit_move_insn (to_rtx, datum);
3876 }
3877 if (ibit == nbits)
3878 break;
3879 word = 0;
3880 bit_pos = 0;
3881 offset += set_word_size / BITS_PER_UNIT;
3882 }
3883 }
3884 }
3885 else if (!cleared)
3886 {
3887 /* Don't bother clearing storage if the set is all ones. */
3888 if (TREE_CHAIN (elt) != NULL_TREE
3889 || (TREE_PURPOSE (elt) == NULL_TREE
3890 ? nbits != 1
3891 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3892 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3893 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3894 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3895 != nbits))))
3896 clear_storage (target, expr_size (exp),
3897 TYPE_ALIGN (type) / BITS_PER_UNIT);
3898 }
3899
3900 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3901 {
3902 /* start of range of element or NULL */
3903 tree startbit = TREE_PURPOSE (elt);
3904 /* end of range of element, or element value */
3905 tree endbit = TREE_VALUE (elt);
3906 HOST_WIDE_INT startb, endb;
3907 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3908
3909 bitlength_rtx = expand_expr (bitlength,
3910 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3911
3912 /* handle non-range tuple element like [ expr ] */
3913 if (startbit == NULL_TREE)
3914 {
3915 startbit = save_expr (endbit);
3916 endbit = startbit;
3917 }
3918 startbit = convert (sizetype, startbit);
3919 endbit = convert (sizetype, endbit);
3920 if (! integer_zerop (domain_min))
3921 {
3922 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3923 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3924 }
3925 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3926 EXPAND_CONST_ADDRESS);
3927 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3928 EXPAND_CONST_ADDRESS);
3929
3930 if (REG_P (target))
3931 {
3932 targetx = assign_stack_temp (GET_MODE (target),
3933 GET_MODE_SIZE (GET_MODE (target)),
3934 0);
3935 emit_move_insn (targetx, target);
3936 }
3937 else if (GET_CODE (target) == MEM)
3938 targetx = target;
3939 else
3940 abort ();
3941
3942 #ifdef TARGET_MEM_FUNCTIONS
3943 /* Optimization: If startbit and endbit are
3944 constants divisible by BITS_PER_UNIT,
3945 call memset instead. */
3946 if (TREE_CODE (startbit) == INTEGER_CST
3947 && TREE_CODE (endbit) == INTEGER_CST
3948 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3949 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3950 {
3951 emit_library_call (memset_libfunc, 0,
3952 VOIDmode, 3,
3953 plus_constant (XEXP (targetx, 0),
3954 startb / BITS_PER_UNIT),
3955 Pmode,
3956 constm1_rtx, TYPE_MODE (integer_type_node),
3957 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3958 TYPE_MODE (sizetype));
3959 }
3960 else
3961 #endif
3962 {
3963 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3964 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3965 bitlength_rtx, TYPE_MODE (sizetype),
3966 startbit_rtx, TYPE_MODE (sizetype),
3967 endbit_rtx, TYPE_MODE (sizetype));
3968 }
3969 if (REG_P (target))
3970 emit_move_insn (target, targetx);
3971 }
3972 }
3973
3974 else
3975 abort ();
3976 }
3977
3978 /* Store the value of EXP (an expression tree)
3979 into a subfield of TARGET which has mode MODE and occupies
3980 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3981 If MODE is VOIDmode, it means that we are storing into a bit-field.
3982
3983 If VALUE_MODE is VOIDmode, return nothing in particular.
3984 UNSIGNEDP is not used in this case.
3985
3986 Otherwise, return an rtx for the value stored. This rtx
3987 has mode VALUE_MODE if that is convenient to do.
3988 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3989
3990 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3991 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3992
3993 static rtx
3994 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3995 unsignedp, align, total_size)
3996 rtx target;
3997 int bitsize, bitpos;
3998 enum machine_mode mode;
3999 tree exp;
4000 enum machine_mode value_mode;
4001 int unsignedp;
4002 int align;
4003 int total_size;
4004 {
4005 HOST_WIDE_INT width_mask = 0;
4006
4007 if (bitsize < HOST_BITS_PER_WIDE_INT)
4008 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4009
4010 /* If we are storing into an unaligned field of an aligned union that is
4011 in a register, we may have the mode of TARGET being an integer mode but
4012 MODE == BLKmode. In that case, get an aligned object whose size and
4013 alignment are the same as TARGET and store TARGET into it (we can avoid
4014 the store if the field being stored is the entire width of TARGET). Then
4015 call ourselves recursively to store the field into a BLKmode version of
4016 that object. Finally, load from the object into TARGET. This is not
4017 very efficient in general, but should only be slightly more expensive
4018 than the otherwise-required unaligned accesses. Perhaps this can be
4019 cleaned up later. */
4020
4021 if (mode == BLKmode
4022 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4023 {
4024 rtx object = assign_stack_temp (GET_MODE (target),
4025 GET_MODE_SIZE (GET_MODE (target)), 0);
4026 rtx blk_object = copy_rtx (object);
4027
4028 MEM_IN_STRUCT_P (object) = 1;
4029 MEM_IN_STRUCT_P (blk_object) = 1;
4030 PUT_MODE (blk_object, BLKmode);
4031
4032 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4033 emit_move_insn (object, target);
4034
4035 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4036 align, total_size);
4037
4038 /* Even though we aren't returning target, we need to
4039 give it the updated value. */
4040 emit_move_insn (target, object);
4041
4042 return blk_object;
4043 }
4044
4045 /* If the structure is in a register or if the component
4046 is a bit field, we cannot use addressing to access it.
4047 Use bit-field techniques or SUBREG to store in it. */
4048
4049 if (mode == VOIDmode
4050 || (mode != BLKmode && ! direct_store[(int) mode])
4051 || GET_CODE (target) == REG
4052 || GET_CODE (target) == SUBREG
4053 /* If the field isn't aligned enough to store as an ordinary memref,
4054 store it as a bit field. */
4055 || (SLOW_UNALIGNED_ACCESS
4056 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4057 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4058 {
4059 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4060
4061 /* If BITSIZE is narrower than the size of the type of EXP
4062 we will be narrowing TEMP. Normally, what's wanted are the
4063 low-order bits. However, if EXP's type is a record and this is
4064 big-endian machine, we want the upper BITSIZE bits. */
4065 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4066 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4067 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4068 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4069 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4070 - bitsize),
4071 temp, 1);
4072
4073 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4074 MODE. */
4075 if (mode != VOIDmode && mode != BLKmode
4076 && mode != TYPE_MODE (TREE_TYPE (exp)))
4077 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4078
4079 /* If the modes of TARGET and TEMP are both BLKmode, both
4080 must be in memory and BITPOS must be aligned on a byte
4081 boundary. If so, we simply do a block copy. */
4082 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4083 {
4084 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4085 || bitpos % BITS_PER_UNIT != 0)
4086 abort ();
4087
4088 target = change_address (target, VOIDmode,
4089 plus_constant (XEXP (target, 0),
4090 bitpos / BITS_PER_UNIT));
4091
4092 emit_block_move (target, temp,
4093 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4094 / BITS_PER_UNIT),
4095 1);
4096
4097 return value_mode == VOIDmode ? const0_rtx : target;
4098 }
4099
4100 /* Store the value in the bitfield. */
4101 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4102 if (value_mode != VOIDmode)
4103 {
4104 /* The caller wants an rtx for the value. */
4105 /* If possible, avoid refetching from the bitfield itself. */
4106 if (width_mask != 0
4107 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4108 {
4109 tree count;
4110 enum machine_mode tmode;
4111
4112 if (unsignedp)
4113 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4114 tmode = GET_MODE (temp);
4115 if (tmode == VOIDmode)
4116 tmode = value_mode;
4117 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4118 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4119 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4120 }
4121 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4122 NULL_RTX, value_mode, 0, align,
4123 total_size);
4124 }
4125 return const0_rtx;
4126 }
4127 else
4128 {
4129 rtx addr = XEXP (target, 0);
4130 rtx to_rtx;
4131
4132 /* If a value is wanted, it must be the lhs;
4133 so make the address stable for multiple use. */
4134
4135 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4136 && ! CONSTANT_ADDRESS_P (addr)
4137 /* A frame-pointer reference is already stable. */
4138 && ! (GET_CODE (addr) == PLUS
4139 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4140 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4141 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4142 addr = copy_to_reg (addr);
4143
4144 /* Now build a reference to just the desired component. */
4145
4146 to_rtx = change_address (target, mode,
4147 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
4148 MEM_IN_STRUCT_P (to_rtx) = 1;
4149
4150 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4151 }
4152 }
4153 \f
4154 /* Return true if any object containing the innermost array is an unaligned
4155 packed structure field. */
4156
4157 static int
4158 get_inner_unaligned_p (exp)
4159 tree exp;
4160 {
4161 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4162
4163 while (1)
4164 {
4165 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4166 {
4167 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4168 < needed_alignment)
4169 return 1;
4170 }
4171 else if (TREE_CODE (exp) != ARRAY_REF
4172 && TREE_CODE (exp) != NON_LVALUE_EXPR
4173 && ! ((TREE_CODE (exp) == NOP_EXPR
4174 || TREE_CODE (exp) == CONVERT_EXPR)
4175 && (TYPE_MODE (TREE_TYPE (exp))
4176 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4177 break;
4178
4179 exp = TREE_OPERAND (exp, 0);
4180 }
4181
4182 return 0;
4183 }
4184
4185 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4186 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4187 ARRAY_REFs and find the ultimate containing object, which we return.
4188
4189 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4190 bit position, and *PUNSIGNEDP to the signedness of the field.
4191 If the position of the field is variable, we store a tree
4192 giving the variable offset (in units) in *POFFSET.
4193 This offset is in addition to the bit position.
4194 If the position is not variable, we store 0 in *POFFSET.
4195
4196 If any of the extraction expressions is volatile,
4197 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4198
4199 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4200 is a mode that can be used to access the field. In that case, *PBITSIZE
4201 is redundant.
4202
4203 If the field describes a variable-sized object, *PMODE is set to
4204 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4205 this case, but the address of the object can be found. */
4206
4207 tree
4208 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4209 punsignedp, pvolatilep)
4210 tree exp;
4211 int *pbitsize;
4212 int *pbitpos;
4213 tree *poffset;
4214 enum machine_mode *pmode;
4215 int *punsignedp;
4216 int *pvolatilep;
4217 {
4218 tree orig_exp = exp;
4219 tree size_tree = 0;
4220 enum machine_mode mode = VOIDmode;
4221 tree offset = integer_zero_node;
4222
4223 if (TREE_CODE (exp) == COMPONENT_REF)
4224 {
4225 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4226 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4227 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4228 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4229 }
4230 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4231 {
4232 size_tree = TREE_OPERAND (exp, 1);
4233 *punsignedp = TREE_UNSIGNED (exp);
4234 }
4235 else
4236 {
4237 mode = TYPE_MODE (TREE_TYPE (exp));
4238 *pbitsize = GET_MODE_BITSIZE (mode);
4239 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4240 }
4241
4242 if (size_tree)
4243 {
4244 if (TREE_CODE (size_tree) != INTEGER_CST)
4245 mode = BLKmode, *pbitsize = -1;
4246 else
4247 *pbitsize = TREE_INT_CST_LOW (size_tree);
4248 }
4249
4250 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4251 and find the ultimate containing object. */
4252
4253 *pbitpos = 0;
4254
4255 while (1)
4256 {
4257 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4258 {
4259 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4260 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4261 : TREE_OPERAND (exp, 2));
4262 tree constant = integer_zero_node, var = pos;
4263
4264 /* If this field hasn't been filled in yet, don't go
4265 past it. This should only happen when folding expressions
4266 made during type construction. */
4267 if (pos == 0)
4268 break;
4269
4270 /* Assume here that the offset is a multiple of a unit.
4271 If not, there should be an explicitly added constant. */
4272 if (TREE_CODE (pos) == PLUS_EXPR
4273 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4274 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4275 else if (TREE_CODE (pos) == INTEGER_CST)
4276 constant = pos, var = integer_zero_node;
4277
4278 *pbitpos += TREE_INT_CST_LOW (constant);
4279 offset = size_binop (PLUS_EXPR, offset,
4280 size_binop (EXACT_DIV_EXPR, var,
4281 size_int (BITS_PER_UNIT)));
4282 }
4283
4284 else if (TREE_CODE (exp) == ARRAY_REF)
4285 {
4286 /* This code is based on the code in case ARRAY_REF in expand_expr
4287 below. We assume here that the size of an array element is
4288 always an integral multiple of BITS_PER_UNIT. */
4289
4290 tree index = TREE_OPERAND (exp, 1);
4291 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4292 tree low_bound
4293 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4294 tree index_type = TREE_TYPE (index);
4295
4296 if (! integer_zerop (low_bound))
4297 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4298
4299 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4300 {
4301 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4302 index);
4303 index_type = TREE_TYPE (index);
4304 }
4305
4306 index = fold (build (MULT_EXPR, index_type, index,
4307 TYPE_SIZE (TREE_TYPE (exp))));
4308
4309 if (TREE_CODE (index) == INTEGER_CST
4310 && TREE_INT_CST_HIGH (index) == 0)
4311 *pbitpos += TREE_INT_CST_LOW (index);
4312 else
4313 offset = size_binop (PLUS_EXPR, offset,
4314 size_binop (FLOOR_DIV_EXPR, index,
4315 size_int (BITS_PER_UNIT)));
4316 }
4317 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4318 && ! ((TREE_CODE (exp) == NOP_EXPR
4319 || TREE_CODE (exp) == CONVERT_EXPR)
4320 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4321 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4322 != UNION_TYPE))
4323 && (TYPE_MODE (TREE_TYPE (exp))
4324 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4325 break;
4326
4327 /* If any reference in the chain is volatile, the effect is volatile. */
4328 if (TREE_THIS_VOLATILE (exp))
4329 *pvolatilep = 1;
4330 exp = TREE_OPERAND (exp, 0);
4331 }
4332
4333 if (integer_zerop (offset))
4334 offset = 0;
4335
4336 if (offset != 0 && contains_placeholder_p (offset))
4337 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4338
4339 *pmode = mode;
4340 *poffset = offset;
4341 return exp;
4342 }
4343 \f
4344 /* Given an rtx VALUE that may contain additions and multiplications,
4345 return an equivalent value that just refers to a register or memory.
4346 This is done by generating instructions to perform the arithmetic
4347 and returning a pseudo-register containing the value.
4348
4349 The returned value may be a REG, SUBREG, MEM or constant. */
4350
4351 rtx
4352 force_operand (value, target)
4353 rtx value, target;
4354 {
4355 register optab binoptab = 0;
4356 /* Use a temporary to force order of execution of calls to
4357 `force_operand'. */
4358 rtx tmp;
4359 register rtx op2;
4360 /* Use subtarget as the target for operand 0 of a binary operation. */
4361 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4362
4363 if (GET_CODE (value) == PLUS)
4364 binoptab = add_optab;
4365 else if (GET_CODE (value) == MINUS)
4366 binoptab = sub_optab;
4367 else if (GET_CODE (value) == MULT)
4368 {
4369 op2 = XEXP (value, 1);
4370 if (!CONSTANT_P (op2)
4371 && !(GET_CODE (op2) == REG && op2 != subtarget))
4372 subtarget = 0;
4373 tmp = force_operand (XEXP (value, 0), subtarget);
4374 return expand_mult (GET_MODE (value), tmp,
4375 force_operand (op2, NULL_RTX),
4376 target, 0);
4377 }
4378
4379 if (binoptab)
4380 {
4381 op2 = XEXP (value, 1);
4382 if (!CONSTANT_P (op2)
4383 && !(GET_CODE (op2) == REG && op2 != subtarget))
4384 subtarget = 0;
4385 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4386 {
4387 binoptab = add_optab;
4388 op2 = negate_rtx (GET_MODE (value), op2);
4389 }
4390
4391 /* Check for an addition with OP2 a constant integer and our first
4392 operand a PLUS of a virtual register and something else. In that
4393 case, we want to emit the sum of the virtual register and the
4394 constant first and then add the other value. This allows virtual
4395 register instantiation to simply modify the constant rather than
4396 creating another one around this addition. */
4397 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4398 && GET_CODE (XEXP (value, 0)) == PLUS
4399 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4400 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4401 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4402 {
4403 rtx temp = expand_binop (GET_MODE (value), binoptab,
4404 XEXP (XEXP (value, 0), 0), op2,
4405 subtarget, 0, OPTAB_LIB_WIDEN);
4406 return expand_binop (GET_MODE (value), binoptab, temp,
4407 force_operand (XEXP (XEXP (value, 0), 1), 0),
4408 target, 0, OPTAB_LIB_WIDEN);
4409 }
4410
4411 tmp = force_operand (XEXP (value, 0), subtarget);
4412 return expand_binop (GET_MODE (value), binoptab, tmp,
4413 force_operand (op2, NULL_RTX),
4414 target, 0, OPTAB_LIB_WIDEN);
4415 /* We give UNSIGNEDP = 0 to expand_binop
4416 because the only operations we are expanding here are signed ones. */
4417 }
4418 return value;
4419 }
4420 \f
4421 /* Subroutine of expand_expr:
4422 save the non-copied parts (LIST) of an expr (LHS), and return a list
4423 which can restore these values to their previous values,
4424 should something modify their storage. */
4425
4426 static tree
4427 save_noncopied_parts (lhs, list)
4428 tree lhs;
4429 tree list;
4430 {
4431 tree tail;
4432 tree parts = 0;
4433
4434 for (tail = list; tail; tail = TREE_CHAIN (tail))
4435 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4436 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4437 else
4438 {
4439 tree part = TREE_VALUE (tail);
4440 tree part_type = TREE_TYPE (part);
4441 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4442 rtx target = assign_temp (part_type, 0, 1, 1);
4443 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4444 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4445 parts = tree_cons (to_be_saved,
4446 build (RTL_EXPR, part_type, NULL_TREE,
4447 (tree) target),
4448 parts);
4449 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4450 }
4451 return parts;
4452 }
4453
4454 /* Subroutine of expand_expr:
4455 record the non-copied parts (LIST) of an expr (LHS), and return a list
4456 which specifies the initial values of these parts. */
4457
4458 static tree
4459 init_noncopied_parts (lhs, list)
4460 tree lhs;
4461 tree list;
4462 {
4463 tree tail;
4464 tree parts = 0;
4465
4466 for (tail = list; tail; tail = TREE_CHAIN (tail))
4467 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4468 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4469 else
4470 {
4471 tree part = TREE_VALUE (tail);
4472 tree part_type = TREE_TYPE (part);
4473 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4474 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4475 }
4476 return parts;
4477 }
4478
4479 /* Subroutine of expand_expr: return nonzero iff there is no way that
4480 EXP can reference X, which is being modified. */
4481
4482 static int
4483 safe_from_p (x, exp)
4484 rtx x;
4485 tree exp;
4486 {
4487 rtx exp_rtl = 0;
4488 int i, nops;
4489
4490 if (x == 0
4491 /* If EXP has varying size, we MUST use a target since we currently
4492 have no way of allocating temporaries of variable size
4493 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4494 So we assume here that something at a higher level has prevented a
4495 clash. This is somewhat bogus, but the best we can do. Only
4496 do this when X is BLKmode. */
4497 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4498 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4499 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4500 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4501 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4502 != INTEGER_CST)
4503 && GET_MODE (x) == BLKmode))
4504 return 1;
4505
4506 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4507 find the underlying pseudo. */
4508 if (GET_CODE (x) == SUBREG)
4509 {
4510 x = SUBREG_REG (x);
4511 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4512 return 0;
4513 }
4514
4515 /* If X is a location in the outgoing argument area, it is always safe. */
4516 if (GET_CODE (x) == MEM
4517 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4518 || (GET_CODE (XEXP (x, 0)) == PLUS
4519 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4520 return 1;
4521
4522 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4523 {
4524 case 'd':
4525 exp_rtl = DECL_RTL (exp);
4526 break;
4527
4528 case 'c':
4529 return 1;
4530
4531 case 'x':
4532 if (TREE_CODE (exp) == TREE_LIST)
4533 return ((TREE_VALUE (exp) == 0
4534 || safe_from_p (x, TREE_VALUE (exp)))
4535 && (TREE_CHAIN (exp) == 0
4536 || safe_from_p (x, TREE_CHAIN (exp))));
4537 else
4538 return 0;
4539
4540 case '1':
4541 return safe_from_p (x, TREE_OPERAND (exp, 0));
4542
4543 case '2':
4544 case '<':
4545 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4546 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4547
4548 case 'e':
4549 case 'r':
4550 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4551 the expression. If it is set, we conflict iff we are that rtx or
4552 both are in memory. Otherwise, we check all operands of the
4553 expression recursively. */
4554
4555 switch (TREE_CODE (exp))
4556 {
4557 case ADDR_EXPR:
4558 return (staticp (TREE_OPERAND (exp, 0))
4559 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4560
4561 case INDIRECT_REF:
4562 if (GET_CODE (x) == MEM)
4563 return 0;
4564 break;
4565
4566 case CALL_EXPR:
4567 exp_rtl = CALL_EXPR_RTL (exp);
4568 if (exp_rtl == 0)
4569 {
4570 /* Assume that the call will clobber all hard registers and
4571 all of memory. */
4572 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4573 || GET_CODE (x) == MEM)
4574 return 0;
4575 }
4576
4577 break;
4578
4579 case RTL_EXPR:
4580 /* If a sequence exists, we would have to scan every instruction
4581 in the sequence to see if it was safe. This is probably not
4582 worthwhile. */
4583 if (RTL_EXPR_SEQUENCE (exp))
4584 return 0;
4585
4586 exp_rtl = RTL_EXPR_RTL (exp);
4587 break;
4588
4589 case WITH_CLEANUP_EXPR:
4590 exp_rtl = RTL_EXPR_RTL (exp);
4591 break;
4592
4593 case CLEANUP_POINT_EXPR:
4594 return safe_from_p (x, TREE_OPERAND (exp, 0));
4595
4596 case SAVE_EXPR:
4597 exp_rtl = SAVE_EXPR_RTL (exp);
4598 break;
4599
4600 case BIND_EXPR:
4601 /* The only operand we look at is operand 1. The rest aren't
4602 part of the expression. */
4603 return safe_from_p (x, TREE_OPERAND (exp, 1));
4604
4605 case METHOD_CALL_EXPR:
4606 /* This takes a rtx argument, but shouldn't appear here. */
4607 abort ();
4608 }
4609
4610 /* If we have an rtx, we do not need to scan our operands. */
4611 if (exp_rtl)
4612 break;
4613
4614 nops = tree_code_length[(int) TREE_CODE (exp)];
4615 for (i = 0; i < nops; i++)
4616 if (TREE_OPERAND (exp, i) != 0
4617 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4618 return 0;
4619 }
4620
4621 /* If we have an rtl, find any enclosed object. Then see if we conflict
4622 with it. */
4623 if (exp_rtl)
4624 {
4625 if (GET_CODE (exp_rtl) == SUBREG)
4626 {
4627 exp_rtl = SUBREG_REG (exp_rtl);
4628 if (GET_CODE (exp_rtl) == REG
4629 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4630 return 0;
4631 }
4632
4633 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4634 are memory and EXP is not readonly. */
4635 return ! (rtx_equal_p (x, exp_rtl)
4636 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4637 && ! TREE_READONLY (exp)));
4638 }
4639
4640 /* If we reach here, it is safe. */
4641 return 1;
4642 }
4643
4644 /* Subroutine of expand_expr: return nonzero iff EXP is an
4645 expression whose type is statically determinable. */
4646
4647 static int
4648 fixed_type_p (exp)
4649 tree exp;
4650 {
4651 if (TREE_CODE (exp) == PARM_DECL
4652 || TREE_CODE (exp) == VAR_DECL
4653 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4654 || TREE_CODE (exp) == COMPONENT_REF
4655 || TREE_CODE (exp) == ARRAY_REF)
4656 return 1;
4657 return 0;
4658 }
4659
4660 /* Subroutine of expand_expr: return rtx if EXP is a
4661 variable or parameter; else return 0. */
4662
4663 static rtx
4664 var_rtx (exp)
4665 tree exp;
4666 {
4667 STRIP_NOPS (exp);
4668 switch (TREE_CODE (exp))
4669 {
4670 case PARM_DECL:
4671 case VAR_DECL:
4672 return DECL_RTL (exp);
4673 default:
4674 return 0;
4675 }
4676 }
4677 \f
4678 /* expand_expr: generate code for computing expression EXP.
4679 An rtx for the computed value is returned. The value is never null.
4680 In the case of a void EXP, const0_rtx is returned.
4681
4682 The value may be stored in TARGET if TARGET is nonzero.
4683 TARGET is just a suggestion; callers must assume that
4684 the rtx returned may not be the same as TARGET.
4685
4686 If TARGET is CONST0_RTX, it means that the value will be ignored.
4687
4688 If TMODE is not VOIDmode, it suggests generating the
4689 result in mode TMODE. But this is done only when convenient.
4690 Otherwise, TMODE is ignored and the value generated in its natural mode.
4691 TMODE is just a suggestion; callers must assume that
4692 the rtx returned may not have mode TMODE.
4693
4694 Note that TARGET may have neither TMODE nor MODE. In that case, it
4695 probably will not be used.
4696
4697 If MODIFIER is EXPAND_SUM then when EXP is an addition
4698 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4699 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4700 products as above, or REG or MEM, or constant.
4701 Ordinarily in such cases we would output mul or add instructions
4702 and then return a pseudo reg containing the sum.
4703
4704 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4705 it also marks a label as absolutely required (it can't be dead).
4706 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4707 This is used for outputting expressions used in initializers.
4708
4709 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4710 with a constant address even if that address is not normally legitimate.
4711 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4712
4713 rtx
4714 expand_expr (exp, target, tmode, modifier)
4715 register tree exp;
4716 rtx target;
4717 enum machine_mode tmode;
4718 enum expand_modifier modifier;
4719 {
4720 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4721 This is static so it will be accessible to our recursive callees. */
4722 static tree placeholder_list = 0;
4723 register rtx op0, op1, temp;
4724 tree type = TREE_TYPE (exp);
4725 int unsignedp = TREE_UNSIGNED (type);
4726 register enum machine_mode mode = TYPE_MODE (type);
4727 register enum tree_code code = TREE_CODE (exp);
4728 optab this_optab;
4729 /* Use subtarget as the target for operand 0 of a binary operation. */
4730 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4731 rtx original_target = target;
4732 /* Maybe defer this until sure not doing bytecode? */
4733 int ignore = (target == const0_rtx
4734 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4735 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4736 || code == COND_EXPR)
4737 && TREE_CODE (type) == VOID_TYPE));
4738 tree context;
4739
4740
4741 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4742 {
4743 bc_expand_expr (exp);
4744 return NULL;
4745 }
4746
4747 /* Don't use hard regs as subtargets, because the combiner
4748 can only handle pseudo regs. */
4749 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4750 subtarget = 0;
4751 /* Avoid subtargets inside loops,
4752 since they hide some invariant expressions. */
4753 if (preserve_subexpressions_p ())
4754 subtarget = 0;
4755
4756 /* If we are going to ignore this result, we need only do something
4757 if there is a side-effect somewhere in the expression. If there
4758 is, short-circuit the most common cases here. Note that we must
4759 not call expand_expr with anything but const0_rtx in case this
4760 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4761
4762 if (ignore)
4763 {
4764 if (! TREE_SIDE_EFFECTS (exp))
4765 return const0_rtx;
4766
4767 /* Ensure we reference a volatile object even if value is ignored. */
4768 if (TREE_THIS_VOLATILE (exp)
4769 && TREE_CODE (exp) != FUNCTION_DECL
4770 && mode != VOIDmode && mode != BLKmode)
4771 {
4772 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4773 if (GET_CODE (temp) == MEM)
4774 temp = copy_to_reg (temp);
4775 return const0_rtx;
4776 }
4777
4778 if (TREE_CODE_CLASS (code) == '1')
4779 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4780 VOIDmode, modifier);
4781 else if (TREE_CODE_CLASS (code) == '2'
4782 || TREE_CODE_CLASS (code) == '<')
4783 {
4784 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4785 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4786 return const0_rtx;
4787 }
4788 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4789 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4790 /* If the second operand has no side effects, just evaluate
4791 the first. */
4792 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4793 VOIDmode, modifier);
4794
4795 target = 0;
4796 }
4797
4798 /* If will do cse, generate all results into pseudo registers
4799 since 1) that allows cse to find more things
4800 and 2) otherwise cse could produce an insn the machine
4801 cannot support. */
4802
4803 if (! cse_not_expected && mode != BLKmode && target
4804 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4805 target = subtarget;
4806
4807 switch (code)
4808 {
4809 case LABEL_DECL:
4810 {
4811 tree function = decl_function_context (exp);
4812 /* Handle using a label in a containing function. */
4813 if (function != current_function_decl && function != 0)
4814 {
4815 struct function *p = find_function_data (function);
4816 /* Allocate in the memory associated with the function
4817 that the label is in. */
4818 push_obstacks (p->function_obstack,
4819 p->function_maybepermanent_obstack);
4820
4821 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4822 label_rtx (exp), p->forced_labels);
4823 pop_obstacks ();
4824 }
4825 else if (modifier == EXPAND_INITIALIZER)
4826 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4827 label_rtx (exp), forced_labels);
4828 temp = gen_rtx (MEM, FUNCTION_MODE,
4829 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4830 if (function != current_function_decl && function != 0)
4831 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4832 return temp;
4833 }
4834
4835 case PARM_DECL:
4836 if (DECL_RTL (exp) == 0)
4837 {
4838 error_with_decl (exp, "prior parameter's size depends on `%s'");
4839 return CONST0_RTX (mode);
4840 }
4841
4842 /* ... fall through ... */
4843
4844 case VAR_DECL:
4845 /* If a static var's type was incomplete when the decl was written,
4846 but the type is complete now, lay out the decl now. */
4847 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4848 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4849 {
4850 push_obstacks_nochange ();
4851 end_temporary_allocation ();
4852 layout_decl (exp, 0);
4853 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4854 pop_obstacks ();
4855 }
4856
4857 /* ... fall through ... */
4858
4859 case FUNCTION_DECL:
4860 case RESULT_DECL:
4861 if (DECL_RTL (exp) == 0)
4862 abort ();
4863
4864 /* Ensure variable marked as used even if it doesn't go through
4865 a parser. If it hasn't be used yet, write out an external
4866 definition. */
4867 if (! TREE_USED (exp))
4868 {
4869 assemble_external (exp);
4870 TREE_USED (exp) = 1;
4871 }
4872
4873 /* Show we haven't gotten RTL for this yet. */
4874 temp = 0;
4875
4876 /* Handle variables inherited from containing functions. */
4877 context = decl_function_context (exp);
4878
4879 /* We treat inline_function_decl as an alias for the current function
4880 because that is the inline function whose vars, types, etc.
4881 are being merged into the current function.
4882 See expand_inline_function. */
4883
4884 if (context != 0 && context != current_function_decl
4885 && context != inline_function_decl
4886 /* If var is static, we don't need a static chain to access it. */
4887 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4888 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4889 {
4890 rtx addr;
4891
4892 /* Mark as non-local and addressable. */
4893 DECL_NONLOCAL (exp) = 1;
4894 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4895 abort ();
4896 mark_addressable (exp);
4897 if (GET_CODE (DECL_RTL (exp)) != MEM)
4898 abort ();
4899 addr = XEXP (DECL_RTL (exp), 0);
4900 if (GET_CODE (addr) == MEM)
4901 addr = gen_rtx (MEM, Pmode,
4902 fix_lexical_addr (XEXP (addr, 0), exp));
4903 else
4904 addr = fix_lexical_addr (addr, exp);
4905 temp = change_address (DECL_RTL (exp), mode, addr);
4906 }
4907
4908 /* This is the case of an array whose size is to be determined
4909 from its initializer, while the initializer is still being parsed.
4910 See expand_decl. */
4911
4912 else if (GET_CODE (DECL_RTL (exp)) == MEM
4913 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4914 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4915 XEXP (DECL_RTL (exp), 0));
4916
4917 /* If DECL_RTL is memory, we are in the normal case and either
4918 the address is not valid or it is not a register and -fforce-addr
4919 is specified, get the address into a register. */
4920
4921 else if (GET_CODE (DECL_RTL (exp)) == MEM
4922 && modifier != EXPAND_CONST_ADDRESS
4923 && modifier != EXPAND_SUM
4924 && modifier != EXPAND_INITIALIZER
4925 && (! memory_address_p (DECL_MODE (exp),
4926 XEXP (DECL_RTL (exp), 0))
4927 || (flag_force_addr
4928 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4929 temp = change_address (DECL_RTL (exp), VOIDmode,
4930 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4931
4932 /* If we got something, return it. But first, set the alignment
4933 the address is a register. */
4934 if (temp != 0)
4935 {
4936 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4937 mark_reg_pointer (XEXP (temp, 0),
4938 DECL_ALIGN (exp) / BITS_PER_UNIT);
4939
4940 return temp;
4941 }
4942
4943 /* If the mode of DECL_RTL does not match that of the decl, it
4944 must be a promoted value. We return a SUBREG of the wanted mode,
4945 but mark it so that we know that it was already extended. */
4946
4947 if (GET_CODE (DECL_RTL (exp)) == REG
4948 && GET_MODE (DECL_RTL (exp)) != mode)
4949 {
4950 /* Get the signedness used for this variable. Ensure we get the
4951 same mode we got when the variable was declared. */
4952 if (GET_MODE (DECL_RTL (exp))
4953 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4954 abort ();
4955
4956 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4957 SUBREG_PROMOTED_VAR_P (temp) = 1;
4958 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4959 return temp;
4960 }
4961
4962 return DECL_RTL (exp);
4963
4964 case INTEGER_CST:
4965 return immed_double_const (TREE_INT_CST_LOW (exp),
4966 TREE_INT_CST_HIGH (exp),
4967 mode);
4968
4969 case CONST_DECL:
4970 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4971
4972 case REAL_CST:
4973 /* If optimized, generate immediate CONST_DOUBLE
4974 which will be turned into memory by reload if necessary.
4975
4976 We used to force a register so that loop.c could see it. But
4977 this does not allow gen_* patterns to perform optimizations with
4978 the constants. It also produces two insns in cases like "x = 1.0;".
4979 On most machines, floating-point constants are not permitted in
4980 many insns, so we'd end up copying it to a register in any case.
4981
4982 Now, we do the copying in expand_binop, if appropriate. */
4983 return immed_real_const (exp);
4984
4985 case COMPLEX_CST:
4986 case STRING_CST:
4987 if (! TREE_CST_RTL (exp))
4988 output_constant_def (exp);
4989
4990 /* TREE_CST_RTL probably contains a constant address.
4991 On RISC machines where a constant address isn't valid,
4992 make some insns to get that address into a register. */
4993 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4994 && modifier != EXPAND_CONST_ADDRESS
4995 && modifier != EXPAND_INITIALIZER
4996 && modifier != EXPAND_SUM
4997 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4998 || (flag_force_addr
4999 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5000 return change_address (TREE_CST_RTL (exp), VOIDmode,
5001 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5002 return TREE_CST_RTL (exp);
5003
5004 case SAVE_EXPR:
5005 context = decl_function_context (exp);
5006
5007 /* We treat inline_function_decl as an alias for the current function
5008 because that is the inline function whose vars, types, etc.
5009 are being merged into the current function.
5010 See expand_inline_function. */
5011 if (context == current_function_decl || context == inline_function_decl)
5012 context = 0;
5013
5014 /* If this is non-local, handle it. */
5015 if (context)
5016 {
5017 temp = SAVE_EXPR_RTL (exp);
5018 if (temp && GET_CODE (temp) == REG)
5019 {
5020 put_var_into_stack (exp);
5021 temp = SAVE_EXPR_RTL (exp);
5022 }
5023 if (temp == 0 || GET_CODE (temp) != MEM)
5024 abort ();
5025 return change_address (temp, mode,
5026 fix_lexical_addr (XEXP (temp, 0), exp));
5027 }
5028 if (SAVE_EXPR_RTL (exp) == 0)
5029 {
5030 if (mode == VOIDmode)
5031 temp = const0_rtx;
5032 else
5033 temp = assign_temp (type, 0, 0, 0);
5034
5035 SAVE_EXPR_RTL (exp) = temp;
5036 if (!optimize && GET_CODE (temp) == REG)
5037 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5038 save_expr_regs);
5039
5040 /* If the mode of TEMP does not match that of the expression, it
5041 must be a promoted value. We pass store_expr a SUBREG of the
5042 wanted mode but mark it so that we know that it was already
5043 extended. Note that `unsignedp' was modified above in
5044 this case. */
5045
5046 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5047 {
5048 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5049 SUBREG_PROMOTED_VAR_P (temp) = 1;
5050 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5051 }
5052
5053 if (temp == const0_rtx)
5054 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5055 else
5056 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5057 }
5058
5059 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5060 must be a promoted value. We return a SUBREG of the wanted mode,
5061 but mark it so that we know that it was already extended. */
5062
5063 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5064 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5065 {
5066 /* Compute the signedness and make the proper SUBREG. */
5067 promote_mode (type, mode, &unsignedp, 0);
5068 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5069 SUBREG_PROMOTED_VAR_P (temp) = 1;
5070 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5071 return temp;
5072 }
5073
5074 return SAVE_EXPR_RTL (exp);
5075
5076 case UNSAVE_EXPR:
5077 {
5078 rtx temp;
5079 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5080 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5081 return temp;
5082 }
5083
5084 case PLACEHOLDER_EXPR:
5085 /* If there is an object on the head of the placeholder list,
5086 see if some object in it's references is of type TYPE. For
5087 further information, see tree.def. */
5088 if (placeholder_list)
5089 {
5090 tree object;
5091 tree old_list = placeholder_list;
5092
5093 for (object = TREE_PURPOSE (placeholder_list);
5094 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5095 != TYPE_MAIN_VARIANT (type))
5096 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
5097 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
5098 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
5099 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
5100 object = TREE_OPERAND (object, 0))
5101 ;
5102
5103 if (object != 0
5104 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5105 == TYPE_MAIN_VARIANT (type)))
5106 {
5107 /* Expand this object skipping the list entries before
5108 it was found in case it is also a PLACEHOLDER_EXPR.
5109 In that case, we want to translate it using subsequent
5110 entries. */
5111 placeholder_list = TREE_CHAIN (placeholder_list);
5112 temp = expand_expr (object, original_target, tmode, modifier);
5113 placeholder_list = old_list;
5114 return temp;
5115 }
5116 }
5117
5118 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5119 abort ();
5120
5121 case WITH_RECORD_EXPR:
5122 /* Put the object on the placeholder list, expand our first operand,
5123 and pop the list. */
5124 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5125 placeholder_list);
5126 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5127 tmode, modifier);
5128 placeholder_list = TREE_CHAIN (placeholder_list);
5129 return target;
5130
5131 case EXIT_EXPR:
5132 expand_exit_loop_if_false (NULL_PTR,
5133 invert_truthvalue (TREE_OPERAND (exp, 0)));
5134 return const0_rtx;
5135
5136 case LOOP_EXPR:
5137 push_temp_slots ();
5138 expand_start_loop (1);
5139 expand_expr_stmt (TREE_OPERAND (exp, 0));
5140 expand_end_loop ();
5141 pop_temp_slots ();
5142
5143 return const0_rtx;
5144
5145 case BIND_EXPR:
5146 {
5147 tree vars = TREE_OPERAND (exp, 0);
5148 int vars_need_expansion = 0;
5149
5150 /* Need to open a binding contour here because
5151 if there are any cleanups they most be contained here. */
5152 expand_start_bindings (0);
5153
5154 /* Mark the corresponding BLOCK for output in its proper place. */
5155 if (TREE_OPERAND (exp, 2) != 0
5156 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5157 insert_block (TREE_OPERAND (exp, 2));
5158
5159 /* If VARS have not yet been expanded, expand them now. */
5160 while (vars)
5161 {
5162 if (DECL_RTL (vars) == 0)
5163 {
5164 vars_need_expansion = 1;
5165 expand_decl (vars);
5166 }
5167 expand_decl_init (vars);
5168 vars = TREE_CHAIN (vars);
5169 }
5170
5171 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5172
5173 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5174
5175 return temp;
5176 }
5177
5178 case RTL_EXPR:
5179 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5180 abort ();
5181 emit_insns (RTL_EXPR_SEQUENCE (exp));
5182 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5183 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5184 free_temps_for_rtl_expr (exp);
5185 return RTL_EXPR_RTL (exp);
5186
5187 case CONSTRUCTOR:
5188 /* If we don't need the result, just ensure we evaluate any
5189 subexpressions. */
5190 if (ignore)
5191 {
5192 tree elt;
5193 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5194 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5195 return const0_rtx;
5196 }
5197
5198 /* All elts simple constants => refer to a constant in memory. But
5199 if this is a non-BLKmode mode, let it store a field at a time
5200 since that should make a CONST_INT or CONST_DOUBLE when we
5201 fold. Likewise, if we have a target we can use, it is best to
5202 store directly into the target unless the type is large enough
5203 that memcpy will be used. If we are making an initializer and
5204 all operands are constant, put it in memory as well. */
5205 else if ((TREE_STATIC (exp)
5206 && ((mode == BLKmode
5207 && ! (target != 0 && safe_from_p (target, exp)))
5208 || TREE_ADDRESSABLE (exp)
5209 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5210 && (move_by_pieces_ninsns
5211 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5212 TYPE_ALIGN (type) / BITS_PER_UNIT)
5213 > MOVE_RATIO)
5214 && ! mostly_zeros_p (exp))))
5215 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5216 {
5217 rtx constructor = output_constant_def (exp);
5218 if (modifier != EXPAND_CONST_ADDRESS
5219 && modifier != EXPAND_INITIALIZER
5220 && modifier != EXPAND_SUM
5221 && (! memory_address_p (GET_MODE (constructor),
5222 XEXP (constructor, 0))
5223 || (flag_force_addr
5224 && GET_CODE (XEXP (constructor, 0)) != REG)))
5225 constructor = change_address (constructor, VOIDmode,
5226 XEXP (constructor, 0));
5227 return constructor;
5228 }
5229
5230 else
5231 {
5232 /* Handle calls that pass values in multiple non-contiguous
5233 locations. The Irix 6 ABI has examples of this. */
5234 if (target == 0 || ! safe_from_p (target, exp)
5235 || GET_CODE (target) == PARALLEL)
5236 {
5237 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5238 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5239 else
5240 target = assign_temp (type, 0, 1, 1);
5241 }
5242
5243 if (TREE_READONLY (exp))
5244 {
5245 if (GET_CODE (target) == MEM)
5246 target = change_address (target, GET_MODE (target),
5247 XEXP (target, 0));
5248 RTX_UNCHANGING_P (target) = 1;
5249 }
5250
5251 store_constructor (exp, target, 0);
5252 return target;
5253 }
5254
5255 case INDIRECT_REF:
5256 {
5257 tree exp1 = TREE_OPERAND (exp, 0);
5258 tree exp2;
5259
5260 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5261 op0 = memory_address (mode, op0);
5262
5263 temp = gen_rtx (MEM, mode, op0);
5264 /* If address was computed by addition,
5265 mark this as an element of an aggregate. */
5266 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5267 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5268 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5269 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5270 || (TREE_CODE (exp1) == ADDR_EXPR
5271 && (exp2 = TREE_OPERAND (exp1, 0))
5272 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5273 MEM_IN_STRUCT_P (temp) = 1;
5274 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5275
5276 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5277 here, because, in C and C++, the fact that a location is accessed
5278 through a pointer to const does not mean that the value there can
5279 never change. Languages where it can never change should
5280 also set TREE_STATIC. */
5281 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5282 return temp;
5283 }
5284
5285 case ARRAY_REF:
5286 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5287 abort ();
5288
5289 {
5290 tree array = TREE_OPERAND (exp, 0);
5291 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5292 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5293 tree index = TREE_OPERAND (exp, 1);
5294 tree index_type = TREE_TYPE (index);
5295 int i;
5296
5297 if (TREE_CODE (low_bound) != INTEGER_CST
5298 && contains_placeholder_p (low_bound))
5299 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5300
5301 /* Optimize the special-case of a zero lower bound.
5302
5303 We convert the low_bound to sizetype to avoid some problems
5304 with constant folding. (E.g. suppose the lower bound is 1,
5305 and its mode is QI. Without the conversion, (ARRAY
5306 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5307 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5308
5309 But sizetype isn't quite right either (especially if
5310 the lowbound is negative). FIXME */
5311
5312 if (! integer_zerop (low_bound))
5313 index = fold (build (MINUS_EXPR, index_type, index,
5314 convert (sizetype, low_bound)));
5315
5316 if ((TREE_CODE (index) != INTEGER_CST
5317 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5318 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5319 {
5320 /* Nonconstant array index or nonconstant element size, and
5321 not an array in an unaligned (packed) structure field.
5322 Generate the tree for *(&array+index) and expand that,
5323 except do it in a language-independent way
5324 and don't complain about non-lvalue arrays.
5325 `mark_addressable' should already have been called
5326 for any array for which this case will be reached. */
5327
5328 /* Don't forget the const or volatile flag from the array
5329 element. */
5330 tree variant_type = build_type_variant (type,
5331 TREE_READONLY (exp),
5332 TREE_THIS_VOLATILE (exp));
5333 tree array_adr = build1 (ADDR_EXPR,
5334 build_pointer_type (variant_type), array);
5335 tree elt;
5336 tree size = size_in_bytes (type);
5337
5338 /* Convert the integer argument to a type the same size as sizetype
5339 so the multiply won't overflow spuriously. */
5340 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5341 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5342 index);
5343
5344 if (TREE_CODE (size) != INTEGER_CST
5345 && contains_placeholder_p (size))
5346 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5347
5348 /* Don't think the address has side effects
5349 just because the array does.
5350 (In some cases the address might have side effects,
5351 and we fail to record that fact here. However, it should not
5352 matter, since expand_expr should not care.) */
5353 TREE_SIDE_EFFECTS (array_adr) = 0;
5354
5355 elt
5356 = build1
5357 (INDIRECT_REF, type,
5358 fold (build (PLUS_EXPR,
5359 TYPE_POINTER_TO (variant_type),
5360 array_adr,
5361 fold
5362 (build1
5363 (NOP_EXPR,
5364 TYPE_POINTER_TO (variant_type),
5365 fold (build (MULT_EXPR, TREE_TYPE (index),
5366 index,
5367 convert (TREE_TYPE (index),
5368 size))))))));;
5369
5370 /* Volatility, etc., of new expression is same as old
5371 expression. */
5372 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5373 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5374 TREE_READONLY (elt) = TREE_READONLY (exp);
5375
5376 return expand_expr (elt, target, tmode, modifier);
5377 }
5378
5379 /* Fold an expression like: "foo"[2].
5380 This is not done in fold so it won't happen inside &.
5381 Don't fold if this is for wide characters since it's too
5382 difficult to do correctly and this is a very rare case. */
5383
5384 if (TREE_CODE (array) == STRING_CST
5385 && TREE_CODE (index) == INTEGER_CST
5386 && !TREE_INT_CST_HIGH (index)
5387 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5388 && GET_MODE_CLASS (mode) == MODE_INT
5389 && GET_MODE_SIZE (mode) == 1)
5390 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5391
5392 /* If this is a constant index into a constant array,
5393 just get the value from the array. Handle both the cases when
5394 we have an explicit constructor and when our operand is a variable
5395 that was declared const. */
5396
5397 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5398 {
5399 if (TREE_CODE (index) == INTEGER_CST
5400 && TREE_INT_CST_HIGH (index) == 0)
5401 {
5402 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5403
5404 i = TREE_INT_CST_LOW (index);
5405 while (elem && i--)
5406 elem = TREE_CHAIN (elem);
5407 if (elem)
5408 return expand_expr (fold (TREE_VALUE (elem)), target,
5409 tmode, modifier);
5410 }
5411 }
5412
5413 else if (optimize >= 1
5414 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5415 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5416 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5417 {
5418 if (TREE_CODE (index) == INTEGER_CST
5419 && TREE_INT_CST_HIGH (index) == 0)
5420 {
5421 tree init = DECL_INITIAL (array);
5422
5423 i = TREE_INT_CST_LOW (index);
5424 if (TREE_CODE (init) == CONSTRUCTOR)
5425 {
5426 tree elem = CONSTRUCTOR_ELTS (init);
5427
5428 while (elem
5429 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5430 elem = TREE_CHAIN (elem);
5431 if (elem)
5432 return expand_expr (fold (TREE_VALUE (elem)), target,
5433 tmode, modifier);
5434 }
5435 else if (TREE_CODE (init) == STRING_CST
5436 && i < TREE_STRING_LENGTH (init))
5437 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5438 }
5439 }
5440 }
5441
5442 /* Treat array-ref with constant index as a component-ref. */
5443
5444 case COMPONENT_REF:
5445 case BIT_FIELD_REF:
5446 /* If the operand is a CONSTRUCTOR, we can just extract the
5447 appropriate field if it is present. Don't do this if we have
5448 already written the data since we want to refer to that copy
5449 and varasm.c assumes that's what we'll do. */
5450 if (code != ARRAY_REF
5451 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5452 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5453 {
5454 tree elt;
5455
5456 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5457 elt = TREE_CHAIN (elt))
5458 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5459 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5460 }
5461
5462 {
5463 enum machine_mode mode1;
5464 int bitsize;
5465 int bitpos;
5466 tree offset;
5467 int volatilep = 0;
5468 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5469 &mode1, &unsignedp, &volatilep);
5470 int alignment;
5471
5472 /* If we got back the original object, something is wrong. Perhaps
5473 we are evaluating an expression too early. In any event, don't
5474 infinitely recurse. */
5475 if (tem == exp)
5476 abort ();
5477
5478 /* If TEM's type is a union of variable size, pass TARGET to the inner
5479 computation, since it will need a temporary and TARGET is known
5480 to have to do. This occurs in unchecked conversion in Ada. */
5481
5482 op0 = expand_expr (tem,
5483 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5484 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5485 != INTEGER_CST)
5486 ? target : NULL_RTX),
5487 VOIDmode,
5488 modifier == EXPAND_INITIALIZER ? modifier : 0);
5489
5490 /* If this is a constant, put it into a register if it is a
5491 legitimate constant and memory if it isn't. */
5492 if (CONSTANT_P (op0))
5493 {
5494 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5495 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5496 op0 = force_reg (mode, op0);
5497 else
5498 op0 = validize_mem (force_const_mem (mode, op0));
5499 }
5500
5501 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
5502 if (offset != 0)
5503 {
5504 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5505
5506 if (GET_CODE (op0) != MEM)
5507 abort ();
5508 op0 = change_address (op0, VOIDmode,
5509 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5510 force_reg (ptr_mode, offset_rtx)));
5511 /* If we have a variable offset, the known alignment
5512 is only that of the innermost structure containing the field.
5513 (Actually, we could sometimes do better by using the
5514 size of an element of the innermost array, but no need.) */
5515 if (TREE_CODE (exp) == COMPONENT_REF
5516 || TREE_CODE (exp) == BIT_FIELD_REF)
5517 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5518 / BITS_PER_UNIT);
5519 }
5520
5521 /* Don't forget about volatility even if this is a bitfield. */
5522 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5523 {
5524 op0 = copy_rtx (op0);
5525 MEM_VOLATILE_P (op0) = 1;
5526 }
5527
5528 /* In cases where an aligned union has an unaligned object
5529 as a field, we might be extracting a BLKmode value from
5530 an integer-mode (e.g., SImode) object. Handle this case
5531 by doing the extract into an object as wide as the field
5532 (which we know to be the width of a basic mode), then
5533 storing into memory, and changing the mode to BLKmode.
5534 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5535 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5536 if (mode1 == VOIDmode
5537 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5538 || (modifier != EXPAND_CONST_ADDRESS
5539 && modifier != EXPAND_INITIALIZER
5540 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5541 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5542 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5543 /* If the field isn't aligned enough to fetch as a memref,
5544 fetch it as a bit field. */
5545 || (SLOW_UNALIGNED_ACCESS
5546 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5547 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5548 {
5549 enum machine_mode ext_mode = mode;
5550
5551 if (ext_mode == BLKmode)
5552 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5553
5554 if (ext_mode == BLKmode)
5555 {
5556 /* In this case, BITPOS must start at a byte boundary and
5557 TARGET, if specified, must be a MEM. */
5558 if (GET_CODE (op0) != MEM
5559 || (target != 0 && GET_CODE (target) != MEM)
5560 || bitpos % BITS_PER_UNIT != 0)
5561 abort ();
5562
5563 op0 = change_address (op0, VOIDmode,
5564 plus_constant (XEXP (op0, 0),
5565 bitpos / BITS_PER_UNIT));
5566 if (target == 0)
5567 target = assign_temp (type, 0, 1, 1);
5568
5569 emit_block_move (target, op0,
5570 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5571 / BITS_PER_UNIT),
5572 1);
5573
5574 return target;
5575 }
5576
5577 op0 = validize_mem (op0);
5578
5579 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5580 mark_reg_pointer (XEXP (op0, 0), alignment);
5581
5582 op0 = extract_bit_field (op0, bitsize, bitpos,
5583 unsignedp, target, ext_mode, ext_mode,
5584 alignment,
5585 int_size_in_bytes (TREE_TYPE (tem)));
5586
5587 /* If the result is a record type and BITSIZE is narrower than
5588 the mode of OP0, an integral mode, and this is a big endian
5589 machine, we must put the field into the high-order bits. */
5590 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5591 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5592 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5593 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5594 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5595 - bitsize),
5596 op0, 1);
5597
5598 if (mode == BLKmode)
5599 {
5600 rtx new = assign_stack_temp (ext_mode,
5601 bitsize / BITS_PER_UNIT, 0);
5602
5603 emit_move_insn (new, op0);
5604 op0 = copy_rtx (new);
5605 PUT_MODE (op0, BLKmode);
5606 MEM_IN_STRUCT_P (op0) = 1;
5607 }
5608
5609 return op0;
5610 }
5611
5612 /* If the result is BLKmode, use that to access the object
5613 now as well. */
5614 if (mode == BLKmode)
5615 mode1 = BLKmode;
5616
5617 /* Get a reference to just this component. */
5618 if (modifier == EXPAND_CONST_ADDRESS
5619 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5620 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5621 (bitpos / BITS_PER_UNIT)));
5622 else
5623 op0 = change_address (op0, mode1,
5624 plus_constant (XEXP (op0, 0),
5625 (bitpos / BITS_PER_UNIT)));
5626 if (GET_CODE (XEXP (op0, 0)) == REG)
5627 mark_reg_pointer (XEXP (op0, 0), alignment);
5628
5629 MEM_IN_STRUCT_P (op0) = 1;
5630 MEM_VOLATILE_P (op0) |= volatilep;
5631 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5632 return op0;
5633 if (target == 0)
5634 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5635 convert_move (target, op0, unsignedp);
5636 return target;
5637 }
5638
5639 /* Intended for a reference to a buffer of a file-object in Pascal.
5640 But it's not certain that a special tree code will really be
5641 necessary for these. INDIRECT_REF might work for them. */
5642 case BUFFER_REF:
5643 abort ();
5644
5645 case IN_EXPR:
5646 {
5647 /* Pascal set IN expression.
5648
5649 Algorithm:
5650 rlo = set_low - (set_low%bits_per_word);
5651 the_word = set [ (index - rlo)/bits_per_word ];
5652 bit_index = index % bits_per_word;
5653 bitmask = 1 << bit_index;
5654 return !!(the_word & bitmask); */
5655
5656 tree set = TREE_OPERAND (exp, 0);
5657 tree index = TREE_OPERAND (exp, 1);
5658 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5659 tree set_type = TREE_TYPE (set);
5660 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5661 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5662 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5663 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5664 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5665 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5666 rtx setaddr = XEXP (setval, 0);
5667 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5668 rtx rlow;
5669 rtx diff, quo, rem, addr, bit, result;
5670
5671 preexpand_calls (exp);
5672
5673 /* If domain is empty, answer is no. Likewise if index is constant
5674 and out of bounds. */
5675 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5676 && TREE_CODE (set_low_bound) == INTEGER_CST
5677 && tree_int_cst_lt (set_high_bound, set_low_bound)
5678 || (TREE_CODE (index) == INTEGER_CST
5679 && TREE_CODE (set_low_bound) == INTEGER_CST
5680 && tree_int_cst_lt (index, set_low_bound))
5681 || (TREE_CODE (set_high_bound) == INTEGER_CST
5682 && TREE_CODE (index) == INTEGER_CST
5683 && tree_int_cst_lt (set_high_bound, index))))
5684 return const0_rtx;
5685
5686 if (target == 0)
5687 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5688
5689 /* If we get here, we have to generate the code for both cases
5690 (in range and out of range). */
5691
5692 op0 = gen_label_rtx ();
5693 op1 = gen_label_rtx ();
5694
5695 if (! (GET_CODE (index_val) == CONST_INT
5696 && GET_CODE (lo_r) == CONST_INT))
5697 {
5698 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5699 GET_MODE (index_val), iunsignedp, 0);
5700 emit_jump_insn (gen_blt (op1));
5701 }
5702
5703 if (! (GET_CODE (index_val) == CONST_INT
5704 && GET_CODE (hi_r) == CONST_INT))
5705 {
5706 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5707 GET_MODE (index_val), iunsignedp, 0);
5708 emit_jump_insn (gen_bgt (op1));
5709 }
5710
5711 /* Calculate the element number of bit zero in the first word
5712 of the set. */
5713 if (GET_CODE (lo_r) == CONST_INT)
5714 rlow = GEN_INT (INTVAL (lo_r)
5715 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5716 else
5717 rlow = expand_binop (index_mode, and_optab, lo_r,
5718 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5719 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5720
5721 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5722 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5723
5724 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5725 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5726 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5727 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5728
5729 addr = memory_address (byte_mode,
5730 expand_binop (index_mode, add_optab, diff,
5731 setaddr, NULL_RTX, iunsignedp,
5732 OPTAB_LIB_WIDEN));
5733
5734 /* Extract the bit we want to examine */
5735 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5736 gen_rtx (MEM, byte_mode, addr),
5737 make_tree (TREE_TYPE (index), rem),
5738 NULL_RTX, 1);
5739 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5740 GET_MODE (target) == byte_mode ? target : 0,
5741 1, OPTAB_LIB_WIDEN);
5742
5743 if (result != target)
5744 convert_move (target, result, 1);
5745
5746 /* Output the code to handle the out-of-range case. */
5747 emit_jump (op0);
5748 emit_label (op1);
5749 emit_move_insn (target, const0_rtx);
5750 emit_label (op0);
5751 return target;
5752 }
5753
5754 case WITH_CLEANUP_EXPR:
5755 if (RTL_EXPR_RTL (exp) == 0)
5756 {
5757 RTL_EXPR_RTL (exp)
5758 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5759 cleanups_this_call
5760 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5761 /* That's it for this cleanup. */
5762 TREE_OPERAND (exp, 2) = 0;
5763 expand_eh_region_start ();
5764 }
5765 return RTL_EXPR_RTL (exp);
5766
5767 case CLEANUP_POINT_EXPR:
5768 {
5769 extern int temp_slot_level;
5770 tree old_cleanups = cleanups_this_call;
5771 int old_temp_level = target_temp_slot_level;
5772 push_temp_slots ();
5773 target_temp_slot_level = temp_slot_level;
5774 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5775 /* If we're going to use this value, load it up now. */
5776 if (! ignore)
5777 op0 = force_not_mem (op0);
5778 expand_cleanups_to (old_cleanups);
5779 preserve_temp_slots (op0);
5780 free_temp_slots ();
5781 pop_temp_slots ();
5782 target_temp_slot_level = old_temp_level;
5783 }
5784 return op0;
5785
5786 case CALL_EXPR:
5787 /* Check for a built-in function. */
5788 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5789 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5790 == FUNCTION_DECL)
5791 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5792 return expand_builtin (exp, target, subtarget, tmode, ignore);
5793
5794 /* If this call was expanded already by preexpand_calls,
5795 just return the result we got. */
5796 if (CALL_EXPR_RTL (exp) != 0)
5797 return CALL_EXPR_RTL (exp);
5798
5799 return expand_call (exp, target, ignore);
5800
5801 case NON_LVALUE_EXPR:
5802 case NOP_EXPR:
5803 case CONVERT_EXPR:
5804 case REFERENCE_EXPR:
5805 if (TREE_CODE (type) == UNION_TYPE)
5806 {
5807 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5808 if (target == 0)
5809 {
5810 if (mode != BLKmode)
5811 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5812 else
5813 target = assign_temp (type, 0, 1, 1);
5814 }
5815
5816 if (GET_CODE (target) == MEM)
5817 /* Store data into beginning of memory target. */
5818 store_expr (TREE_OPERAND (exp, 0),
5819 change_address (target, TYPE_MODE (valtype), 0), 0);
5820
5821 else if (GET_CODE (target) == REG)
5822 /* Store this field into a union of the proper type. */
5823 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5824 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5825 VOIDmode, 0, 1,
5826 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5827 else
5828 abort ();
5829
5830 /* Return the entire union. */
5831 return target;
5832 }
5833
5834 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5835 {
5836 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5837 modifier);
5838
5839 /* If the signedness of the conversion differs and OP0 is
5840 a promoted SUBREG, clear that indication since we now
5841 have to do the proper extension. */
5842 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5843 && GET_CODE (op0) == SUBREG)
5844 SUBREG_PROMOTED_VAR_P (op0) = 0;
5845
5846 return op0;
5847 }
5848
5849 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5850 if (GET_MODE (op0) == mode)
5851 return op0;
5852
5853 /* If OP0 is a constant, just convert it into the proper mode. */
5854 if (CONSTANT_P (op0))
5855 return
5856 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5857 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5858
5859 if (modifier == EXPAND_INITIALIZER)
5860 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5861
5862 if (target == 0)
5863 return
5864 convert_to_mode (mode, op0,
5865 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5866 else
5867 convert_move (target, op0,
5868 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5869 return target;
5870
5871 case PLUS_EXPR:
5872 /* We come here from MINUS_EXPR when the second operand is a
5873 constant. */
5874 plus_expr:
5875 this_optab = add_optab;
5876
5877 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5878 something else, make sure we add the register to the constant and
5879 then to the other thing. This case can occur during strength
5880 reduction and doing it this way will produce better code if the
5881 frame pointer or argument pointer is eliminated.
5882
5883 fold-const.c will ensure that the constant is always in the inner
5884 PLUS_EXPR, so the only case we need to do anything about is if
5885 sp, ap, or fp is our second argument, in which case we must swap
5886 the innermost first argument and our second argument. */
5887
5888 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5889 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5890 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5891 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5892 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5893 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5894 {
5895 tree t = TREE_OPERAND (exp, 1);
5896
5897 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5898 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5899 }
5900
5901 /* If the result is to be ptr_mode and we are adding an integer to
5902 something, we might be forming a constant. So try to use
5903 plus_constant. If it produces a sum and we can't accept it,
5904 use force_operand. This allows P = &ARR[const] to generate
5905 efficient code on machines where a SYMBOL_REF is not a valid
5906 address.
5907
5908 If this is an EXPAND_SUM call, always return the sum. */
5909 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5910 || mode == ptr_mode)
5911 {
5912 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5913 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5914 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5915 {
5916 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5917 EXPAND_SUM);
5918 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5919 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5920 op1 = force_operand (op1, target);
5921 return op1;
5922 }
5923
5924 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5925 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5926 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5927 {
5928 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5929 EXPAND_SUM);
5930 if (! CONSTANT_P (op0))
5931 {
5932 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5933 VOIDmode, modifier);
5934 /* Don't go to both_summands if modifier
5935 says it's not right to return a PLUS. */
5936 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5937 goto binop2;
5938 goto both_summands;
5939 }
5940 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5941 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5942 op0 = force_operand (op0, target);
5943 return op0;
5944 }
5945 }
5946
5947 /* No sense saving up arithmetic to be done
5948 if it's all in the wrong mode to form part of an address.
5949 And force_operand won't know whether to sign-extend or
5950 zero-extend. */
5951 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5952 || mode != ptr_mode)
5953 goto binop;
5954
5955 preexpand_calls (exp);
5956 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5957 subtarget = 0;
5958
5959 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5960 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5961
5962 both_summands:
5963 /* Make sure any term that's a sum with a constant comes last. */
5964 if (GET_CODE (op0) == PLUS
5965 && CONSTANT_P (XEXP (op0, 1)))
5966 {
5967 temp = op0;
5968 op0 = op1;
5969 op1 = temp;
5970 }
5971 /* If adding to a sum including a constant,
5972 associate it to put the constant outside. */
5973 if (GET_CODE (op1) == PLUS
5974 && CONSTANT_P (XEXP (op1, 1)))
5975 {
5976 rtx constant_term = const0_rtx;
5977
5978 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5979 if (temp != 0)
5980 op0 = temp;
5981 /* Ensure that MULT comes first if there is one. */
5982 else if (GET_CODE (op0) == MULT)
5983 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5984 else
5985 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5986
5987 /* Let's also eliminate constants from op0 if possible. */
5988 op0 = eliminate_constant_term (op0, &constant_term);
5989
5990 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5991 their sum should be a constant. Form it into OP1, since the
5992 result we want will then be OP0 + OP1. */
5993
5994 temp = simplify_binary_operation (PLUS, mode, constant_term,
5995 XEXP (op1, 1));
5996 if (temp != 0)
5997 op1 = temp;
5998 else
5999 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6000 }
6001
6002 /* Put a constant term last and put a multiplication first. */
6003 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6004 temp = op1, op1 = op0, op0 = temp;
6005
6006 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6007 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
6008
6009 case MINUS_EXPR:
6010 /* For initializers, we are allowed to return a MINUS of two
6011 symbolic constants. Here we handle all cases when both operands
6012 are constant. */
6013 /* Handle difference of two symbolic constants,
6014 for the sake of an initializer. */
6015 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6016 && really_constant_p (TREE_OPERAND (exp, 0))
6017 && really_constant_p (TREE_OPERAND (exp, 1)))
6018 {
6019 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6020 VOIDmode, modifier);
6021 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6022 VOIDmode, modifier);
6023
6024 /* If the last operand is a CONST_INT, use plus_constant of
6025 the negated constant. Else make the MINUS. */
6026 if (GET_CODE (op1) == CONST_INT)
6027 return plus_constant (op0, - INTVAL (op1));
6028 else
6029 return gen_rtx (MINUS, mode, op0, op1);
6030 }
6031 /* Convert A - const to A + (-const). */
6032 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6033 {
6034 tree negated = fold (build1 (NEGATE_EXPR, type,
6035 TREE_OPERAND (exp, 1)));
6036
6037 /* Deal with the case where we can't negate the constant
6038 in TYPE. */
6039 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6040 {
6041 tree newtype = signed_type (type);
6042 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6043 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6044 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6045
6046 if (! TREE_OVERFLOW (newneg))
6047 return expand_expr (convert (type,
6048 build (PLUS_EXPR, newtype,
6049 newop0, newneg)),
6050 target, tmode, modifier);
6051 }
6052 else
6053 {
6054 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6055 goto plus_expr;
6056 }
6057 }
6058 this_optab = sub_optab;
6059 goto binop;
6060
6061 case MULT_EXPR:
6062 preexpand_calls (exp);
6063 /* If first operand is constant, swap them.
6064 Thus the following special case checks need only
6065 check the second operand. */
6066 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6067 {
6068 register tree t1 = TREE_OPERAND (exp, 0);
6069 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6070 TREE_OPERAND (exp, 1) = t1;
6071 }
6072
6073 /* Attempt to return something suitable for generating an
6074 indexed address, for machines that support that. */
6075
6076 if (modifier == EXPAND_SUM && mode == ptr_mode
6077 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6078 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6079 {
6080 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6081
6082 /* Apply distributive law if OP0 is x+c. */
6083 if (GET_CODE (op0) == PLUS
6084 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6085 return gen_rtx (PLUS, mode,
6086 gen_rtx (MULT, mode, XEXP (op0, 0),
6087 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6088 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6089 * INTVAL (XEXP (op0, 1))));
6090
6091 if (GET_CODE (op0) != REG)
6092 op0 = force_operand (op0, NULL_RTX);
6093 if (GET_CODE (op0) != REG)
6094 op0 = copy_to_mode_reg (mode, op0);
6095
6096 return gen_rtx (MULT, mode, op0,
6097 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6098 }
6099
6100 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6101 subtarget = 0;
6102
6103 /* Check for multiplying things that have been extended
6104 from a narrower type. If this machine supports multiplying
6105 in that narrower type with a result in the desired type,
6106 do it that way, and avoid the explicit type-conversion. */
6107 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6108 && TREE_CODE (type) == INTEGER_TYPE
6109 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6110 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6111 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6112 && int_fits_type_p (TREE_OPERAND (exp, 1),
6113 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6114 /* Don't use a widening multiply if a shift will do. */
6115 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6116 > HOST_BITS_PER_WIDE_INT)
6117 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6118 ||
6119 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6120 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6121 ==
6122 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6123 /* If both operands are extended, they must either both
6124 be zero-extended or both be sign-extended. */
6125 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6126 ==
6127 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6128 {
6129 enum machine_mode innermode
6130 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6131 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6132 ? smul_widen_optab : umul_widen_optab);
6133 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6134 ? umul_widen_optab : smul_widen_optab);
6135 if (mode == GET_MODE_WIDER_MODE (innermode))
6136 {
6137 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6138 {
6139 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6140 NULL_RTX, VOIDmode, 0);
6141 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6142 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6143 VOIDmode, 0);
6144 else
6145 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6146 NULL_RTX, VOIDmode, 0);
6147 goto binop2;
6148 }
6149 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6150 && innermode == word_mode)
6151 {
6152 rtx htem;
6153 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6154 NULL_RTX, VOIDmode, 0);
6155 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6156 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6157 VOIDmode, 0);
6158 else
6159 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6160 NULL_RTX, VOIDmode, 0);
6161 temp = expand_binop (mode, other_optab, op0, op1, target,
6162 unsignedp, OPTAB_LIB_WIDEN);
6163 htem = expand_mult_highpart_adjust (innermode,
6164 gen_highpart (innermode, temp),
6165 op0, op1,
6166 gen_highpart (innermode, temp),
6167 unsignedp);
6168 emit_move_insn (gen_highpart (innermode, temp), htem);
6169 return temp;
6170 }
6171 }
6172 }
6173 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6174 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6175 return expand_mult (mode, op0, op1, target, unsignedp);
6176
6177 case TRUNC_DIV_EXPR:
6178 case FLOOR_DIV_EXPR:
6179 case CEIL_DIV_EXPR:
6180 case ROUND_DIV_EXPR:
6181 case EXACT_DIV_EXPR:
6182 preexpand_calls (exp);
6183 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6184 subtarget = 0;
6185 /* Possible optimization: compute the dividend with EXPAND_SUM
6186 then if the divisor is constant can optimize the case
6187 where some terms of the dividend have coeffs divisible by it. */
6188 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6189 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6190 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6191
6192 case RDIV_EXPR:
6193 this_optab = flodiv_optab;
6194 goto binop;
6195
6196 case TRUNC_MOD_EXPR:
6197 case FLOOR_MOD_EXPR:
6198 case CEIL_MOD_EXPR:
6199 case ROUND_MOD_EXPR:
6200 preexpand_calls (exp);
6201 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6202 subtarget = 0;
6203 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6204 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6205 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6206
6207 case FIX_ROUND_EXPR:
6208 case FIX_FLOOR_EXPR:
6209 case FIX_CEIL_EXPR:
6210 abort (); /* Not used for C. */
6211
6212 case FIX_TRUNC_EXPR:
6213 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6214 if (target == 0)
6215 target = gen_reg_rtx (mode);
6216 expand_fix (target, op0, unsignedp);
6217 return target;
6218
6219 case FLOAT_EXPR:
6220 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6221 if (target == 0)
6222 target = gen_reg_rtx (mode);
6223 /* expand_float can't figure out what to do if FROM has VOIDmode.
6224 So give it the correct mode. With -O, cse will optimize this. */
6225 if (GET_MODE (op0) == VOIDmode)
6226 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6227 op0);
6228 expand_float (target, op0,
6229 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6230 return target;
6231
6232 case NEGATE_EXPR:
6233 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6234 temp = expand_unop (mode, neg_optab, op0, target, 0);
6235 if (temp == 0)
6236 abort ();
6237 return temp;
6238
6239 case ABS_EXPR:
6240 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6241
6242 /* Handle complex values specially. */
6243 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6244 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6245 return expand_complex_abs (mode, op0, target, unsignedp);
6246
6247 /* Unsigned abs is simply the operand. Testing here means we don't
6248 risk generating incorrect code below. */
6249 if (TREE_UNSIGNED (type))
6250 return op0;
6251
6252 return expand_abs (mode, op0, target, unsignedp,
6253 safe_from_p (target, TREE_OPERAND (exp, 0)));
6254
6255 case MAX_EXPR:
6256 case MIN_EXPR:
6257 target = original_target;
6258 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6259 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6260 || GET_MODE (target) != mode
6261 || (GET_CODE (target) == REG
6262 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6263 target = gen_reg_rtx (mode);
6264 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6265 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6266
6267 /* First try to do it with a special MIN or MAX instruction.
6268 If that does not win, use a conditional jump to select the proper
6269 value. */
6270 this_optab = (TREE_UNSIGNED (type)
6271 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6272 : (code == MIN_EXPR ? smin_optab : smax_optab));
6273
6274 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6275 OPTAB_WIDEN);
6276 if (temp != 0)
6277 return temp;
6278
6279 /* At this point, a MEM target is no longer useful; we will get better
6280 code without it. */
6281
6282 if (GET_CODE (target) == MEM)
6283 target = gen_reg_rtx (mode);
6284
6285 if (target != op0)
6286 emit_move_insn (target, op0);
6287
6288 op0 = gen_label_rtx ();
6289
6290 /* If this mode is an integer too wide to compare properly,
6291 compare word by word. Rely on cse to optimize constant cases. */
6292 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6293 {
6294 if (code == MAX_EXPR)
6295 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6296 target, op1, NULL_RTX, op0);
6297 else
6298 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6299 op1, target, NULL_RTX, op0);
6300 emit_move_insn (target, op1);
6301 }
6302 else
6303 {
6304 if (code == MAX_EXPR)
6305 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6306 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6307 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6308 else
6309 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6310 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6311 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6312 if (temp == const0_rtx)
6313 emit_move_insn (target, op1);
6314 else if (temp != const_true_rtx)
6315 {
6316 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6317 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6318 else
6319 abort ();
6320 emit_move_insn (target, op1);
6321 }
6322 }
6323 emit_label (op0);
6324 return target;
6325
6326 case BIT_NOT_EXPR:
6327 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6328 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6329 if (temp == 0)
6330 abort ();
6331 return temp;
6332
6333 case FFS_EXPR:
6334 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6335 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6336 if (temp == 0)
6337 abort ();
6338 return temp;
6339
6340 /* ??? Can optimize bitwise operations with one arg constant.
6341 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6342 and (a bitwise1 b) bitwise2 b (etc)
6343 but that is probably not worth while. */
6344
6345 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6346 boolean values when we want in all cases to compute both of them. In
6347 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6348 as actual zero-or-1 values and then bitwise anding. In cases where
6349 there cannot be any side effects, better code would be made by
6350 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6351 how to recognize those cases. */
6352
6353 case TRUTH_AND_EXPR:
6354 case BIT_AND_EXPR:
6355 this_optab = and_optab;
6356 goto binop;
6357
6358 case TRUTH_OR_EXPR:
6359 case BIT_IOR_EXPR:
6360 this_optab = ior_optab;
6361 goto binop;
6362
6363 case TRUTH_XOR_EXPR:
6364 case BIT_XOR_EXPR:
6365 this_optab = xor_optab;
6366 goto binop;
6367
6368 case LSHIFT_EXPR:
6369 case RSHIFT_EXPR:
6370 case LROTATE_EXPR:
6371 case RROTATE_EXPR:
6372 preexpand_calls (exp);
6373 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6374 subtarget = 0;
6375 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6376 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6377 unsignedp);
6378
6379 /* Could determine the answer when only additive constants differ. Also,
6380 the addition of one can be handled by changing the condition. */
6381 case LT_EXPR:
6382 case LE_EXPR:
6383 case GT_EXPR:
6384 case GE_EXPR:
6385 case EQ_EXPR:
6386 case NE_EXPR:
6387 preexpand_calls (exp);
6388 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6389 if (temp != 0)
6390 return temp;
6391
6392 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6393 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6394 && original_target
6395 && GET_CODE (original_target) == REG
6396 && (GET_MODE (original_target)
6397 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6398 {
6399 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6400 VOIDmode, 0);
6401
6402 if (temp != original_target)
6403 temp = copy_to_reg (temp);
6404
6405 op1 = gen_label_rtx ();
6406 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6407 GET_MODE (temp), unsignedp, 0);
6408 emit_jump_insn (gen_beq (op1));
6409 emit_move_insn (temp, const1_rtx);
6410 emit_label (op1);
6411 return temp;
6412 }
6413
6414 /* If no set-flag instruction, must generate a conditional
6415 store into a temporary variable. Drop through
6416 and handle this like && and ||. */
6417
6418 case TRUTH_ANDIF_EXPR:
6419 case TRUTH_ORIF_EXPR:
6420 if (! ignore
6421 && (target == 0 || ! safe_from_p (target, exp)
6422 /* Make sure we don't have a hard reg (such as function's return
6423 value) live across basic blocks, if not optimizing. */
6424 || (!optimize && GET_CODE (target) == REG
6425 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6426 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6427
6428 if (target)
6429 emit_clr_insn (target);
6430
6431 op1 = gen_label_rtx ();
6432 jumpifnot (exp, op1);
6433
6434 if (target)
6435 emit_0_to_1_insn (target);
6436
6437 emit_label (op1);
6438 return ignore ? const0_rtx : target;
6439
6440 case TRUTH_NOT_EXPR:
6441 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6442 /* The parser is careful to generate TRUTH_NOT_EXPR
6443 only with operands that are always zero or one. */
6444 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6445 target, 1, OPTAB_LIB_WIDEN);
6446 if (temp == 0)
6447 abort ();
6448 return temp;
6449
6450 case COMPOUND_EXPR:
6451 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6452 emit_queue ();
6453 return expand_expr (TREE_OPERAND (exp, 1),
6454 (ignore ? const0_rtx : target),
6455 VOIDmode, 0);
6456
6457 case COND_EXPR:
6458 {
6459 rtx flag = NULL_RTX;
6460 tree left_cleanups = NULL_TREE;
6461 tree right_cleanups = NULL_TREE;
6462
6463 /* Used to save a pointer to the place to put the setting of
6464 the flag that indicates if this side of the conditional was
6465 taken. We backpatch the code, if we find out later that we
6466 have any conditional cleanups that need to be performed. */
6467 rtx dest_right_flag = NULL_RTX;
6468 rtx dest_left_flag = NULL_RTX;
6469
6470 /* Note that COND_EXPRs whose type is a structure or union
6471 are required to be constructed to contain assignments of
6472 a temporary variable, so that we can evaluate them here
6473 for side effect only. If type is void, we must do likewise. */
6474
6475 /* If an arm of the branch requires a cleanup,
6476 only that cleanup is performed. */
6477
6478 tree singleton = 0;
6479 tree binary_op = 0, unary_op = 0;
6480 tree old_cleanups = cleanups_this_call;
6481
6482 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6483 convert it to our mode, if necessary. */
6484 if (integer_onep (TREE_OPERAND (exp, 1))
6485 && integer_zerop (TREE_OPERAND (exp, 2))
6486 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6487 {
6488 if (ignore)
6489 {
6490 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6491 modifier);
6492 return const0_rtx;
6493 }
6494
6495 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6496 if (GET_MODE (op0) == mode)
6497 return op0;
6498
6499 if (target == 0)
6500 target = gen_reg_rtx (mode);
6501 convert_move (target, op0, unsignedp);
6502 return target;
6503 }
6504
6505 /* Check for X ? A + B : A. If we have this, we can copy
6506 A to the output and conditionally add B. Similarly for unary
6507 operations. Don't do this if X has side-effects because
6508 those side effects might affect A or B and the "?" operation is
6509 a sequence point in ANSI. (We test for side effects later.) */
6510
6511 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6512 && operand_equal_p (TREE_OPERAND (exp, 2),
6513 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6514 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6515 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6516 && operand_equal_p (TREE_OPERAND (exp, 1),
6517 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6518 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6519 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6520 && operand_equal_p (TREE_OPERAND (exp, 2),
6521 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6522 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6523 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6524 && operand_equal_p (TREE_OPERAND (exp, 1),
6525 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6526 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6527
6528 /* If we are not to produce a result, we have no target. Otherwise,
6529 if a target was specified use it; it will not be used as an
6530 intermediate target unless it is safe. If no target, use a
6531 temporary. */
6532
6533 if (ignore)
6534 temp = 0;
6535 else if (original_target
6536 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6537 || (singleton && GET_CODE (original_target) == REG
6538 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6539 && original_target == var_rtx (singleton)))
6540 && GET_MODE (original_target) == mode
6541 && ! (GET_CODE (original_target) == MEM
6542 && MEM_VOLATILE_P (original_target)))
6543 temp = original_target;
6544 else if (TREE_ADDRESSABLE (type))
6545 abort ();
6546 else
6547 temp = assign_temp (type, 0, 0, 1);
6548
6549 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6550 operation, do this as A + (X != 0). Similarly for other simple
6551 binary operators. */
6552 if (temp && singleton && binary_op
6553 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6554 && (TREE_CODE (binary_op) == PLUS_EXPR
6555 || TREE_CODE (binary_op) == MINUS_EXPR
6556 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6557 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6558 && integer_onep (TREE_OPERAND (binary_op, 1))
6559 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6560 {
6561 rtx result;
6562 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6563 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6564 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6565 : xor_optab);
6566
6567 /* If we had X ? A : A + 1, do this as A + (X == 0).
6568
6569 We have to invert the truth value here and then put it
6570 back later if do_store_flag fails. We cannot simply copy
6571 TREE_OPERAND (exp, 0) to another variable and modify that
6572 because invert_truthvalue can modify the tree pointed to
6573 by its argument. */
6574 if (singleton == TREE_OPERAND (exp, 1))
6575 TREE_OPERAND (exp, 0)
6576 = invert_truthvalue (TREE_OPERAND (exp, 0));
6577
6578 result = do_store_flag (TREE_OPERAND (exp, 0),
6579 (safe_from_p (temp, singleton)
6580 ? temp : NULL_RTX),
6581 mode, BRANCH_COST <= 1);
6582
6583 if (result)
6584 {
6585 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6586 return expand_binop (mode, boptab, op1, result, temp,
6587 unsignedp, OPTAB_LIB_WIDEN);
6588 }
6589 else if (singleton == TREE_OPERAND (exp, 1))
6590 TREE_OPERAND (exp, 0)
6591 = invert_truthvalue (TREE_OPERAND (exp, 0));
6592 }
6593
6594 do_pending_stack_adjust ();
6595 NO_DEFER_POP;
6596 op0 = gen_label_rtx ();
6597
6598 flag = gen_reg_rtx (word_mode);
6599 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6600 {
6601 if (temp != 0)
6602 {
6603 /* If the target conflicts with the other operand of the
6604 binary op, we can't use it. Also, we can't use the target
6605 if it is a hard register, because evaluating the condition
6606 might clobber it. */
6607 if ((binary_op
6608 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6609 || (GET_CODE (temp) == REG
6610 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6611 temp = gen_reg_rtx (mode);
6612 store_expr (singleton, temp, 0);
6613 }
6614 else
6615 expand_expr (singleton,
6616 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6617 dest_left_flag = get_last_insn ();
6618 if (singleton == TREE_OPERAND (exp, 1))
6619 jumpif (TREE_OPERAND (exp, 0), op0);
6620 else
6621 jumpifnot (TREE_OPERAND (exp, 0), op0);
6622
6623 /* Allows cleanups up to here. */
6624 old_cleanups = cleanups_this_call;
6625 if (binary_op && temp == 0)
6626 /* Just touch the other operand. */
6627 expand_expr (TREE_OPERAND (binary_op, 1),
6628 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6629 else if (binary_op)
6630 store_expr (build (TREE_CODE (binary_op), type,
6631 make_tree (type, temp),
6632 TREE_OPERAND (binary_op, 1)),
6633 temp, 0);
6634 else
6635 store_expr (build1 (TREE_CODE (unary_op), type,
6636 make_tree (type, temp)),
6637 temp, 0);
6638 op1 = op0;
6639 dest_right_flag = get_last_insn ();
6640 }
6641 #if 0
6642 /* This is now done in jump.c and is better done there because it
6643 produces shorter register lifetimes. */
6644
6645 /* Check for both possibilities either constants or variables
6646 in registers (but not the same as the target!). If so, can
6647 save branches by assigning one, branching, and assigning the
6648 other. */
6649 else if (temp && GET_MODE (temp) != BLKmode
6650 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6651 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6652 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6653 && DECL_RTL (TREE_OPERAND (exp, 1))
6654 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6655 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6656 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6657 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6658 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6659 && DECL_RTL (TREE_OPERAND (exp, 2))
6660 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6661 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6662 {
6663 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6664 temp = gen_reg_rtx (mode);
6665 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6666 dest_left_flag = get_last_insn ();
6667 jumpifnot (TREE_OPERAND (exp, 0), op0);
6668
6669 /* Allows cleanups up to here. */
6670 old_cleanups = cleanups_this_call;
6671 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6672 op1 = op0;
6673 dest_right_flag = get_last_insn ();
6674 }
6675 #endif
6676 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6677 comparison operator. If we have one of these cases, set the
6678 output to A, branch on A (cse will merge these two references),
6679 then set the output to FOO. */
6680 else if (temp
6681 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6682 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6683 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6684 TREE_OPERAND (exp, 1), 0)
6685 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6686 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6687 {
6688 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6689 temp = gen_reg_rtx (mode);
6690 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6691 dest_left_flag = get_last_insn ();
6692 jumpif (TREE_OPERAND (exp, 0), op0);
6693
6694 /* Allows cleanups up to here. */
6695 old_cleanups = cleanups_this_call;
6696 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6697 op1 = op0;
6698 dest_right_flag = get_last_insn ();
6699 }
6700 else if (temp
6701 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6702 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6703 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6704 TREE_OPERAND (exp, 2), 0)
6705 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6706 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6707 {
6708 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6709 temp = gen_reg_rtx (mode);
6710 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6711 dest_left_flag = get_last_insn ();
6712 jumpifnot (TREE_OPERAND (exp, 0), op0);
6713
6714 /* Allows cleanups up to here. */
6715 old_cleanups = cleanups_this_call;
6716 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6717 op1 = op0;
6718 dest_right_flag = get_last_insn ();
6719 }
6720 else
6721 {
6722 op1 = gen_label_rtx ();
6723 jumpifnot (TREE_OPERAND (exp, 0), op0);
6724
6725 /* Allows cleanups up to here. */
6726 old_cleanups = cleanups_this_call;
6727 if (temp != 0)
6728 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6729 else
6730 expand_expr (TREE_OPERAND (exp, 1),
6731 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6732 dest_left_flag = get_last_insn ();
6733
6734 /* Handle conditional cleanups, if any. */
6735 left_cleanups = defer_cleanups_to (old_cleanups);
6736
6737 emit_queue ();
6738 emit_jump_insn (gen_jump (op1));
6739 emit_barrier ();
6740 emit_label (op0);
6741 if (temp != 0)
6742 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6743 else
6744 expand_expr (TREE_OPERAND (exp, 2),
6745 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6746 dest_right_flag = get_last_insn ();
6747 }
6748
6749 /* Handle conditional cleanups, if any. */
6750 right_cleanups = defer_cleanups_to (old_cleanups);
6751
6752 emit_queue ();
6753 emit_label (op1);
6754 OK_DEFER_POP;
6755
6756 /* Add back in, any conditional cleanups. */
6757 if (left_cleanups || right_cleanups)
6758 {
6759 tree new_cleanups;
6760 tree cond;
6761 rtx last;
6762
6763 /* Now that we know that a flag is needed, go back and add in the
6764 setting of the flag. */
6765
6766 /* Do the left side flag. */
6767 last = get_last_insn ();
6768 /* Flag left cleanups as needed. */
6769 emit_move_insn (flag, const1_rtx);
6770 /* ??? deprecated, use sequences instead. */
6771 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6772
6773 /* Do the right side flag. */
6774 last = get_last_insn ();
6775 /* Flag left cleanups as needed. */
6776 emit_move_insn (flag, const0_rtx);
6777 /* ??? deprecated, use sequences instead. */
6778 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6779
6780 /* All cleanups must be on the function_obstack. */
6781 push_obstacks_nochange ();
6782 resume_temporary_allocation ();
6783
6784 /* convert flag, which is an rtx, into a tree. */
6785 cond = make_node (RTL_EXPR);
6786 TREE_TYPE (cond) = integer_type_node;
6787 RTL_EXPR_RTL (cond) = flag;
6788 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6789 cond = save_expr (cond);
6790
6791 if (! left_cleanups)
6792 left_cleanups = integer_zero_node;
6793 if (! right_cleanups)
6794 right_cleanups = integer_zero_node;
6795 new_cleanups = build (COND_EXPR, void_type_node,
6796 truthvalue_conversion (cond),
6797 left_cleanups, right_cleanups);
6798 new_cleanups = fold (new_cleanups);
6799
6800 pop_obstacks ();
6801
6802 /* Now add in the conditionalized cleanups. */
6803 cleanups_this_call
6804 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6805 expand_eh_region_start ();
6806 }
6807 return temp;
6808 }
6809
6810 case TARGET_EXPR:
6811 {
6812 /* Something needs to be initialized, but we didn't know
6813 where that thing was when building the tree. For example,
6814 it could be the return value of a function, or a parameter
6815 to a function which lays down in the stack, or a temporary
6816 variable which must be passed by reference.
6817
6818 We guarantee that the expression will either be constructed
6819 or copied into our original target. */
6820
6821 tree slot = TREE_OPERAND (exp, 0);
6822 tree cleanups = NULL_TREE;
6823 tree exp1;
6824 rtx temp;
6825
6826 if (TREE_CODE (slot) != VAR_DECL)
6827 abort ();
6828
6829 if (! ignore)
6830 target = original_target;
6831
6832 if (target == 0)
6833 {
6834 if (DECL_RTL (slot) != 0)
6835 {
6836 target = DECL_RTL (slot);
6837 /* If we have already expanded the slot, so don't do
6838 it again. (mrs) */
6839 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6840 return target;
6841 }
6842 else
6843 {
6844 target = assign_temp (type, 2, 1, 1);
6845 /* All temp slots at this level must not conflict. */
6846 preserve_temp_slots (target);
6847 DECL_RTL (slot) = target;
6848
6849 /* Since SLOT is not known to the called function
6850 to belong to its stack frame, we must build an explicit
6851 cleanup. This case occurs when we must build up a reference
6852 to pass the reference as an argument. In this case,
6853 it is very likely that such a reference need not be
6854 built here. */
6855
6856 if (TREE_OPERAND (exp, 2) == 0)
6857 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6858 cleanups = TREE_OPERAND (exp, 2);
6859 }
6860 }
6861 else
6862 {
6863 /* This case does occur, when expanding a parameter which
6864 needs to be constructed on the stack. The target
6865 is the actual stack address that we want to initialize.
6866 The function we call will perform the cleanup in this case. */
6867
6868 /* If we have already assigned it space, use that space,
6869 not target that we were passed in, as our target
6870 parameter is only a hint. */
6871 if (DECL_RTL (slot) != 0)
6872 {
6873 target = DECL_RTL (slot);
6874 /* If we have already expanded the slot, so don't do
6875 it again. (mrs) */
6876 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6877 return target;
6878 }
6879
6880 DECL_RTL (slot) = target;
6881 }
6882
6883 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6884 /* Mark it as expanded. */
6885 TREE_OPERAND (exp, 1) = NULL_TREE;
6886
6887 store_expr (exp1, target, 0);
6888
6889 if (cleanups)
6890 {
6891 cleanups_this_call = tree_cons (NULL_TREE,
6892 cleanups,
6893 cleanups_this_call);
6894 expand_eh_region_start ();
6895 }
6896
6897 return target;
6898 }
6899
6900 case INIT_EXPR:
6901 {
6902 tree lhs = TREE_OPERAND (exp, 0);
6903 tree rhs = TREE_OPERAND (exp, 1);
6904 tree noncopied_parts = 0;
6905 tree lhs_type = TREE_TYPE (lhs);
6906
6907 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6908 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6909 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6910 TYPE_NONCOPIED_PARTS (lhs_type));
6911 while (noncopied_parts != 0)
6912 {
6913 expand_assignment (TREE_VALUE (noncopied_parts),
6914 TREE_PURPOSE (noncopied_parts), 0, 0);
6915 noncopied_parts = TREE_CHAIN (noncopied_parts);
6916 }
6917 return temp;
6918 }
6919
6920 case MODIFY_EXPR:
6921 {
6922 /* If lhs is complex, expand calls in rhs before computing it.
6923 That's so we don't compute a pointer and save it over a call.
6924 If lhs is simple, compute it first so we can give it as a
6925 target if the rhs is just a call. This avoids an extra temp and copy
6926 and that prevents a partial-subsumption which makes bad code.
6927 Actually we could treat component_ref's of vars like vars. */
6928
6929 tree lhs = TREE_OPERAND (exp, 0);
6930 tree rhs = TREE_OPERAND (exp, 1);
6931 tree noncopied_parts = 0;
6932 tree lhs_type = TREE_TYPE (lhs);
6933
6934 temp = 0;
6935
6936 if (TREE_CODE (lhs) != VAR_DECL
6937 && TREE_CODE (lhs) != RESULT_DECL
6938 && TREE_CODE (lhs) != PARM_DECL)
6939 preexpand_calls (exp);
6940
6941 /* Check for |= or &= of a bitfield of size one into another bitfield
6942 of size 1. In this case, (unless we need the result of the
6943 assignment) we can do this more efficiently with a
6944 test followed by an assignment, if necessary.
6945
6946 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6947 things change so we do, this code should be enhanced to
6948 support it. */
6949 if (ignore
6950 && TREE_CODE (lhs) == COMPONENT_REF
6951 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6952 || TREE_CODE (rhs) == BIT_AND_EXPR)
6953 && TREE_OPERAND (rhs, 0) == lhs
6954 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6955 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6956 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6957 {
6958 rtx label = gen_label_rtx ();
6959
6960 do_jump (TREE_OPERAND (rhs, 1),
6961 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6962 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6963 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6964 (TREE_CODE (rhs) == BIT_IOR_EXPR
6965 ? integer_one_node
6966 : integer_zero_node)),
6967 0, 0);
6968 do_pending_stack_adjust ();
6969 emit_label (label);
6970 return const0_rtx;
6971 }
6972
6973 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6974 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6975 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6976 TYPE_NONCOPIED_PARTS (lhs_type));
6977
6978 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6979 while (noncopied_parts != 0)
6980 {
6981 expand_assignment (TREE_PURPOSE (noncopied_parts),
6982 TREE_VALUE (noncopied_parts), 0, 0);
6983 noncopied_parts = TREE_CHAIN (noncopied_parts);
6984 }
6985 return temp;
6986 }
6987
6988 case PREINCREMENT_EXPR:
6989 case PREDECREMENT_EXPR:
6990 return expand_increment (exp, 0, ignore);
6991
6992 case POSTINCREMENT_EXPR:
6993 case POSTDECREMENT_EXPR:
6994 /* Faster to treat as pre-increment if result is not used. */
6995 return expand_increment (exp, ! ignore, ignore);
6996
6997 case ADDR_EXPR:
6998 /* If nonzero, TEMP will be set to the address of something that might
6999 be a MEM corresponding to a stack slot. */
7000 temp = 0;
7001
7002 /* Are we taking the address of a nested function? */
7003 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7004 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7005 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7006 {
7007 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7008 op0 = force_operand (op0, target);
7009 }
7010 /* If we are taking the address of something erroneous, just
7011 return a zero. */
7012 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7013 return const0_rtx;
7014 else
7015 {
7016 /* We make sure to pass const0_rtx down if we came in with
7017 ignore set, to avoid doing the cleanups twice for something. */
7018 op0 = expand_expr (TREE_OPERAND (exp, 0),
7019 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7020 (modifier == EXPAND_INITIALIZER
7021 ? modifier : EXPAND_CONST_ADDRESS));
7022
7023 /* If we are going to ignore the result, OP0 will have been set
7024 to const0_rtx, so just return it. Don't get confused and
7025 think we are taking the address of the constant. */
7026 if (ignore)
7027 return op0;
7028
7029 op0 = protect_from_queue (op0, 0);
7030
7031 /* We would like the object in memory. If it is a constant,
7032 we can have it be statically allocated into memory. For
7033 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7034 memory and store the value into it. */
7035
7036 if (CONSTANT_P (op0))
7037 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7038 op0);
7039 else if (GET_CODE (op0) == MEM)
7040 {
7041 mark_temp_addr_taken (op0);
7042 temp = XEXP (op0, 0);
7043 }
7044
7045 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7046 || GET_CODE (op0) == CONCAT)
7047 {
7048 /* If this object is in a register, it must be not
7049 be BLKmode. */
7050 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7051 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7052
7053 mark_temp_addr_taken (memloc);
7054 emit_move_insn (memloc, op0);
7055 op0 = memloc;
7056 }
7057
7058 if (GET_CODE (op0) != MEM)
7059 abort ();
7060
7061 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7062 {
7063 temp = XEXP (op0, 0);
7064 #ifdef POINTERS_EXTEND_UNSIGNED
7065 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7066 && mode == ptr_mode)
7067 temp = convert_memory_address (ptr_mode, temp);
7068 #endif
7069 return temp;
7070 }
7071
7072 op0 = force_operand (XEXP (op0, 0), target);
7073 }
7074
7075 if (flag_force_addr && GET_CODE (op0) != REG)
7076 op0 = force_reg (Pmode, op0);
7077
7078 if (GET_CODE (op0) == REG
7079 && ! REG_USERVAR_P (op0))
7080 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7081
7082 /* If we might have had a temp slot, add an equivalent address
7083 for it. */
7084 if (temp != 0)
7085 update_temp_slot_address (temp, op0);
7086
7087 #ifdef POINTERS_EXTEND_UNSIGNED
7088 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7089 && mode == ptr_mode)
7090 op0 = convert_memory_address (ptr_mode, op0);
7091 #endif
7092
7093 return op0;
7094
7095 case ENTRY_VALUE_EXPR:
7096 abort ();
7097
7098 /* COMPLEX type for Extended Pascal & Fortran */
7099 case COMPLEX_EXPR:
7100 {
7101 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7102 rtx insns;
7103
7104 /* Get the rtx code of the operands. */
7105 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7106 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7107
7108 if (! target)
7109 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7110
7111 start_sequence ();
7112
7113 /* Move the real (op0) and imaginary (op1) parts to their location. */
7114 emit_move_insn (gen_realpart (mode, target), op0);
7115 emit_move_insn (gen_imagpart (mode, target), op1);
7116
7117 insns = get_insns ();
7118 end_sequence ();
7119
7120 /* Complex construction should appear as a single unit. */
7121 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7122 each with a separate pseudo as destination.
7123 It's not correct for flow to treat them as a unit. */
7124 if (GET_CODE (target) != CONCAT)
7125 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7126 else
7127 emit_insns (insns);
7128
7129 return target;
7130 }
7131
7132 case REALPART_EXPR:
7133 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7134 return gen_realpart (mode, op0);
7135
7136 case IMAGPART_EXPR:
7137 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7138 return gen_imagpart (mode, op0);
7139
7140 case CONJ_EXPR:
7141 {
7142 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7143 rtx imag_t;
7144 rtx insns;
7145
7146 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7147
7148 if (! target)
7149 target = gen_reg_rtx (mode);
7150
7151 start_sequence ();
7152
7153 /* Store the realpart and the negated imagpart to target. */
7154 emit_move_insn (gen_realpart (partmode, target),
7155 gen_realpart (partmode, op0));
7156
7157 imag_t = gen_imagpart (partmode, target);
7158 temp = expand_unop (partmode, neg_optab,
7159 gen_imagpart (partmode, op0), imag_t, 0);
7160 if (temp != imag_t)
7161 emit_move_insn (imag_t, temp);
7162
7163 insns = get_insns ();
7164 end_sequence ();
7165
7166 /* Conjugate should appear as a single unit
7167 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7168 each with a separate pseudo as destination.
7169 It's not correct for flow to treat them as a unit. */
7170 if (GET_CODE (target) != CONCAT)
7171 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7172 else
7173 emit_insns (insns);
7174
7175 return target;
7176 }
7177
7178 case ERROR_MARK:
7179 op0 = CONST0_RTX (tmode);
7180 if (op0 != 0)
7181 return op0;
7182 return const0_rtx;
7183
7184 default:
7185 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7186 }
7187
7188 /* Here to do an ordinary binary operator, generating an instruction
7189 from the optab already placed in `this_optab'. */
7190 binop:
7191 preexpand_calls (exp);
7192 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7193 subtarget = 0;
7194 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7195 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7196 binop2:
7197 temp = expand_binop (mode, this_optab, op0, op1, target,
7198 unsignedp, OPTAB_LIB_WIDEN);
7199 if (temp == 0)
7200 abort ();
7201 return temp;
7202 }
7203
7204
7205 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7206
7207 void
7208 bc_expand_expr (exp)
7209 tree exp;
7210 {
7211 enum tree_code code;
7212 tree type, arg0;
7213 rtx r;
7214 struct binary_operator *binoptab;
7215 struct unary_operator *unoptab;
7216 struct increment_operator *incroptab;
7217 struct bc_label *lab, *lab1;
7218 enum bytecode_opcode opcode;
7219
7220
7221 code = TREE_CODE (exp);
7222
7223 switch (code)
7224 {
7225 case PARM_DECL:
7226
7227 if (DECL_RTL (exp) == 0)
7228 {
7229 error_with_decl (exp, "prior parameter's size depends on `%s'");
7230 return;
7231 }
7232
7233 bc_load_parmaddr (DECL_RTL (exp));
7234 bc_load_memory (TREE_TYPE (exp), exp);
7235
7236 return;
7237
7238 case VAR_DECL:
7239
7240 if (DECL_RTL (exp) == 0)
7241 abort ();
7242
7243 #if 0
7244 if (BYTECODE_LABEL (DECL_RTL (exp)))
7245 bc_load_externaddr (DECL_RTL (exp));
7246 else
7247 bc_load_localaddr (DECL_RTL (exp));
7248 #endif
7249 if (TREE_PUBLIC (exp))
7250 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7251 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7252 else
7253 bc_load_localaddr (DECL_RTL (exp));
7254
7255 bc_load_memory (TREE_TYPE (exp), exp);
7256 return;
7257
7258 case INTEGER_CST:
7259
7260 #ifdef DEBUG_PRINT_CODE
7261 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7262 #endif
7263 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7264 ? SImode
7265 : TYPE_MODE (TREE_TYPE (exp)))],
7266 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7267 return;
7268
7269 case REAL_CST:
7270
7271 #if 0
7272 #ifdef DEBUG_PRINT_CODE
7273 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7274 #endif
7275 /* FIX THIS: find a better way to pass real_cst's. -bson */
7276 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7277 (double) TREE_REAL_CST (exp));
7278 #else
7279 abort ();
7280 #endif
7281
7282 return;
7283
7284 case CALL_EXPR:
7285
7286 /* We build a call description vector describing the type of
7287 the return value and of the arguments; this call vector,
7288 together with a pointer to a location for the return value
7289 and the base of the argument list, is passed to the low
7290 level machine dependent call subroutine, which is responsible
7291 for putting the arguments wherever real functions expect
7292 them, as well as getting the return value back. */
7293 {
7294 tree calldesc = 0, arg;
7295 int nargs = 0, i;
7296 rtx retval;
7297
7298 /* Push the evaluated args on the evaluation stack in reverse
7299 order. Also make an entry for each arg in the calldesc
7300 vector while we're at it. */
7301
7302 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7303
7304 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7305 {
7306 ++nargs;
7307 bc_expand_expr (TREE_VALUE (arg));
7308
7309 calldesc = tree_cons ((tree) 0,
7310 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7311 calldesc);
7312 calldesc = tree_cons ((tree) 0,
7313 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7314 calldesc);
7315 }
7316
7317 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7318
7319 /* Allocate a location for the return value and push its
7320 address on the evaluation stack. Also make an entry
7321 at the front of the calldesc for the return value type. */
7322
7323 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7324 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7325 bc_load_localaddr (retval);
7326
7327 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7328 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7329
7330 /* Prepend the argument count. */
7331 calldesc = tree_cons ((tree) 0,
7332 build_int_2 (nargs, 0),
7333 calldesc);
7334
7335 /* Push the address of the call description vector on the stack. */
7336 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7337 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7338 build_index_type (build_int_2 (nargs * 2, 0)));
7339 r = output_constant_def (calldesc);
7340 bc_load_externaddr (r);
7341
7342 /* Push the address of the function to be called. */
7343 bc_expand_expr (TREE_OPERAND (exp, 0));
7344
7345 /* Call the function, popping its address and the calldesc vector
7346 address off the evaluation stack in the process. */
7347 bc_emit_instruction (call);
7348
7349 /* Pop the arguments off the stack. */
7350 bc_adjust_stack (nargs);
7351
7352 /* Load the return value onto the stack. */
7353 bc_load_localaddr (retval);
7354 bc_load_memory (type, TREE_OPERAND (exp, 0));
7355 }
7356 return;
7357
7358 case SAVE_EXPR:
7359
7360 if (!SAVE_EXPR_RTL (exp))
7361 {
7362 /* First time around: copy to local variable */
7363 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7364 TYPE_ALIGN (TREE_TYPE(exp)));
7365 bc_expand_expr (TREE_OPERAND (exp, 0));
7366 bc_emit_instruction (duplicate);
7367
7368 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7369 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7370 }
7371 else
7372 {
7373 /* Consecutive reference: use saved copy */
7374 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7375 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7376 }
7377 return;
7378
7379 #if 0
7380 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7381 how are they handled instead? */
7382 case LET_STMT:
7383
7384 TREE_USED (exp) = 1;
7385 bc_expand_expr (STMT_BODY (exp));
7386 return;
7387 #endif
7388
7389 case NOP_EXPR:
7390 case CONVERT_EXPR:
7391
7392 bc_expand_expr (TREE_OPERAND (exp, 0));
7393 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7394 return;
7395
7396 case MODIFY_EXPR:
7397
7398 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7399 return;
7400
7401 case ADDR_EXPR:
7402
7403 bc_expand_address (TREE_OPERAND (exp, 0));
7404 return;
7405
7406 case INDIRECT_REF:
7407
7408 bc_expand_expr (TREE_OPERAND (exp, 0));
7409 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7410 return;
7411
7412 case ARRAY_REF:
7413
7414 bc_expand_expr (bc_canonicalize_array_ref (exp));
7415 return;
7416
7417 case COMPONENT_REF:
7418
7419 bc_expand_component_address (exp);
7420
7421 /* If we have a bitfield, generate a proper load */
7422 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7423 return;
7424
7425 case COMPOUND_EXPR:
7426
7427 bc_expand_expr (TREE_OPERAND (exp, 0));
7428 bc_emit_instruction (drop);
7429 bc_expand_expr (TREE_OPERAND (exp, 1));
7430 return;
7431
7432 case COND_EXPR:
7433
7434 bc_expand_expr (TREE_OPERAND (exp, 0));
7435 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7436 lab = bc_get_bytecode_label ();
7437 bc_emit_bytecode (xjumpifnot);
7438 bc_emit_bytecode_labelref (lab);
7439
7440 #ifdef DEBUG_PRINT_CODE
7441 fputc ('\n', stderr);
7442 #endif
7443 bc_expand_expr (TREE_OPERAND (exp, 1));
7444 lab1 = bc_get_bytecode_label ();
7445 bc_emit_bytecode (jump);
7446 bc_emit_bytecode_labelref (lab1);
7447
7448 #ifdef DEBUG_PRINT_CODE
7449 fputc ('\n', stderr);
7450 #endif
7451
7452 bc_emit_bytecode_labeldef (lab);
7453 bc_expand_expr (TREE_OPERAND (exp, 2));
7454 bc_emit_bytecode_labeldef (lab1);
7455 return;
7456
7457 case TRUTH_ANDIF_EXPR:
7458
7459 opcode = xjumpifnot;
7460 goto andorif;
7461
7462 case TRUTH_ORIF_EXPR:
7463
7464 opcode = xjumpif;
7465 goto andorif;
7466
7467 case PLUS_EXPR:
7468
7469 binoptab = optab_plus_expr;
7470 goto binop;
7471
7472 case MINUS_EXPR:
7473
7474 binoptab = optab_minus_expr;
7475 goto binop;
7476
7477 case MULT_EXPR:
7478
7479 binoptab = optab_mult_expr;
7480 goto binop;
7481
7482 case TRUNC_DIV_EXPR:
7483 case FLOOR_DIV_EXPR:
7484 case CEIL_DIV_EXPR:
7485 case ROUND_DIV_EXPR:
7486 case EXACT_DIV_EXPR:
7487
7488 binoptab = optab_trunc_div_expr;
7489 goto binop;
7490
7491 case TRUNC_MOD_EXPR:
7492 case FLOOR_MOD_EXPR:
7493 case CEIL_MOD_EXPR:
7494 case ROUND_MOD_EXPR:
7495
7496 binoptab = optab_trunc_mod_expr;
7497 goto binop;
7498
7499 case FIX_ROUND_EXPR:
7500 case FIX_FLOOR_EXPR:
7501 case FIX_CEIL_EXPR:
7502 abort (); /* Not used for C. */
7503
7504 case FIX_TRUNC_EXPR:
7505 case FLOAT_EXPR:
7506 case MAX_EXPR:
7507 case MIN_EXPR:
7508 case FFS_EXPR:
7509 case LROTATE_EXPR:
7510 case RROTATE_EXPR:
7511 abort (); /* FIXME */
7512
7513 case RDIV_EXPR:
7514
7515 binoptab = optab_rdiv_expr;
7516 goto binop;
7517
7518 case BIT_AND_EXPR:
7519
7520 binoptab = optab_bit_and_expr;
7521 goto binop;
7522
7523 case BIT_IOR_EXPR:
7524
7525 binoptab = optab_bit_ior_expr;
7526 goto binop;
7527
7528 case BIT_XOR_EXPR:
7529
7530 binoptab = optab_bit_xor_expr;
7531 goto binop;
7532
7533 case LSHIFT_EXPR:
7534
7535 binoptab = optab_lshift_expr;
7536 goto binop;
7537
7538 case RSHIFT_EXPR:
7539
7540 binoptab = optab_rshift_expr;
7541 goto binop;
7542
7543 case TRUTH_AND_EXPR:
7544
7545 binoptab = optab_truth_and_expr;
7546 goto binop;
7547
7548 case TRUTH_OR_EXPR:
7549
7550 binoptab = optab_truth_or_expr;
7551 goto binop;
7552
7553 case LT_EXPR:
7554
7555 binoptab = optab_lt_expr;
7556 goto binop;
7557
7558 case LE_EXPR:
7559
7560 binoptab = optab_le_expr;
7561 goto binop;
7562
7563 case GE_EXPR:
7564
7565 binoptab = optab_ge_expr;
7566 goto binop;
7567
7568 case GT_EXPR:
7569
7570 binoptab = optab_gt_expr;
7571 goto binop;
7572
7573 case EQ_EXPR:
7574
7575 binoptab = optab_eq_expr;
7576 goto binop;
7577
7578 case NE_EXPR:
7579
7580 binoptab = optab_ne_expr;
7581 goto binop;
7582
7583 case NEGATE_EXPR:
7584
7585 unoptab = optab_negate_expr;
7586 goto unop;
7587
7588 case BIT_NOT_EXPR:
7589
7590 unoptab = optab_bit_not_expr;
7591 goto unop;
7592
7593 case TRUTH_NOT_EXPR:
7594
7595 unoptab = optab_truth_not_expr;
7596 goto unop;
7597
7598 case PREDECREMENT_EXPR:
7599
7600 incroptab = optab_predecrement_expr;
7601 goto increment;
7602
7603 case PREINCREMENT_EXPR:
7604
7605 incroptab = optab_preincrement_expr;
7606 goto increment;
7607
7608 case POSTDECREMENT_EXPR:
7609
7610 incroptab = optab_postdecrement_expr;
7611 goto increment;
7612
7613 case POSTINCREMENT_EXPR:
7614
7615 incroptab = optab_postincrement_expr;
7616 goto increment;
7617
7618 case CONSTRUCTOR:
7619
7620 bc_expand_constructor (exp);
7621 return;
7622
7623 case ERROR_MARK:
7624 case RTL_EXPR:
7625
7626 return;
7627
7628 case BIND_EXPR:
7629 {
7630 tree vars = TREE_OPERAND (exp, 0);
7631 int vars_need_expansion = 0;
7632
7633 /* Need to open a binding contour here because
7634 if there are any cleanups they most be contained here. */
7635 expand_start_bindings (0);
7636
7637 /* Mark the corresponding BLOCK for output. */
7638 if (TREE_OPERAND (exp, 2) != 0)
7639 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7640
7641 /* If VARS have not yet been expanded, expand them now. */
7642 while (vars)
7643 {
7644 if (DECL_RTL (vars) == 0)
7645 {
7646 vars_need_expansion = 1;
7647 expand_decl (vars);
7648 }
7649 expand_decl_init (vars);
7650 vars = TREE_CHAIN (vars);
7651 }
7652
7653 bc_expand_expr (TREE_OPERAND (exp, 1));
7654
7655 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7656
7657 return;
7658 }
7659 }
7660
7661 abort ();
7662
7663 binop:
7664
7665 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7666 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7667 return;
7668
7669
7670 unop:
7671
7672 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7673 return;
7674
7675
7676 andorif:
7677
7678 bc_expand_expr (TREE_OPERAND (exp, 0));
7679 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7680 lab = bc_get_bytecode_label ();
7681
7682 bc_emit_instruction (duplicate);
7683 bc_emit_bytecode (opcode);
7684 bc_emit_bytecode_labelref (lab);
7685
7686 #ifdef DEBUG_PRINT_CODE
7687 fputc ('\n', stderr);
7688 #endif
7689
7690 bc_emit_instruction (drop);
7691
7692 bc_expand_expr (TREE_OPERAND (exp, 1));
7693 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7694 bc_emit_bytecode_labeldef (lab);
7695 return;
7696
7697
7698 increment:
7699
7700 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7701
7702 /* Push the quantum. */
7703 bc_expand_expr (TREE_OPERAND (exp, 1));
7704
7705 /* Convert it to the lvalue's type. */
7706 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7707
7708 /* Push the address of the lvalue */
7709 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7710
7711 /* Perform actual increment */
7712 bc_expand_increment (incroptab, type);
7713 return;
7714 }
7715 \f
7716 /* Return the alignment in bits of EXP, a pointer valued expression.
7717 But don't return more than MAX_ALIGN no matter what.
7718 The alignment returned is, by default, the alignment of the thing that
7719 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7720
7721 Otherwise, look at the expression to see if we can do better, i.e., if the
7722 expression is actually pointing at an object whose alignment is tighter. */
7723
7724 static int
7725 get_pointer_alignment (exp, max_align)
7726 tree exp;
7727 unsigned max_align;
7728 {
7729 unsigned align, inner;
7730
7731 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7732 return 0;
7733
7734 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7735 align = MIN (align, max_align);
7736
7737 while (1)
7738 {
7739 switch (TREE_CODE (exp))
7740 {
7741 case NOP_EXPR:
7742 case CONVERT_EXPR:
7743 case NON_LVALUE_EXPR:
7744 exp = TREE_OPERAND (exp, 0);
7745 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7746 return align;
7747 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7748 align = MIN (inner, max_align);
7749 break;
7750
7751 case PLUS_EXPR:
7752 /* If sum of pointer + int, restrict our maximum alignment to that
7753 imposed by the integer. If not, we can't do any better than
7754 ALIGN. */
7755 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7756 return align;
7757
7758 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7759 & (max_align - 1))
7760 != 0)
7761 max_align >>= 1;
7762
7763 exp = TREE_OPERAND (exp, 0);
7764 break;
7765
7766 case ADDR_EXPR:
7767 /* See what we are pointing at and look at its alignment. */
7768 exp = TREE_OPERAND (exp, 0);
7769 if (TREE_CODE (exp) == FUNCTION_DECL)
7770 align = FUNCTION_BOUNDARY;
7771 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7772 align = DECL_ALIGN (exp);
7773 #ifdef CONSTANT_ALIGNMENT
7774 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7775 align = CONSTANT_ALIGNMENT (exp, align);
7776 #endif
7777 return MIN (align, max_align);
7778
7779 default:
7780 return align;
7781 }
7782 }
7783 }
7784 \f
7785 /* Return the tree node and offset if a given argument corresponds to
7786 a string constant. */
7787
7788 static tree
7789 string_constant (arg, ptr_offset)
7790 tree arg;
7791 tree *ptr_offset;
7792 {
7793 STRIP_NOPS (arg);
7794
7795 if (TREE_CODE (arg) == ADDR_EXPR
7796 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7797 {
7798 *ptr_offset = integer_zero_node;
7799 return TREE_OPERAND (arg, 0);
7800 }
7801 else if (TREE_CODE (arg) == PLUS_EXPR)
7802 {
7803 tree arg0 = TREE_OPERAND (arg, 0);
7804 tree arg1 = TREE_OPERAND (arg, 1);
7805
7806 STRIP_NOPS (arg0);
7807 STRIP_NOPS (arg1);
7808
7809 if (TREE_CODE (arg0) == ADDR_EXPR
7810 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7811 {
7812 *ptr_offset = arg1;
7813 return TREE_OPERAND (arg0, 0);
7814 }
7815 else if (TREE_CODE (arg1) == ADDR_EXPR
7816 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7817 {
7818 *ptr_offset = arg0;
7819 return TREE_OPERAND (arg1, 0);
7820 }
7821 }
7822
7823 return 0;
7824 }
7825
7826 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7827 way, because it could contain a zero byte in the middle.
7828 TREE_STRING_LENGTH is the size of the character array, not the string.
7829
7830 Unfortunately, string_constant can't access the values of const char
7831 arrays with initializers, so neither can we do so here. */
7832
7833 static tree
7834 c_strlen (src)
7835 tree src;
7836 {
7837 tree offset_node;
7838 int offset, max;
7839 char *ptr;
7840
7841 src = string_constant (src, &offset_node);
7842 if (src == 0)
7843 return 0;
7844 max = TREE_STRING_LENGTH (src);
7845 ptr = TREE_STRING_POINTER (src);
7846 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7847 {
7848 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7849 compute the offset to the following null if we don't know where to
7850 start searching for it. */
7851 int i;
7852 for (i = 0; i < max; i++)
7853 if (ptr[i] == 0)
7854 return 0;
7855 /* We don't know the starting offset, but we do know that the string
7856 has no internal zero bytes. We can assume that the offset falls
7857 within the bounds of the string; otherwise, the programmer deserves
7858 what he gets. Subtract the offset from the length of the string,
7859 and return that. */
7860 /* This would perhaps not be valid if we were dealing with named
7861 arrays in addition to literal string constants. */
7862 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7863 }
7864
7865 /* We have a known offset into the string. Start searching there for
7866 a null character. */
7867 if (offset_node == 0)
7868 offset = 0;
7869 else
7870 {
7871 /* Did we get a long long offset? If so, punt. */
7872 if (TREE_INT_CST_HIGH (offset_node) != 0)
7873 return 0;
7874 offset = TREE_INT_CST_LOW (offset_node);
7875 }
7876 /* If the offset is known to be out of bounds, warn, and call strlen at
7877 runtime. */
7878 if (offset < 0 || offset > max)
7879 {
7880 warning ("offset outside bounds of constant string");
7881 return 0;
7882 }
7883 /* Use strlen to search for the first zero byte. Since any strings
7884 constructed with build_string will have nulls appended, we win even
7885 if we get handed something like (char[4])"abcd".
7886
7887 Since OFFSET is our starting index into the string, no further
7888 calculation is needed. */
7889 return size_int (strlen (ptr + offset));
7890 }
7891
7892 rtx
7893 expand_builtin_return_addr (fndecl_code, count, tem)
7894 enum built_in_function fndecl_code;
7895 int count;
7896 rtx tem;
7897 {
7898 int i;
7899
7900 /* Some machines need special handling before we can access
7901 arbitrary frames. For example, on the sparc, we must first flush
7902 all register windows to the stack. */
7903 #ifdef SETUP_FRAME_ADDRESSES
7904 SETUP_FRAME_ADDRESSES ();
7905 #endif
7906
7907 /* On the sparc, the return address is not in the frame, it is in a
7908 register. There is no way to access it off of the current frame
7909 pointer, but it can be accessed off the previous frame pointer by
7910 reading the value from the register window save area. */
7911 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7912 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7913 count--;
7914 #endif
7915
7916 /* Scan back COUNT frames to the specified frame. */
7917 for (i = 0; i < count; i++)
7918 {
7919 /* Assume the dynamic chain pointer is in the word that the
7920 frame address points to, unless otherwise specified. */
7921 #ifdef DYNAMIC_CHAIN_ADDRESS
7922 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7923 #endif
7924 tem = memory_address (Pmode, tem);
7925 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7926 }
7927
7928 /* For __builtin_frame_address, return what we've got. */
7929 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7930 return tem;
7931
7932 /* For __builtin_return_address, Get the return address from that
7933 frame. */
7934 #ifdef RETURN_ADDR_RTX
7935 tem = RETURN_ADDR_RTX (count, tem);
7936 #else
7937 tem = memory_address (Pmode,
7938 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7939 tem = gen_rtx (MEM, Pmode, tem);
7940 #endif
7941 return tem;
7942 }
7943 \f
7944 /* Expand an expression EXP that calls a built-in function,
7945 with result going to TARGET if that's convenient
7946 (and in mode MODE if that's convenient).
7947 SUBTARGET may be used as the target for computing one of EXP's operands.
7948 IGNORE is nonzero if the value is to be ignored. */
7949
7950 #define CALLED_AS_BUILT_IN(NODE) \
7951 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7952
7953 static rtx
7954 expand_builtin (exp, target, subtarget, mode, ignore)
7955 tree exp;
7956 rtx target;
7957 rtx subtarget;
7958 enum machine_mode mode;
7959 int ignore;
7960 {
7961 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7962 tree arglist = TREE_OPERAND (exp, 1);
7963 rtx op0;
7964 rtx lab1, insns;
7965 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7966 optab builtin_optab;
7967
7968 switch (DECL_FUNCTION_CODE (fndecl))
7969 {
7970 case BUILT_IN_ABS:
7971 case BUILT_IN_LABS:
7972 case BUILT_IN_FABS:
7973 /* build_function_call changes these into ABS_EXPR. */
7974 abort ();
7975
7976 case BUILT_IN_SIN:
7977 case BUILT_IN_COS:
7978 /* Treat these like sqrt, but only if the user asks for them. */
7979 if (! flag_fast_math)
7980 break;
7981 case BUILT_IN_FSQRT:
7982 /* If not optimizing, call the library function. */
7983 if (! optimize)
7984 break;
7985
7986 if (arglist == 0
7987 /* Arg could be wrong type if user redeclared this fcn wrong. */
7988 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7989 break;
7990
7991 /* Stabilize and compute the argument. */
7992 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7993 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7994 {
7995 exp = copy_node (exp);
7996 arglist = copy_node (arglist);
7997 TREE_OPERAND (exp, 1) = arglist;
7998 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7999 }
8000 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8001
8002 /* Make a suitable register to place result in. */
8003 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8004
8005 emit_queue ();
8006 start_sequence ();
8007
8008 switch (DECL_FUNCTION_CODE (fndecl))
8009 {
8010 case BUILT_IN_SIN:
8011 builtin_optab = sin_optab; break;
8012 case BUILT_IN_COS:
8013 builtin_optab = cos_optab; break;
8014 case BUILT_IN_FSQRT:
8015 builtin_optab = sqrt_optab; break;
8016 default:
8017 abort ();
8018 }
8019
8020 /* Compute into TARGET.
8021 Set TARGET to wherever the result comes back. */
8022 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8023 builtin_optab, op0, target, 0);
8024
8025 /* If we were unable to expand via the builtin, stop the
8026 sequence (without outputting the insns) and break, causing
8027 a call the the library function. */
8028 if (target == 0)
8029 {
8030 end_sequence ();
8031 break;
8032 }
8033
8034 /* Check the results by default. But if flag_fast_math is turned on,
8035 then assume sqrt will always be called with valid arguments. */
8036
8037 if (! flag_fast_math)
8038 {
8039 /* Don't define the builtin FP instructions
8040 if your machine is not IEEE. */
8041 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8042 abort ();
8043
8044 lab1 = gen_label_rtx ();
8045
8046 /* Test the result; if it is NaN, set errno=EDOM because
8047 the argument was not in the domain. */
8048 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8049 emit_jump_insn (gen_beq (lab1));
8050
8051 #ifdef TARGET_EDOM
8052 {
8053 #ifdef GEN_ERRNO_RTX
8054 rtx errno_rtx = GEN_ERRNO_RTX;
8055 #else
8056 rtx errno_rtx
8057 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8058 #endif
8059
8060 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8061 }
8062 #else
8063 /* We can't set errno=EDOM directly; let the library call do it.
8064 Pop the arguments right away in case the call gets deleted. */
8065 NO_DEFER_POP;
8066 expand_call (exp, target, 0);
8067 OK_DEFER_POP;
8068 #endif
8069
8070 emit_label (lab1);
8071 }
8072
8073 /* Output the entire sequence. */
8074 insns = get_insns ();
8075 end_sequence ();
8076 emit_insns (insns);
8077
8078 return target;
8079
8080 /* __builtin_apply_args returns block of memory allocated on
8081 the stack into which is stored the arg pointer, structure
8082 value address, static chain, and all the registers that might
8083 possibly be used in performing a function call. The code is
8084 moved to the start of the function so the incoming values are
8085 saved. */
8086 case BUILT_IN_APPLY_ARGS:
8087 /* Don't do __builtin_apply_args more than once in a function.
8088 Save the result of the first call and reuse it. */
8089 if (apply_args_value != 0)
8090 return apply_args_value;
8091 {
8092 /* When this function is called, it means that registers must be
8093 saved on entry to this function. So we migrate the
8094 call to the first insn of this function. */
8095 rtx temp;
8096 rtx seq;
8097
8098 start_sequence ();
8099 temp = expand_builtin_apply_args ();
8100 seq = get_insns ();
8101 end_sequence ();
8102
8103 apply_args_value = temp;
8104
8105 /* Put the sequence after the NOTE that starts the function.
8106 If this is inside a SEQUENCE, make the outer-level insn
8107 chain current, so the code is placed at the start of the
8108 function. */
8109 push_topmost_sequence ();
8110 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8111 pop_topmost_sequence ();
8112 return temp;
8113 }
8114
8115 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8116 FUNCTION with a copy of the parameters described by
8117 ARGUMENTS, and ARGSIZE. It returns a block of memory
8118 allocated on the stack into which is stored all the registers
8119 that might possibly be used for returning the result of a
8120 function. ARGUMENTS is the value returned by
8121 __builtin_apply_args. ARGSIZE is the number of bytes of
8122 arguments that must be copied. ??? How should this value be
8123 computed? We'll also need a safe worst case value for varargs
8124 functions. */
8125 case BUILT_IN_APPLY:
8126 if (arglist == 0
8127 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8128 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8129 || TREE_CHAIN (arglist) == 0
8130 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8131 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8132 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8133 return const0_rtx;
8134 else
8135 {
8136 int i;
8137 tree t;
8138 rtx ops[3];
8139
8140 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8141 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8142
8143 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8144 }
8145
8146 /* __builtin_return (RESULT) causes the function to return the
8147 value described by RESULT. RESULT is address of the block of
8148 memory returned by __builtin_apply. */
8149 case BUILT_IN_RETURN:
8150 if (arglist
8151 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8152 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8153 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8154 NULL_RTX, VOIDmode, 0));
8155 return const0_rtx;
8156
8157 case BUILT_IN_SAVEREGS:
8158 /* Don't do __builtin_saveregs more than once in a function.
8159 Save the result of the first call and reuse it. */
8160 if (saveregs_value != 0)
8161 return saveregs_value;
8162 {
8163 /* When this function is called, it means that registers must be
8164 saved on entry to this function. So we migrate the
8165 call to the first insn of this function. */
8166 rtx temp;
8167 rtx seq;
8168
8169 /* Now really call the function. `expand_call' does not call
8170 expand_builtin, so there is no danger of infinite recursion here. */
8171 start_sequence ();
8172
8173 #ifdef EXPAND_BUILTIN_SAVEREGS
8174 /* Do whatever the machine needs done in this case. */
8175 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8176 #else
8177 /* The register where the function returns its value
8178 is likely to have something else in it, such as an argument.
8179 So preserve that register around the call. */
8180
8181 if (value_mode != VOIDmode)
8182 {
8183 rtx valreg = hard_libcall_value (value_mode);
8184 rtx saved_valreg = gen_reg_rtx (value_mode);
8185
8186 emit_move_insn (saved_valreg, valreg);
8187 temp = expand_call (exp, target, ignore);
8188 emit_move_insn (valreg, saved_valreg);
8189 }
8190 else
8191 /* Generate the call, putting the value in a pseudo. */
8192 temp = expand_call (exp, target, ignore);
8193 #endif
8194
8195 seq = get_insns ();
8196 end_sequence ();
8197
8198 saveregs_value = temp;
8199
8200 /* Put the sequence after the NOTE that starts the function.
8201 If this is inside a SEQUENCE, make the outer-level insn
8202 chain current, so the code is placed at the start of the
8203 function. */
8204 push_topmost_sequence ();
8205 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8206 pop_topmost_sequence ();
8207 return temp;
8208 }
8209
8210 /* __builtin_args_info (N) returns word N of the arg space info
8211 for the current function. The number and meanings of words
8212 is controlled by the definition of CUMULATIVE_ARGS. */
8213 case BUILT_IN_ARGS_INFO:
8214 {
8215 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8216 int i;
8217 int *word_ptr = (int *) &current_function_args_info;
8218 tree type, elts, result;
8219
8220 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8221 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8222 __FILE__, __LINE__);
8223
8224 if (arglist != 0)
8225 {
8226 tree arg = TREE_VALUE (arglist);
8227 if (TREE_CODE (arg) != INTEGER_CST)
8228 error ("argument of `__builtin_args_info' must be constant");
8229 else
8230 {
8231 int wordnum = TREE_INT_CST_LOW (arg);
8232
8233 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8234 error ("argument of `__builtin_args_info' out of range");
8235 else
8236 return GEN_INT (word_ptr[wordnum]);
8237 }
8238 }
8239 else
8240 error ("missing argument in `__builtin_args_info'");
8241
8242 return const0_rtx;
8243
8244 #if 0
8245 for (i = 0; i < nwords; i++)
8246 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8247
8248 type = build_array_type (integer_type_node,
8249 build_index_type (build_int_2 (nwords, 0)));
8250 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8251 TREE_CONSTANT (result) = 1;
8252 TREE_STATIC (result) = 1;
8253 result = build (INDIRECT_REF, build_pointer_type (type), result);
8254 TREE_CONSTANT (result) = 1;
8255 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8256 #endif
8257 }
8258
8259 /* Return the address of the first anonymous stack arg. */
8260 case BUILT_IN_NEXT_ARG:
8261 {
8262 tree fntype = TREE_TYPE (current_function_decl);
8263
8264 if ((TYPE_ARG_TYPES (fntype) == 0
8265 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8266 == void_type_node))
8267 && ! current_function_varargs)
8268 {
8269 error ("`va_start' used in function with fixed args");
8270 return const0_rtx;
8271 }
8272
8273 if (arglist)
8274 {
8275 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8276 tree arg = TREE_VALUE (arglist);
8277
8278 /* Strip off all nops for the sake of the comparison. This
8279 is not quite the same as STRIP_NOPS. It does more.
8280 We must also strip off INDIRECT_EXPR for C++ reference
8281 parameters. */
8282 while (TREE_CODE (arg) == NOP_EXPR
8283 || TREE_CODE (arg) == CONVERT_EXPR
8284 || TREE_CODE (arg) == NON_LVALUE_EXPR
8285 || TREE_CODE (arg) == INDIRECT_REF)
8286 arg = TREE_OPERAND (arg, 0);
8287 if (arg != last_parm)
8288 warning ("second parameter of `va_start' not last named argument");
8289 }
8290 else if (! current_function_varargs)
8291 /* Evidently an out of date version of <stdarg.h>; can't validate
8292 va_start's second argument, but can still work as intended. */
8293 warning ("`__builtin_next_arg' called without an argument");
8294 }
8295
8296 return expand_binop (Pmode, add_optab,
8297 current_function_internal_arg_pointer,
8298 current_function_arg_offset_rtx,
8299 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8300
8301 case BUILT_IN_CLASSIFY_TYPE:
8302 if (arglist != 0)
8303 {
8304 tree type = TREE_TYPE (TREE_VALUE (arglist));
8305 enum tree_code code = TREE_CODE (type);
8306 if (code == VOID_TYPE)
8307 return GEN_INT (void_type_class);
8308 if (code == INTEGER_TYPE)
8309 return GEN_INT (integer_type_class);
8310 if (code == CHAR_TYPE)
8311 return GEN_INT (char_type_class);
8312 if (code == ENUMERAL_TYPE)
8313 return GEN_INT (enumeral_type_class);
8314 if (code == BOOLEAN_TYPE)
8315 return GEN_INT (boolean_type_class);
8316 if (code == POINTER_TYPE)
8317 return GEN_INT (pointer_type_class);
8318 if (code == REFERENCE_TYPE)
8319 return GEN_INT (reference_type_class);
8320 if (code == OFFSET_TYPE)
8321 return GEN_INT (offset_type_class);
8322 if (code == REAL_TYPE)
8323 return GEN_INT (real_type_class);
8324 if (code == COMPLEX_TYPE)
8325 return GEN_INT (complex_type_class);
8326 if (code == FUNCTION_TYPE)
8327 return GEN_INT (function_type_class);
8328 if (code == METHOD_TYPE)
8329 return GEN_INT (method_type_class);
8330 if (code == RECORD_TYPE)
8331 return GEN_INT (record_type_class);
8332 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8333 return GEN_INT (union_type_class);
8334 if (code == ARRAY_TYPE)
8335 {
8336 if (TYPE_STRING_FLAG (type))
8337 return GEN_INT (string_type_class);
8338 else
8339 return GEN_INT (array_type_class);
8340 }
8341 if (code == SET_TYPE)
8342 return GEN_INT (set_type_class);
8343 if (code == FILE_TYPE)
8344 return GEN_INT (file_type_class);
8345 if (code == LANG_TYPE)
8346 return GEN_INT (lang_type_class);
8347 }
8348 return GEN_INT (no_type_class);
8349
8350 case BUILT_IN_CONSTANT_P:
8351 if (arglist == 0)
8352 return const0_rtx;
8353 else
8354 {
8355 tree arg = TREE_VALUE (arglist);
8356
8357 STRIP_NOPS (arg);
8358 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8359 || (TREE_CODE (arg) == ADDR_EXPR
8360 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8361 ? const1_rtx : const0_rtx);
8362 }
8363
8364 case BUILT_IN_FRAME_ADDRESS:
8365 /* The argument must be a nonnegative integer constant.
8366 It counts the number of frames to scan up the stack.
8367 The value is the address of that frame. */
8368 case BUILT_IN_RETURN_ADDRESS:
8369 /* The argument must be a nonnegative integer constant.
8370 It counts the number of frames to scan up the stack.
8371 The value is the return address saved in that frame. */
8372 if (arglist == 0)
8373 /* Warning about missing arg was already issued. */
8374 return const0_rtx;
8375 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8376 {
8377 error ("invalid arg to `__builtin_return_address'");
8378 return const0_rtx;
8379 }
8380 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8381 {
8382 error ("invalid arg to `__builtin_return_address'");
8383 return const0_rtx;
8384 }
8385 else
8386 {
8387 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8388 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8389 hard_frame_pointer_rtx);
8390
8391 /* For __builtin_frame_address, return what we've got. */
8392 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8393 return tem;
8394
8395 if (GET_CODE (tem) != REG)
8396 tem = copy_to_reg (tem);
8397 return tem;
8398 }
8399
8400 case BUILT_IN_ALLOCA:
8401 if (arglist == 0
8402 /* Arg could be non-integer if user redeclared this fcn wrong. */
8403 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8404 break;
8405
8406 /* Compute the argument. */
8407 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8408
8409 /* Allocate the desired space. */
8410 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8411
8412 case BUILT_IN_FFS:
8413 /* If not optimizing, call the library function. */
8414 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8415 break;
8416
8417 if (arglist == 0
8418 /* Arg could be non-integer if user redeclared this fcn wrong. */
8419 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8420 break;
8421
8422 /* Compute the argument. */
8423 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8424 /* Compute ffs, into TARGET if possible.
8425 Set TARGET to wherever the result comes back. */
8426 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8427 ffs_optab, op0, target, 1);
8428 if (target == 0)
8429 abort ();
8430 return target;
8431
8432 case BUILT_IN_STRLEN:
8433 /* If not optimizing, call the library function. */
8434 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8435 break;
8436
8437 if (arglist == 0
8438 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8439 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8440 break;
8441 else
8442 {
8443 tree src = TREE_VALUE (arglist);
8444 tree len = c_strlen (src);
8445
8446 int align
8447 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8448
8449 rtx result, src_rtx, char_rtx;
8450 enum machine_mode insn_mode = value_mode, char_mode;
8451 enum insn_code icode;
8452
8453 /* If the length is known, just return it. */
8454 if (len != 0)
8455 return expand_expr (len, target, mode, 0);
8456
8457 /* If SRC is not a pointer type, don't do this operation inline. */
8458 if (align == 0)
8459 break;
8460
8461 /* Call a function if we can't compute strlen in the right mode. */
8462
8463 while (insn_mode != VOIDmode)
8464 {
8465 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8466 if (icode != CODE_FOR_nothing)
8467 break;
8468
8469 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8470 }
8471 if (insn_mode == VOIDmode)
8472 break;
8473
8474 /* Make a place to write the result of the instruction. */
8475 result = target;
8476 if (! (result != 0
8477 && GET_CODE (result) == REG
8478 && GET_MODE (result) == insn_mode
8479 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8480 result = gen_reg_rtx (insn_mode);
8481
8482 /* Make sure the operands are acceptable to the predicates. */
8483
8484 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8485 result = gen_reg_rtx (insn_mode);
8486
8487 src_rtx = memory_address (BLKmode,
8488 expand_expr (src, NULL_RTX, ptr_mode,
8489 EXPAND_NORMAL));
8490 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8491 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8492
8493 char_rtx = const0_rtx;
8494 char_mode = insn_operand_mode[(int)icode][2];
8495 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8496 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8497
8498 emit_insn (GEN_FCN (icode) (result,
8499 gen_rtx (MEM, BLKmode, src_rtx),
8500 char_rtx, GEN_INT (align)));
8501
8502 /* Return the value in the proper mode for this function. */
8503 if (GET_MODE (result) == value_mode)
8504 return result;
8505 else if (target != 0)
8506 {
8507 convert_move (target, result, 0);
8508 return target;
8509 }
8510 else
8511 return convert_to_mode (value_mode, result, 0);
8512 }
8513
8514 case BUILT_IN_STRCPY:
8515 /* If not optimizing, call the library function. */
8516 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8517 break;
8518
8519 if (arglist == 0
8520 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8521 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8522 || TREE_CHAIN (arglist) == 0
8523 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8524 break;
8525 else
8526 {
8527 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8528
8529 if (len == 0)
8530 break;
8531
8532 len = size_binop (PLUS_EXPR, len, integer_one_node);
8533
8534 chainon (arglist, build_tree_list (NULL_TREE, len));
8535 }
8536
8537 /* Drops in. */
8538 case BUILT_IN_MEMCPY:
8539 /* If not optimizing, call the library function. */
8540 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8541 break;
8542
8543 if (arglist == 0
8544 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8545 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8546 || TREE_CHAIN (arglist) == 0
8547 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8548 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8549 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8550 break;
8551 else
8552 {
8553 tree dest = TREE_VALUE (arglist);
8554 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8555 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8556 tree type;
8557
8558 int src_align
8559 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8560 int dest_align
8561 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8562 rtx dest_rtx, dest_mem, src_mem;
8563
8564 /* If either SRC or DEST is not a pointer type, don't do
8565 this operation in-line. */
8566 if (src_align == 0 || dest_align == 0)
8567 {
8568 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8569 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8570 break;
8571 }
8572
8573 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8574 dest_mem = gen_rtx (MEM, BLKmode,
8575 memory_address (BLKmode, dest_rtx));
8576 /* There could be a void* cast on top of the object. */
8577 while (TREE_CODE (dest) == NOP_EXPR)
8578 dest = TREE_OPERAND (dest, 0);
8579 type = TREE_TYPE (TREE_TYPE (dest));
8580 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8581 src_mem = gen_rtx (MEM, BLKmode,
8582 memory_address (BLKmode,
8583 expand_expr (src, NULL_RTX,
8584 ptr_mode,
8585 EXPAND_SUM)));
8586 /* There could be a void* cast on top of the object. */
8587 while (TREE_CODE (src) == NOP_EXPR)
8588 src = TREE_OPERAND (src, 0);
8589 type = TREE_TYPE (TREE_TYPE (src));
8590 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8591
8592 /* Copy word part most expediently. */
8593 emit_block_move (dest_mem, src_mem,
8594 expand_expr (len, NULL_RTX, VOIDmode, 0),
8595 MIN (src_align, dest_align));
8596 return force_operand (dest_rtx, NULL_RTX);
8597 }
8598
8599 case BUILT_IN_MEMSET:
8600 /* If not optimizing, call the library function. */
8601 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8602 break;
8603
8604 if (arglist == 0
8605 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8606 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8607 || TREE_CHAIN (arglist) == 0
8608 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8609 != INTEGER_TYPE)
8610 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8611 || (INTEGER_CST
8612 != (TREE_CODE (TREE_TYPE
8613 (TREE_VALUE
8614 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8615 break;
8616 else
8617 {
8618 tree dest = TREE_VALUE (arglist);
8619 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8620 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8621 tree type;
8622
8623 int dest_align
8624 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8625 rtx dest_rtx, dest_mem;
8626
8627 /* If DEST is not a pointer type, don't do this
8628 operation in-line. */
8629 if (dest_align == 0)
8630 break;
8631
8632 /* If VAL is not 0, don't do this operation in-line. */
8633 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8634 break;
8635
8636 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8637 dest_mem = gen_rtx (MEM, BLKmode,
8638 memory_address (BLKmode, dest_rtx));
8639 /* There could be a void* cast on top of the object. */
8640 while (TREE_CODE (dest) == NOP_EXPR)
8641 dest = TREE_OPERAND (dest, 0);
8642 type = TREE_TYPE (TREE_TYPE (dest));
8643 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8644
8645 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8646 dest_align);
8647
8648 return force_operand (dest_rtx, NULL_RTX);
8649 }
8650
8651 /* These comparison functions need an instruction that returns an actual
8652 index. An ordinary compare that just sets the condition codes
8653 is not enough. */
8654 #ifdef HAVE_cmpstrsi
8655 case BUILT_IN_STRCMP:
8656 /* If not optimizing, call the library function. */
8657 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8658 break;
8659
8660 if (arglist == 0
8661 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8662 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8663 || TREE_CHAIN (arglist) == 0
8664 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8665 break;
8666 else if (!HAVE_cmpstrsi)
8667 break;
8668 {
8669 tree arg1 = TREE_VALUE (arglist);
8670 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8671 tree offset;
8672 tree len, len2;
8673
8674 len = c_strlen (arg1);
8675 if (len)
8676 len = size_binop (PLUS_EXPR, integer_one_node, len);
8677 len2 = c_strlen (arg2);
8678 if (len2)
8679 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8680
8681 /* If we don't have a constant length for the first, use the length
8682 of the second, if we know it. We don't require a constant for
8683 this case; some cost analysis could be done if both are available
8684 but neither is constant. For now, assume they're equally cheap.
8685
8686 If both strings have constant lengths, use the smaller. This
8687 could arise if optimization results in strcpy being called with
8688 two fixed strings, or if the code was machine-generated. We should
8689 add some code to the `memcmp' handler below to deal with such
8690 situations, someday. */
8691 if (!len || TREE_CODE (len) != INTEGER_CST)
8692 {
8693 if (len2)
8694 len = len2;
8695 else if (len == 0)
8696 break;
8697 }
8698 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8699 {
8700 if (tree_int_cst_lt (len2, len))
8701 len = len2;
8702 }
8703
8704 chainon (arglist, build_tree_list (NULL_TREE, len));
8705 }
8706
8707 /* Drops in. */
8708 case BUILT_IN_MEMCMP:
8709 /* If not optimizing, call the library function. */
8710 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8711 break;
8712
8713 if (arglist == 0
8714 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8715 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8716 || TREE_CHAIN (arglist) == 0
8717 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8718 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8719 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8720 break;
8721 else if (!HAVE_cmpstrsi)
8722 break;
8723 {
8724 tree arg1 = TREE_VALUE (arglist);
8725 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8726 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8727 rtx result;
8728
8729 int arg1_align
8730 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8731 int arg2_align
8732 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8733 enum machine_mode insn_mode
8734 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8735
8736 /* If we don't have POINTER_TYPE, call the function. */
8737 if (arg1_align == 0 || arg2_align == 0)
8738 {
8739 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8740 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8741 break;
8742 }
8743
8744 /* Make a place to write the result of the instruction. */
8745 result = target;
8746 if (! (result != 0
8747 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8748 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8749 result = gen_reg_rtx (insn_mode);
8750
8751 emit_insn (gen_cmpstrsi (result,
8752 gen_rtx (MEM, BLKmode,
8753 expand_expr (arg1, NULL_RTX,
8754 ptr_mode,
8755 EXPAND_NORMAL)),
8756 gen_rtx (MEM, BLKmode,
8757 expand_expr (arg2, NULL_RTX,
8758 ptr_mode,
8759 EXPAND_NORMAL)),
8760 expand_expr (len, NULL_RTX, VOIDmode, 0),
8761 GEN_INT (MIN (arg1_align, arg2_align))));
8762
8763 /* Return the value in the proper mode for this function. */
8764 mode = TYPE_MODE (TREE_TYPE (exp));
8765 if (GET_MODE (result) == mode)
8766 return result;
8767 else if (target != 0)
8768 {
8769 convert_move (target, result, 0);
8770 return target;
8771 }
8772 else
8773 return convert_to_mode (mode, result, 0);
8774 }
8775 #else
8776 case BUILT_IN_STRCMP:
8777 case BUILT_IN_MEMCMP:
8778 break;
8779 #endif
8780
8781 /* __builtin_setjmp is passed a pointer to an array of five words
8782 (not all will be used on all machines). It operates similarly to
8783 the C library function of the same name, but is more efficient.
8784 Much of the code below (and for longjmp) is copied from the handling
8785 of non-local gotos.
8786
8787 NOTE: This is intended for use by GNAT and will only work in
8788 the method used by it. This code will likely NOT survive to
8789 the GCC 2.8.0 release. */
8790 case BUILT_IN_SETJMP:
8791 if (arglist == 0
8792 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8793 break;
8794
8795 {
8796 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8797 VOIDmode, 0);
8798 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8799 enum machine_mode sa_mode = Pmode;
8800 rtx stack_save;
8801 int old_inhibit_defer_pop = inhibit_defer_pop;
8802 int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8803 get_identifier ("__dummy"), 0);
8804 rtx next_arg_reg;
8805 CUMULATIVE_ARGS args_so_far;
8806 int i;
8807
8808 #ifdef POINTERS_EXTEND_UNSIGNED
8809 buf_addr = convert_memory_address (Pmode, buf_addr);
8810 #endif
8811
8812 buf_addr = force_reg (Pmode, buf_addr);
8813
8814 if (target == 0 || GET_CODE (target) != REG
8815 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8816 target = gen_reg_rtx (value_mode);
8817
8818 emit_queue ();
8819
8820 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8821 current_function_calls_setjmp = 1;
8822
8823 /* We store the frame pointer and the address of lab1 in the buffer
8824 and use the rest of it for the stack save area, which is
8825 machine-dependent. */
8826 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8827 virtual_stack_vars_rtx);
8828 emit_move_insn
8829 (validize_mem (gen_rtx (MEM, Pmode,
8830 plus_constant (buf_addr,
8831 GET_MODE_SIZE (Pmode)))),
8832 gen_rtx (LABEL_REF, Pmode, lab1));
8833
8834 #ifdef HAVE_save_stack_nonlocal
8835 if (HAVE_save_stack_nonlocal)
8836 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8837 #endif
8838
8839 stack_save = gen_rtx (MEM, sa_mode,
8840 plus_constant (buf_addr,
8841 2 * GET_MODE_SIZE (Pmode)));
8842 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8843
8844 #ifdef HAVE_setjmp
8845 if (HAVE_setjmp)
8846 emit_insn (gen_setjmp ());
8847 #endif
8848
8849 /* Set TARGET to zero and branch around the other case. */
8850 emit_move_insn (target, const0_rtx);
8851 emit_jump_insn (gen_jump (lab2));
8852 emit_barrier ();
8853 emit_label (lab1);
8854
8855 /* Note that setjmp clobbers FP when we get here, so we have to
8856 make sure it's marked as used by this function. */
8857 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8858
8859 /* Mark the static chain as clobbered here so life information
8860 doesn't get messed up for it. */
8861 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8862
8863 /* Now put in the code to restore the frame pointer, and argument
8864 pointer, if needed. The code below is from expand_end_bindings
8865 in stmt.c; see detailed documentation there. */
8866 #ifdef HAVE_nonlocal_goto
8867 if (! HAVE_nonlocal_goto)
8868 #endif
8869 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8870
8871 current_function_has_nonlocal_goto = 1;
8872
8873 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8874 if (fixed_regs[ARG_POINTER_REGNUM])
8875 {
8876 #ifdef ELIMINABLE_REGS
8877 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8878
8879 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8880 if (elim_regs[i].from == ARG_POINTER_REGNUM
8881 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8882 break;
8883
8884 if (i == sizeof elim_regs / sizeof elim_regs [0])
8885 #endif
8886 {
8887 /* Now restore our arg pointer from the address at which it
8888 was saved in our stack frame.
8889 If there hasn't be space allocated for it yet, make
8890 some now. */
8891 if (arg_pointer_save_area == 0)
8892 arg_pointer_save_area
8893 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8894 emit_move_insn (virtual_incoming_args_rtx,
8895 copy_to_reg (arg_pointer_save_area));
8896 }
8897 }
8898 #endif
8899
8900 #ifdef HAVE_nonlocal_goto_receiver
8901 if (HAVE_nonlocal_goto_receiver)
8902 emit_insn (gen_nonlocal_goto_receiver ());
8903 #endif
8904 /* The static chain pointer contains the address of dummy function.
8905 We need to call it here to handle some PIC cases of restoring
8906 a global pointer. Then return 1. */
8907 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8908
8909 /* We can't actually call emit_library_call here, so do everything
8910 it does, which isn't much for a libfunc with no args. */
8911 op0 = memory_address (FUNCTION_MODE, op0);
8912
8913 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8914 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8915 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8916
8917 #ifndef ACCUMULATE_OUTGOING_ARGS
8918 #ifdef HAVE_call_pop
8919 if (HAVE_call_pop)
8920 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8921 const0_rtx, next_arg_reg,
8922 GEN_INT (return_pops)));
8923 else
8924 #endif
8925 #endif
8926
8927 #ifdef HAVE_call
8928 if (HAVE_call)
8929 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8930 const0_rtx, next_arg_reg, const0_rtx));
8931 else
8932 #endif
8933 abort ();
8934
8935 emit_move_insn (target, const1_rtx);
8936 emit_label (lab2);
8937 return target;
8938 }
8939
8940 /* __builtin_longjmp is passed a pointer to an array of five words
8941 and a value, which is a dummy. It's similar to the C library longjmp
8942 function but works with __builtin_setjmp above. */
8943 case BUILT_IN_LONGJMP:
8944 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8945 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8946 break;
8947
8948 {
8949 tree dummy_id = get_identifier ("__dummy");
8950 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
8951 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
8952 #ifdef POINTERS_EXTEND_UNSIGNED
8953 rtx buf_addr
8954 = force_reg (Pmode,
8955 convert_memory_address
8956 (Pmode,
8957 expand_expr (TREE_VALUE (arglist),
8958 NULL_RTX, VOIDmode, 0)));
8959 #else
8960 rtx buf_addr
8961 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
8962 NULL_RTX,
8963 VOIDmode, 0));
8964 #endif
8965 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8966 rtx lab = gen_rtx (MEM, Pmode,
8967 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8968 enum machine_mode sa_mode
8969 #ifdef HAVE_save_stack_nonlocal
8970 = (HAVE_save_stack_nonlocal
8971 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8972 : Pmode);
8973 #else
8974 = Pmode;
8975 #endif
8976 rtx stack = gen_rtx (MEM, sa_mode,
8977 plus_constant (buf_addr,
8978 2 * GET_MODE_SIZE (Pmode)));
8979
8980 DECL_EXTERNAL (dummy_decl) = 1;
8981 TREE_PUBLIC (dummy_decl) = 1;
8982 make_decl_rtl (dummy_decl, NULL_PTR, 1);
8983
8984 /* Expand the second expression just for side-effects. */
8985 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8986 const0_rtx, VOIDmode, 0);
8987
8988 assemble_external (dummy_decl);
8989
8990 /* Pick up FP, label, and SP from the block and jump. This code is
8991 from expand_goto in stmt.c; see there for detailed comments. */
8992 #if HAVE_nonlocal_goto
8993 if (HAVE_nonlocal_goto)
8994 emit_insn (gen_nonlocal_goto (fp, lab, stack,
8995 XEXP (DECL_RTL (dummy_decl), 0)));
8996 else
8997 #endif
8998 {
8999 lab = copy_to_reg (lab);
9000 emit_move_insn (hard_frame_pointer_rtx, fp);
9001 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9002
9003 /* Put in the static chain register the address of the dummy
9004 function. */
9005 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9006 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9007 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9008 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9009 emit_indirect_jump (lab);
9010 }
9011
9012 return const0_rtx;
9013 }
9014
9015 default: /* just do library call, if unknown builtin */
9016 error ("built-in function `%s' not currently supported",
9017 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9018 }
9019
9020 /* The switch statement above can drop through to cause the function
9021 to be called normally. */
9022
9023 return expand_call (exp, target, ignore);
9024 }
9025 \f
9026 /* Built-in functions to perform an untyped call and return. */
9027
9028 /* For each register that may be used for calling a function, this
9029 gives a mode used to copy the register's value. VOIDmode indicates
9030 the register is not used for calling a function. If the machine
9031 has register windows, this gives only the outbound registers.
9032 INCOMING_REGNO gives the corresponding inbound register. */
9033 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9034
9035 /* For each register that may be used for returning values, this gives
9036 a mode used to copy the register's value. VOIDmode indicates the
9037 register is not used for returning values. If the machine has
9038 register windows, this gives only the outbound registers.
9039 INCOMING_REGNO gives the corresponding inbound register. */
9040 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9041
9042 /* For each register that may be used for calling a function, this
9043 gives the offset of that register into the block returned by
9044 __builtin_apply_args. 0 indicates that the register is not
9045 used for calling a function. */
9046 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9047
9048 /* Return the offset of register REGNO into the block returned by
9049 __builtin_apply_args. This is not declared static, since it is
9050 needed in objc-act.c. */
9051
9052 int
9053 apply_args_register_offset (regno)
9054 int regno;
9055 {
9056 apply_args_size ();
9057
9058 /* Arguments are always put in outgoing registers (in the argument
9059 block) if such make sense. */
9060 #ifdef OUTGOING_REGNO
9061 regno = OUTGOING_REGNO(regno);
9062 #endif
9063 return apply_args_reg_offset[regno];
9064 }
9065
9066 /* Return the size required for the block returned by __builtin_apply_args,
9067 and initialize apply_args_mode. */
9068
9069 static int
9070 apply_args_size ()
9071 {
9072 static int size = -1;
9073 int align, regno;
9074 enum machine_mode mode;
9075
9076 /* The values computed by this function never change. */
9077 if (size < 0)
9078 {
9079 /* The first value is the incoming arg-pointer. */
9080 size = GET_MODE_SIZE (Pmode);
9081
9082 /* The second value is the structure value address unless this is
9083 passed as an "invisible" first argument. */
9084 if (struct_value_rtx)
9085 size += GET_MODE_SIZE (Pmode);
9086
9087 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9088 if (FUNCTION_ARG_REGNO_P (regno))
9089 {
9090 /* Search for the proper mode for copying this register's
9091 value. I'm not sure this is right, but it works so far. */
9092 enum machine_mode best_mode = VOIDmode;
9093
9094 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9095 mode != VOIDmode;
9096 mode = GET_MODE_WIDER_MODE (mode))
9097 if (HARD_REGNO_MODE_OK (regno, mode)
9098 && HARD_REGNO_NREGS (regno, mode) == 1)
9099 best_mode = mode;
9100
9101 if (best_mode == VOIDmode)
9102 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9103 mode != VOIDmode;
9104 mode = GET_MODE_WIDER_MODE (mode))
9105 if (HARD_REGNO_MODE_OK (regno, mode)
9106 && (mov_optab->handlers[(int) mode].insn_code
9107 != CODE_FOR_nothing))
9108 best_mode = mode;
9109
9110 mode = best_mode;
9111 if (mode == VOIDmode)
9112 abort ();
9113
9114 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9115 if (size % align != 0)
9116 size = CEIL (size, align) * align;
9117 apply_args_reg_offset[regno] = size;
9118 size += GET_MODE_SIZE (mode);
9119 apply_args_mode[regno] = mode;
9120 }
9121 else
9122 {
9123 apply_args_mode[regno] = VOIDmode;
9124 apply_args_reg_offset[regno] = 0;
9125 }
9126 }
9127 return size;
9128 }
9129
9130 /* Return the size required for the block returned by __builtin_apply,
9131 and initialize apply_result_mode. */
9132
9133 static int
9134 apply_result_size ()
9135 {
9136 static int size = -1;
9137 int align, regno;
9138 enum machine_mode mode;
9139
9140 /* The values computed by this function never change. */
9141 if (size < 0)
9142 {
9143 size = 0;
9144
9145 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9146 if (FUNCTION_VALUE_REGNO_P (regno))
9147 {
9148 /* Search for the proper mode for copying this register's
9149 value. I'm not sure this is right, but it works so far. */
9150 enum machine_mode best_mode = VOIDmode;
9151
9152 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9153 mode != TImode;
9154 mode = GET_MODE_WIDER_MODE (mode))
9155 if (HARD_REGNO_MODE_OK (regno, mode))
9156 best_mode = mode;
9157
9158 if (best_mode == VOIDmode)
9159 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9160 mode != VOIDmode;
9161 mode = GET_MODE_WIDER_MODE (mode))
9162 if (HARD_REGNO_MODE_OK (regno, mode)
9163 && (mov_optab->handlers[(int) mode].insn_code
9164 != CODE_FOR_nothing))
9165 best_mode = mode;
9166
9167 mode = best_mode;
9168 if (mode == VOIDmode)
9169 abort ();
9170
9171 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9172 if (size % align != 0)
9173 size = CEIL (size, align) * align;
9174 size += GET_MODE_SIZE (mode);
9175 apply_result_mode[regno] = mode;
9176 }
9177 else
9178 apply_result_mode[regno] = VOIDmode;
9179
9180 /* Allow targets that use untyped_call and untyped_return to override
9181 the size so that machine-specific information can be stored here. */
9182 #ifdef APPLY_RESULT_SIZE
9183 size = APPLY_RESULT_SIZE;
9184 #endif
9185 }
9186 return size;
9187 }
9188
9189 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9190 /* Create a vector describing the result block RESULT. If SAVEP is true,
9191 the result block is used to save the values; otherwise it is used to
9192 restore the values. */
9193
9194 static rtx
9195 result_vector (savep, result)
9196 int savep;
9197 rtx result;
9198 {
9199 int regno, size, align, nelts;
9200 enum machine_mode mode;
9201 rtx reg, mem;
9202 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9203
9204 size = nelts = 0;
9205 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9206 if ((mode = apply_result_mode[regno]) != VOIDmode)
9207 {
9208 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9209 if (size % align != 0)
9210 size = CEIL (size, align) * align;
9211 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9212 mem = change_address (result, mode,
9213 plus_constant (XEXP (result, 0), size));
9214 savevec[nelts++] = (savep
9215 ? gen_rtx (SET, VOIDmode, mem, reg)
9216 : gen_rtx (SET, VOIDmode, reg, mem));
9217 size += GET_MODE_SIZE (mode);
9218 }
9219 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9220 }
9221 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9222
9223 /* Save the state required to perform an untyped call with the same
9224 arguments as were passed to the current function. */
9225
9226 static rtx
9227 expand_builtin_apply_args ()
9228 {
9229 rtx registers;
9230 int size, align, regno;
9231 enum machine_mode mode;
9232
9233 /* Create a block where the arg-pointer, structure value address,
9234 and argument registers can be saved. */
9235 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9236
9237 /* Walk past the arg-pointer and structure value address. */
9238 size = GET_MODE_SIZE (Pmode);
9239 if (struct_value_rtx)
9240 size += GET_MODE_SIZE (Pmode);
9241
9242 /* Save each register used in calling a function to the block. */
9243 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9244 if ((mode = apply_args_mode[regno]) != VOIDmode)
9245 {
9246 rtx tem;
9247
9248 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9249 if (size % align != 0)
9250 size = CEIL (size, align) * align;
9251
9252 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9253
9254 #ifdef STACK_REGS
9255 /* For reg-stack.c's stack register household.
9256 Compare with a similar piece of code in function.c. */
9257
9258 emit_insn (gen_rtx (USE, mode, tem));
9259 #endif
9260
9261 emit_move_insn (change_address (registers, mode,
9262 plus_constant (XEXP (registers, 0),
9263 size)),
9264 tem);
9265 size += GET_MODE_SIZE (mode);
9266 }
9267
9268 /* Save the arg pointer to the block. */
9269 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9270 copy_to_reg (virtual_incoming_args_rtx));
9271 size = GET_MODE_SIZE (Pmode);
9272
9273 /* Save the structure value address unless this is passed as an
9274 "invisible" first argument. */
9275 if (struct_value_incoming_rtx)
9276 {
9277 emit_move_insn (change_address (registers, Pmode,
9278 plus_constant (XEXP (registers, 0),
9279 size)),
9280 copy_to_reg (struct_value_incoming_rtx));
9281 size += GET_MODE_SIZE (Pmode);
9282 }
9283
9284 /* Return the address of the block. */
9285 return copy_addr_to_reg (XEXP (registers, 0));
9286 }
9287
9288 /* Perform an untyped call and save the state required to perform an
9289 untyped return of whatever value was returned by the given function. */
9290
9291 static rtx
9292 expand_builtin_apply (function, arguments, argsize)
9293 rtx function, arguments, argsize;
9294 {
9295 int size, align, regno;
9296 enum machine_mode mode;
9297 rtx incoming_args, result, reg, dest, call_insn;
9298 rtx old_stack_level = 0;
9299 rtx call_fusage = 0;
9300
9301 /* Create a block where the return registers can be saved. */
9302 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9303
9304 /* ??? The argsize value should be adjusted here. */
9305
9306 /* Fetch the arg pointer from the ARGUMENTS block. */
9307 incoming_args = gen_reg_rtx (Pmode);
9308 emit_move_insn (incoming_args,
9309 gen_rtx (MEM, Pmode, arguments));
9310 #ifndef STACK_GROWS_DOWNWARD
9311 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9312 incoming_args, 0, OPTAB_LIB_WIDEN);
9313 #endif
9314
9315 /* Perform postincrements before actually calling the function. */
9316 emit_queue ();
9317
9318 /* Push a new argument block and copy the arguments. */
9319 do_pending_stack_adjust ();
9320 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9321
9322 /* Push a block of memory onto the stack to store the memory arguments.
9323 Save the address in a register, and copy the memory arguments. ??? I
9324 haven't figured out how the calling convention macros effect this,
9325 but it's likely that the source and/or destination addresses in
9326 the block copy will need updating in machine specific ways. */
9327 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9328 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9329 gen_rtx (MEM, BLKmode, incoming_args),
9330 argsize,
9331 PARM_BOUNDARY / BITS_PER_UNIT);
9332
9333 /* Refer to the argument block. */
9334 apply_args_size ();
9335 arguments = gen_rtx (MEM, BLKmode, arguments);
9336
9337 /* Walk past the arg-pointer and structure value address. */
9338 size = GET_MODE_SIZE (Pmode);
9339 if (struct_value_rtx)
9340 size += GET_MODE_SIZE (Pmode);
9341
9342 /* Restore each of the registers previously saved. Make USE insns
9343 for each of these registers for use in making the call. */
9344 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9345 if ((mode = apply_args_mode[regno]) != VOIDmode)
9346 {
9347 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9348 if (size % align != 0)
9349 size = CEIL (size, align) * align;
9350 reg = gen_rtx (REG, mode, regno);
9351 emit_move_insn (reg,
9352 change_address (arguments, mode,
9353 plus_constant (XEXP (arguments, 0),
9354 size)));
9355
9356 use_reg (&call_fusage, reg);
9357 size += GET_MODE_SIZE (mode);
9358 }
9359
9360 /* Restore the structure value address unless this is passed as an
9361 "invisible" first argument. */
9362 size = GET_MODE_SIZE (Pmode);
9363 if (struct_value_rtx)
9364 {
9365 rtx value = gen_reg_rtx (Pmode);
9366 emit_move_insn (value,
9367 change_address (arguments, Pmode,
9368 plus_constant (XEXP (arguments, 0),
9369 size)));
9370 emit_move_insn (struct_value_rtx, value);
9371 if (GET_CODE (struct_value_rtx) == REG)
9372 use_reg (&call_fusage, struct_value_rtx);
9373 size += GET_MODE_SIZE (Pmode);
9374 }
9375
9376 /* All arguments and registers used for the call are set up by now! */
9377 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9378
9379 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9380 and we don't want to load it into a register as an optimization,
9381 because prepare_call_address already did it if it should be done. */
9382 if (GET_CODE (function) != SYMBOL_REF)
9383 function = memory_address (FUNCTION_MODE, function);
9384
9385 /* Generate the actual call instruction and save the return value. */
9386 #ifdef HAVE_untyped_call
9387 if (HAVE_untyped_call)
9388 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9389 result, result_vector (1, result)));
9390 else
9391 #endif
9392 #ifdef HAVE_call_value
9393 if (HAVE_call_value)
9394 {
9395 rtx valreg = 0;
9396
9397 /* Locate the unique return register. It is not possible to
9398 express a call that sets more than one return register using
9399 call_value; use untyped_call for that. In fact, untyped_call
9400 only needs to save the return registers in the given block. */
9401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9402 if ((mode = apply_result_mode[regno]) != VOIDmode)
9403 {
9404 if (valreg)
9405 abort (); /* HAVE_untyped_call required. */
9406 valreg = gen_rtx (REG, mode, regno);
9407 }
9408
9409 emit_call_insn (gen_call_value (valreg,
9410 gen_rtx (MEM, FUNCTION_MODE, function),
9411 const0_rtx, NULL_RTX, const0_rtx));
9412
9413 emit_move_insn (change_address (result, GET_MODE (valreg),
9414 XEXP (result, 0)),
9415 valreg);
9416 }
9417 else
9418 #endif
9419 abort ();
9420
9421 /* Find the CALL insn we just emitted. */
9422 for (call_insn = get_last_insn ();
9423 call_insn && GET_CODE (call_insn) != CALL_INSN;
9424 call_insn = PREV_INSN (call_insn))
9425 ;
9426
9427 if (! call_insn)
9428 abort ();
9429
9430 /* Put the register usage information on the CALL. If there is already
9431 some usage information, put ours at the end. */
9432 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9433 {
9434 rtx link;
9435
9436 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9437 link = XEXP (link, 1))
9438 ;
9439
9440 XEXP (link, 1) = call_fusage;
9441 }
9442 else
9443 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9444
9445 /* Restore the stack. */
9446 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9447
9448 /* Return the address of the result block. */
9449 return copy_addr_to_reg (XEXP (result, 0));
9450 }
9451
9452 /* Perform an untyped return. */
9453
9454 static void
9455 expand_builtin_return (result)
9456 rtx result;
9457 {
9458 int size, align, regno;
9459 enum machine_mode mode;
9460 rtx reg;
9461 rtx call_fusage = 0;
9462
9463 apply_result_size ();
9464 result = gen_rtx (MEM, BLKmode, result);
9465
9466 #ifdef HAVE_untyped_return
9467 if (HAVE_untyped_return)
9468 {
9469 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9470 emit_barrier ();
9471 return;
9472 }
9473 #endif
9474
9475 /* Restore the return value and note that each value is used. */
9476 size = 0;
9477 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9478 if ((mode = apply_result_mode[regno]) != VOIDmode)
9479 {
9480 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9481 if (size % align != 0)
9482 size = CEIL (size, align) * align;
9483 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9484 emit_move_insn (reg,
9485 change_address (result, mode,
9486 plus_constant (XEXP (result, 0),
9487 size)));
9488
9489 push_to_sequence (call_fusage);
9490 emit_insn (gen_rtx (USE, VOIDmode, reg));
9491 call_fusage = get_insns ();
9492 end_sequence ();
9493 size += GET_MODE_SIZE (mode);
9494 }
9495
9496 /* Put the USE insns before the return. */
9497 emit_insns (call_fusage);
9498
9499 /* Return whatever values was restored by jumping directly to the end
9500 of the function. */
9501 expand_null_return ();
9502 }
9503 \f
9504 /* Expand code for a post- or pre- increment or decrement
9505 and return the RTX for the result.
9506 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9507
9508 static rtx
9509 expand_increment (exp, post, ignore)
9510 register tree exp;
9511 int post, ignore;
9512 {
9513 register rtx op0, op1;
9514 register rtx temp, value;
9515 register tree incremented = TREE_OPERAND (exp, 0);
9516 optab this_optab = add_optab;
9517 int icode;
9518 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9519 int op0_is_copy = 0;
9520 int single_insn = 0;
9521 /* 1 means we can't store into OP0 directly,
9522 because it is a subreg narrower than a word,
9523 and we don't dare clobber the rest of the word. */
9524 int bad_subreg = 0;
9525
9526 if (output_bytecode)
9527 {
9528 bc_expand_expr (exp);
9529 return NULL_RTX;
9530 }
9531
9532 /* Stabilize any component ref that might need to be
9533 evaluated more than once below. */
9534 if (!post
9535 || TREE_CODE (incremented) == BIT_FIELD_REF
9536 || (TREE_CODE (incremented) == COMPONENT_REF
9537 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9538 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9539 incremented = stabilize_reference (incremented);
9540 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9541 ones into save exprs so that they don't accidentally get evaluated
9542 more than once by the code below. */
9543 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9544 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9545 incremented = save_expr (incremented);
9546
9547 /* Compute the operands as RTX.
9548 Note whether OP0 is the actual lvalue or a copy of it:
9549 I believe it is a copy iff it is a register or subreg
9550 and insns were generated in computing it. */
9551
9552 temp = get_last_insn ();
9553 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9554
9555 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9556 in place but instead must do sign- or zero-extension during assignment,
9557 so we copy it into a new register and let the code below use it as
9558 a copy.
9559
9560 Note that we can safely modify this SUBREG since it is know not to be
9561 shared (it was made by the expand_expr call above). */
9562
9563 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9564 {
9565 if (post)
9566 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9567 else
9568 bad_subreg = 1;
9569 }
9570 else if (GET_CODE (op0) == SUBREG
9571 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9572 {
9573 /* We cannot increment this SUBREG in place. If we are
9574 post-incrementing, get a copy of the old value. Otherwise,
9575 just mark that we cannot increment in place. */
9576 if (post)
9577 op0 = copy_to_reg (op0);
9578 else
9579 bad_subreg = 1;
9580 }
9581
9582 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9583 && temp != get_last_insn ());
9584 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9585
9586 /* Decide whether incrementing or decrementing. */
9587 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9588 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9589 this_optab = sub_optab;
9590
9591 /* Convert decrement by a constant into a negative increment. */
9592 if (this_optab == sub_optab
9593 && GET_CODE (op1) == CONST_INT)
9594 {
9595 op1 = GEN_INT (- INTVAL (op1));
9596 this_optab = add_optab;
9597 }
9598
9599 /* For a preincrement, see if we can do this with a single instruction. */
9600 if (!post)
9601 {
9602 icode = (int) this_optab->handlers[(int) mode].insn_code;
9603 if (icode != (int) CODE_FOR_nothing
9604 /* Make sure that OP0 is valid for operands 0 and 1
9605 of the insn we want to queue. */
9606 && (*insn_operand_predicate[icode][0]) (op0, mode)
9607 && (*insn_operand_predicate[icode][1]) (op0, mode)
9608 && (*insn_operand_predicate[icode][2]) (op1, mode))
9609 single_insn = 1;
9610 }
9611
9612 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9613 then we cannot just increment OP0. We must therefore contrive to
9614 increment the original value. Then, for postincrement, we can return
9615 OP0 since it is a copy of the old value. For preincrement, expand here
9616 unless we can do it with a single insn.
9617
9618 Likewise if storing directly into OP0 would clobber high bits
9619 we need to preserve (bad_subreg). */
9620 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9621 {
9622 /* This is the easiest way to increment the value wherever it is.
9623 Problems with multiple evaluation of INCREMENTED are prevented
9624 because either (1) it is a component_ref or preincrement,
9625 in which case it was stabilized above, or (2) it is an array_ref
9626 with constant index in an array in a register, which is
9627 safe to reevaluate. */
9628 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9629 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9630 ? MINUS_EXPR : PLUS_EXPR),
9631 TREE_TYPE (exp),
9632 incremented,
9633 TREE_OPERAND (exp, 1));
9634
9635 while (TREE_CODE (incremented) == NOP_EXPR
9636 || TREE_CODE (incremented) == CONVERT_EXPR)
9637 {
9638 newexp = convert (TREE_TYPE (incremented), newexp);
9639 incremented = TREE_OPERAND (incremented, 0);
9640 }
9641
9642 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9643 return post ? op0 : temp;
9644 }
9645
9646 if (post)
9647 {
9648 /* We have a true reference to the value in OP0.
9649 If there is an insn to add or subtract in this mode, queue it.
9650 Queueing the increment insn avoids the register shuffling
9651 that often results if we must increment now and first save
9652 the old value for subsequent use. */
9653
9654 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9655 op0 = stabilize (op0);
9656 #endif
9657
9658 icode = (int) this_optab->handlers[(int) mode].insn_code;
9659 if (icode != (int) CODE_FOR_nothing
9660 /* Make sure that OP0 is valid for operands 0 and 1
9661 of the insn we want to queue. */
9662 && (*insn_operand_predicate[icode][0]) (op0, mode)
9663 && (*insn_operand_predicate[icode][1]) (op0, mode))
9664 {
9665 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9666 op1 = force_reg (mode, op1);
9667
9668 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9669 }
9670 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9671 {
9672 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9673 rtx temp, result;
9674
9675 op0 = change_address (op0, VOIDmode, addr);
9676 temp = force_reg (GET_MODE (op0), op0);
9677 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9678 op1 = force_reg (mode, op1);
9679
9680 /* The increment queue is LIFO, thus we have to `queue'
9681 the instructions in reverse order. */
9682 enqueue_insn (op0, gen_move_insn (op0, temp));
9683 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9684 return result;
9685 }
9686 }
9687
9688 /* Preincrement, or we can't increment with one simple insn. */
9689 if (post)
9690 /* Save a copy of the value before inc or dec, to return it later. */
9691 temp = value = copy_to_reg (op0);
9692 else
9693 /* Arrange to return the incremented value. */
9694 /* Copy the rtx because expand_binop will protect from the queue,
9695 and the results of that would be invalid for us to return
9696 if our caller does emit_queue before using our result. */
9697 temp = copy_rtx (value = op0);
9698
9699 /* Increment however we can. */
9700 op1 = expand_binop (mode, this_optab, value, op1, op0,
9701 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9702 /* Make sure the value is stored into OP0. */
9703 if (op1 != op0)
9704 emit_move_insn (op0, op1);
9705
9706 return temp;
9707 }
9708 \f
9709 /* Expand all function calls contained within EXP, innermost ones first.
9710 But don't look within expressions that have sequence points.
9711 For each CALL_EXPR, record the rtx for its value
9712 in the CALL_EXPR_RTL field. */
9713
9714 static void
9715 preexpand_calls (exp)
9716 tree exp;
9717 {
9718 register int nops, i;
9719 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9720
9721 if (! do_preexpand_calls)
9722 return;
9723
9724 /* Only expressions and references can contain calls. */
9725
9726 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9727 return;
9728
9729 switch (TREE_CODE (exp))
9730 {
9731 case CALL_EXPR:
9732 /* Do nothing if already expanded. */
9733 if (CALL_EXPR_RTL (exp) != 0
9734 /* Do nothing if the call returns a variable-sized object. */
9735 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9736 /* Do nothing to built-in functions. */
9737 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9738 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9739 == FUNCTION_DECL)
9740 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9741 return;
9742
9743 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9744 return;
9745
9746 case COMPOUND_EXPR:
9747 case COND_EXPR:
9748 case TRUTH_ANDIF_EXPR:
9749 case TRUTH_ORIF_EXPR:
9750 /* If we find one of these, then we can be sure
9751 the adjust will be done for it (since it makes jumps).
9752 Do it now, so that if this is inside an argument
9753 of a function, we don't get the stack adjustment
9754 after some other args have already been pushed. */
9755 do_pending_stack_adjust ();
9756 return;
9757
9758 case BLOCK:
9759 case RTL_EXPR:
9760 case WITH_CLEANUP_EXPR:
9761 case CLEANUP_POINT_EXPR:
9762 return;
9763
9764 case SAVE_EXPR:
9765 if (SAVE_EXPR_RTL (exp) != 0)
9766 return;
9767 }
9768
9769 nops = tree_code_length[(int) TREE_CODE (exp)];
9770 for (i = 0; i < nops; i++)
9771 if (TREE_OPERAND (exp, i) != 0)
9772 {
9773 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9774 if (type == 'e' || type == '<' || type == '1' || type == '2'
9775 || type == 'r')
9776 preexpand_calls (TREE_OPERAND (exp, i));
9777 }
9778 }
9779 \f
9780 /* At the start of a function, record that we have no previously-pushed
9781 arguments waiting to be popped. */
9782
9783 void
9784 init_pending_stack_adjust ()
9785 {
9786 pending_stack_adjust = 0;
9787 }
9788
9789 /* When exiting from function, if safe, clear out any pending stack adjust
9790 so the adjustment won't get done. */
9791
9792 void
9793 clear_pending_stack_adjust ()
9794 {
9795 #ifdef EXIT_IGNORE_STACK
9796 if (optimize > 0
9797 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9798 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9799 && ! flag_inline_functions)
9800 pending_stack_adjust = 0;
9801 #endif
9802 }
9803
9804 /* Pop any previously-pushed arguments that have not been popped yet. */
9805
9806 void
9807 do_pending_stack_adjust ()
9808 {
9809 if (inhibit_defer_pop == 0)
9810 {
9811 if (pending_stack_adjust != 0)
9812 adjust_stack (GEN_INT (pending_stack_adjust));
9813 pending_stack_adjust = 0;
9814 }
9815 }
9816
9817 /* Defer the expansion all cleanups up to OLD_CLEANUPS.
9818 Returns the cleanups to be performed. */
9819
9820 static tree
9821 defer_cleanups_to (old_cleanups)
9822 tree old_cleanups;
9823 {
9824 tree new_cleanups = NULL_TREE;
9825 tree cleanups = cleanups_this_call;
9826 tree last = NULL_TREE;
9827
9828 while (cleanups_this_call != old_cleanups)
9829 {
9830 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9831 last = cleanups_this_call;
9832 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9833 }
9834
9835 if (last)
9836 {
9837 /* Remove the list from the chain of cleanups. */
9838 TREE_CHAIN (last) = NULL_TREE;
9839
9840 /* reverse them so that we can build them in the right order. */
9841 cleanups = nreverse (cleanups);
9842
9843 /* All cleanups must be on the function_obstack. */
9844 push_obstacks_nochange ();
9845 resume_temporary_allocation ();
9846
9847 while (cleanups)
9848 {
9849 if (new_cleanups)
9850 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9851 TREE_VALUE (cleanups), new_cleanups);
9852 else
9853 new_cleanups = TREE_VALUE (cleanups);
9854
9855 cleanups = TREE_CHAIN (cleanups);
9856 }
9857
9858 pop_obstacks ();
9859 }
9860
9861 return new_cleanups;
9862 }
9863
9864 /* Expand all cleanups up to OLD_CLEANUPS.
9865 Needed here, and also for language-dependent calls. */
9866
9867 void
9868 expand_cleanups_to (old_cleanups)
9869 tree old_cleanups;
9870 {
9871 while (cleanups_this_call != old_cleanups)
9872 {
9873 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9874 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9875 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9876 }
9877 }
9878 \f
9879 /* Expand conditional expressions. */
9880
9881 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9882 LABEL is an rtx of code CODE_LABEL, in this function and all the
9883 functions here. */
9884
9885 void
9886 jumpifnot (exp, label)
9887 tree exp;
9888 rtx label;
9889 {
9890 do_jump (exp, label, NULL_RTX);
9891 }
9892
9893 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9894
9895 void
9896 jumpif (exp, label)
9897 tree exp;
9898 rtx label;
9899 {
9900 do_jump (exp, NULL_RTX, label);
9901 }
9902
9903 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9904 the result is zero, or IF_TRUE_LABEL if the result is one.
9905 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9906 meaning fall through in that case.
9907
9908 do_jump always does any pending stack adjust except when it does not
9909 actually perform a jump. An example where there is no jump
9910 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9911
9912 This function is responsible for optimizing cases such as
9913 &&, || and comparison operators in EXP. */
9914
9915 void
9916 do_jump (exp, if_false_label, if_true_label)
9917 tree exp;
9918 rtx if_false_label, if_true_label;
9919 {
9920 register enum tree_code code = TREE_CODE (exp);
9921 /* Some cases need to create a label to jump to
9922 in order to properly fall through.
9923 These cases set DROP_THROUGH_LABEL nonzero. */
9924 rtx drop_through_label = 0;
9925 rtx temp;
9926 rtx comparison = 0;
9927 int i;
9928 tree type;
9929 enum machine_mode mode;
9930
9931 emit_queue ();
9932
9933 switch (code)
9934 {
9935 case ERROR_MARK:
9936 break;
9937
9938 case INTEGER_CST:
9939 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9940 if (temp)
9941 emit_jump (temp);
9942 break;
9943
9944 #if 0
9945 /* This is not true with #pragma weak */
9946 case ADDR_EXPR:
9947 /* The address of something can never be zero. */
9948 if (if_true_label)
9949 emit_jump (if_true_label);
9950 break;
9951 #endif
9952
9953 case NOP_EXPR:
9954 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9955 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9956 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9957 goto normal;
9958 case CONVERT_EXPR:
9959 /* If we are narrowing the operand, we have to do the compare in the
9960 narrower mode. */
9961 if ((TYPE_PRECISION (TREE_TYPE (exp))
9962 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9963 goto normal;
9964 case NON_LVALUE_EXPR:
9965 case REFERENCE_EXPR:
9966 case ABS_EXPR:
9967 case NEGATE_EXPR:
9968 case LROTATE_EXPR:
9969 case RROTATE_EXPR:
9970 /* These cannot change zero->non-zero or vice versa. */
9971 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9972 break;
9973
9974 #if 0
9975 /* This is never less insns than evaluating the PLUS_EXPR followed by
9976 a test and can be longer if the test is eliminated. */
9977 case PLUS_EXPR:
9978 /* Reduce to minus. */
9979 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9980 TREE_OPERAND (exp, 0),
9981 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9982 TREE_OPERAND (exp, 1))));
9983 /* Process as MINUS. */
9984 #endif
9985
9986 case MINUS_EXPR:
9987 /* Non-zero iff operands of minus differ. */
9988 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9989 TREE_OPERAND (exp, 0),
9990 TREE_OPERAND (exp, 1)),
9991 NE, NE);
9992 break;
9993
9994 case BIT_AND_EXPR:
9995 /* If we are AND'ing with a small constant, do this comparison in the
9996 smallest type that fits. If the machine doesn't have comparisons
9997 that small, it will be converted back to the wider comparison.
9998 This helps if we are testing the sign bit of a narrower object.
9999 combine can't do this for us because it can't know whether a
10000 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10001
10002 if (! SLOW_BYTE_ACCESS
10003 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10004 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10005 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10006 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10007 && (type = type_for_mode (mode, 1)) != 0
10008 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10009 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10010 != CODE_FOR_nothing))
10011 {
10012 do_jump (convert (type, exp), if_false_label, if_true_label);
10013 break;
10014 }
10015 goto normal;
10016
10017 case TRUTH_NOT_EXPR:
10018 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10019 break;
10020
10021 case TRUTH_ANDIF_EXPR:
10022 {
10023 rtx seq1, seq2;
10024 tree cleanups, old_cleanups;
10025
10026 if (if_false_label == 0)
10027 if_false_label = drop_through_label = gen_label_rtx ();
10028 start_sequence ();
10029 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10030 seq1 = get_insns ();
10031 end_sequence ();
10032
10033 old_cleanups = cleanups_this_call;
10034 start_sequence ();
10035 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10036 seq2 = get_insns ();
10037 cleanups = defer_cleanups_to (old_cleanups);
10038 end_sequence ();
10039
10040 if (cleanups)
10041 {
10042 rtx flag = gen_reg_rtx (word_mode);
10043 tree new_cleanups;
10044 tree cond;
10045
10046 /* Flag cleanups as not needed. */
10047 emit_move_insn (flag, const0_rtx);
10048 emit_insns (seq1);
10049
10050 /* Flag cleanups as needed. */
10051 emit_move_insn (flag, const1_rtx);
10052 emit_insns (seq2);
10053
10054 /* All cleanups must be on the function_obstack. */
10055 push_obstacks_nochange ();
10056 resume_temporary_allocation ();
10057
10058 /* convert flag, which is an rtx, into a tree. */
10059 cond = make_node (RTL_EXPR);
10060 TREE_TYPE (cond) = integer_type_node;
10061 RTL_EXPR_RTL (cond) = flag;
10062 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10063 cond = save_expr (cond);
10064
10065 new_cleanups = build (COND_EXPR, void_type_node,
10066 truthvalue_conversion (cond),
10067 cleanups, integer_zero_node);
10068 new_cleanups = fold (new_cleanups);
10069
10070 pop_obstacks ();
10071
10072 /* Now add in the conditionalized cleanups. */
10073 cleanups_this_call
10074 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10075 expand_eh_region_start ();
10076 }
10077 else
10078 {
10079 emit_insns (seq1);
10080 emit_insns (seq2);
10081 }
10082 }
10083 break;
10084
10085 case TRUTH_ORIF_EXPR:
10086 {
10087 rtx seq1, seq2;
10088 tree cleanups, old_cleanups;
10089
10090 if (if_true_label == 0)
10091 if_true_label = drop_through_label = gen_label_rtx ();
10092 start_sequence ();
10093 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10094 seq1 = get_insns ();
10095 end_sequence ();
10096
10097 old_cleanups = cleanups_this_call;
10098 start_sequence ();
10099 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10100 seq2 = get_insns ();
10101 cleanups = defer_cleanups_to (old_cleanups);
10102 end_sequence ();
10103
10104 if (cleanups)
10105 {
10106 rtx flag = gen_reg_rtx (word_mode);
10107 tree new_cleanups;
10108 tree cond;
10109
10110 /* Flag cleanups as not needed. */
10111 emit_move_insn (flag, const0_rtx);
10112 emit_insns (seq1);
10113
10114 /* Flag cleanups as needed. */
10115 emit_move_insn (flag, const1_rtx);
10116 emit_insns (seq2);
10117
10118 /* All cleanups must be on the function_obstack. */
10119 push_obstacks_nochange ();
10120 resume_temporary_allocation ();
10121
10122 /* convert flag, which is an rtx, into a tree. */
10123 cond = make_node (RTL_EXPR);
10124 TREE_TYPE (cond) = integer_type_node;
10125 RTL_EXPR_RTL (cond) = flag;
10126 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10127 cond = save_expr (cond);
10128
10129 new_cleanups = build (COND_EXPR, void_type_node,
10130 truthvalue_conversion (cond),
10131 cleanups, integer_zero_node);
10132 new_cleanups = fold (new_cleanups);
10133
10134 pop_obstacks ();
10135
10136 /* Now add in the conditionalized cleanups. */
10137 cleanups_this_call
10138 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10139 expand_eh_region_start ();
10140 }
10141 else
10142 {
10143 emit_insns (seq1);
10144 emit_insns (seq2);
10145 }
10146 }
10147 break;
10148
10149 case COMPOUND_EXPR:
10150 push_temp_slots ();
10151 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10152 preserve_temp_slots (NULL_RTX);
10153 free_temp_slots ();
10154 pop_temp_slots ();
10155 emit_queue ();
10156 do_pending_stack_adjust ();
10157 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10158 break;
10159
10160 case COMPONENT_REF:
10161 case BIT_FIELD_REF:
10162 case ARRAY_REF:
10163 {
10164 int bitsize, bitpos, unsignedp;
10165 enum machine_mode mode;
10166 tree type;
10167 tree offset;
10168 int volatilep = 0;
10169
10170 /* Get description of this reference. We don't actually care
10171 about the underlying object here. */
10172 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10173 &mode, &unsignedp, &volatilep);
10174
10175 type = type_for_size (bitsize, unsignedp);
10176 if (! SLOW_BYTE_ACCESS
10177 && type != 0 && bitsize >= 0
10178 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10179 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10180 != CODE_FOR_nothing))
10181 {
10182 do_jump (convert (type, exp), if_false_label, if_true_label);
10183 break;
10184 }
10185 goto normal;
10186 }
10187
10188 case COND_EXPR:
10189 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10190 if (integer_onep (TREE_OPERAND (exp, 1))
10191 && integer_zerop (TREE_OPERAND (exp, 2)))
10192 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10193
10194 else if (integer_zerop (TREE_OPERAND (exp, 1))
10195 && integer_onep (TREE_OPERAND (exp, 2)))
10196 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10197
10198 else
10199 {
10200 rtx seq1, seq2;
10201 tree cleanups_left_side, cleanups_right_side, old_cleanups;
10202
10203 register rtx label1 = gen_label_rtx ();
10204 drop_through_label = gen_label_rtx ();
10205
10206 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10207
10208 /* We need to save the cleanups for the lhs and rhs separately.
10209 Keep track of the cleanups seen before the lhs. */
10210 old_cleanups = cleanups_this_call;
10211 start_sequence ();
10212 /* Now the THEN-expression. */
10213 do_jump (TREE_OPERAND (exp, 1),
10214 if_false_label ? if_false_label : drop_through_label,
10215 if_true_label ? if_true_label : drop_through_label);
10216 /* In case the do_jump just above never jumps. */
10217 do_pending_stack_adjust ();
10218 emit_label (label1);
10219 seq1 = get_insns ();
10220 /* Now grab the cleanups for the lhs. */
10221 cleanups_left_side = defer_cleanups_to (old_cleanups);
10222 end_sequence ();
10223
10224 /* And keep track of where we start before the rhs. */
10225 old_cleanups = cleanups_this_call;
10226 start_sequence ();
10227 /* Now the ELSE-expression. */
10228 do_jump (TREE_OPERAND (exp, 2),
10229 if_false_label ? if_false_label : drop_through_label,
10230 if_true_label ? if_true_label : drop_through_label);
10231 seq2 = get_insns ();
10232 /* Grab the cleanups for the rhs. */
10233 cleanups_right_side = defer_cleanups_to (old_cleanups);
10234 end_sequence ();
10235
10236 if (cleanups_left_side || cleanups_right_side)
10237 {
10238 /* Make the cleanups for the THEN and ELSE clauses
10239 conditional based on which half is executed. */
10240 rtx flag = gen_reg_rtx (word_mode);
10241 tree new_cleanups;
10242 tree cond;
10243
10244 /* Set the flag to 0 so that we know we executed the lhs. */
10245 emit_move_insn (flag, const0_rtx);
10246 emit_insns (seq1);
10247
10248 /* Set the flag to 1 so that we know we executed the rhs. */
10249 emit_move_insn (flag, const1_rtx);
10250 emit_insns (seq2);
10251
10252 /* Make sure the cleanup lives on the function_obstack. */
10253 push_obstacks_nochange ();
10254 resume_temporary_allocation ();
10255
10256 /* Now, build up a COND_EXPR that tests the value of the
10257 flag, and then either do the cleanups for the lhs or the
10258 rhs. */
10259 cond = make_node (RTL_EXPR);
10260 TREE_TYPE (cond) = integer_type_node;
10261 RTL_EXPR_RTL (cond) = flag;
10262 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10263 cond = save_expr (cond);
10264
10265 new_cleanups = build (COND_EXPR, void_type_node,
10266 truthvalue_conversion (cond),
10267 cleanups_right_side, cleanups_left_side);
10268 new_cleanups = fold (new_cleanups);
10269
10270 pop_obstacks ();
10271
10272 /* Now add in the conditionalized cleanups. */
10273 cleanups_this_call
10274 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10275 expand_eh_region_start ();
10276 }
10277 else
10278 {
10279 /* No cleanups were needed, so emit the two sequences
10280 directly. */
10281 emit_insns (seq1);
10282 emit_insns (seq2);
10283 }
10284 }
10285 break;
10286
10287 case EQ_EXPR:
10288 {
10289 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10290
10291 if (integer_zerop (TREE_OPERAND (exp, 1)))
10292 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10293 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10294 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10295 do_jump
10296 (fold
10297 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10298 fold (build (EQ_EXPR, TREE_TYPE (exp),
10299 fold (build1 (REALPART_EXPR,
10300 TREE_TYPE (inner_type),
10301 TREE_OPERAND (exp, 0))),
10302 fold (build1 (REALPART_EXPR,
10303 TREE_TYPE (inner_type),
10304 TREE_OPERAND (exp, 1))))),
10305 fold (build (EQ_EXPR, TREE_TYPE (exp),
10306 fold (build1 (IMAGPART_EXPR,
10307 TREE_TYPE (inner_type),
10308 TREE_OPERAND (exp, 0))),
10309 fold (build1 (IMAGPART_EXPR,
10310 TREE_TYPE (inner_type),
10311 TREE_OPERAND (exp, 1))))))),
10312 if_false_label, if_true_label);
10313 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10314 && !can_compare_p (TYPE_MODE (inner_type)))
10315 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10316 else
10317 comparison = compare (exp, EQ, EQ);
10318 break;
10319 }
10320
10321 case NE_EXPR:
10322 {
10323 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10324
10325 if (integer_zerop (TREE_OPERAND (exp, 1)))
10326 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10327 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10328 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10329 do_jump
10330 (fold
10331 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10332 fold (build (NE_EXPR, TREE_TYPE (exp),
10333 fold (build1 (REALPART_EXPR,
10334 TREE_TYPE (inner_type),
10335 TREE_OPERAND (exp, 0))),
10336 fold (build1 (REALPART_EXPR,
10337 TREE_TYPE (inner_type),
10338 TREE_OPERAND (exp, 1))))),
10339 fold (build (NE_EXPR, TREE_TYPE (exp),
10340 fold (build1 (IMAGPART_EXPR,
10341 TREE_TYPE (inner_type),
10342 TREE_OPERAND (exp, 0))),
10343 fold (build1 (IMAGPART_EXPR,
10344 TREE_TYPE (inner_type),
10345 TREE_OPERAND (exp, 1))))))),
10346 if_false_label, if_true_label);
10347 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10348 && !can_compare_p (TYPE_MODE (inner_type)))
10349 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10350 else
10351 comparison = compare (exp, NE, NE);
10352 break;
10353 }
10354
10355 case LT_EXPR:
10356 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10357 == MODE_INT)
10358 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10359 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10360 else
10361 comparison = compare (exp, LT, LTU);
10362 break;
10363
10364 case LE_EXPR:
10365 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10366 == MODE_INT)
10367 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10368 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10369 else
10370 comparison = compare (exp, LE, LEU);
10371 break;
10372
10373 case GT_EXPR:
10374 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10375 == MODE_INT)
10376 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10377 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10378 else
10379 comparison = compare (exp, GT, GTU);
10380 break;
10381
10382 case GE_EXPR:
10383 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10384 == MODE_INT)
10385 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10386 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10387 else
10388 comparison = compare (exp, GE, GEU);
10389 break;
10390
10391 default:
10392 normal:
10393 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10394 #if 0
10395 /* This is not needed any more and causes poor code since it causes
10396 comparisons and tests from non-SI objects to have different code
10397 sequences. */
10398 /* Copy to register to avoid generating bad insns by cse
10399 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10400 if (!cse_not_expected && GET_CODE (temp) == MEM)
10401 temp = copy_to_reg (temp);
10402 #endif
10403 do_pending_stack_adjust ();
10404 if (GET_CODE (temp) == CONST_INT)
10405 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10406 else if (GET_CODE (temp) == LABEL_REF)
10407 comparison = const_true_rtx;
10408 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10409 && !can_compare_p (GET_MODE (temp)))
10410 /* Note swapping the labels gives us not-equal. */
10411 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10412 else if (GET_MODE (temp) != VOIDmode)
10413 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10414 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10415 GET_MODE (temp), NULL_RTX, 0);
10416 else
10417 abort ();
10418 }
10419
10420 /* Do any postincrements in the expression that was tested. */
10421 emit_queue ();
10422
10423 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10424 straight into a conditional jump instruction as the jump condition.
10425 Otherwise, all the work has been done already. */
10426
10427 if (comparison == const_true_rtx)
10428 {
10429 if (if_true_label)
10430 emit_jump (if_true_label);
10431 }
10432 else if (comparison == const0_rtx)
10433 {
10434 if (if_false_label)
10435 emit_jump (if_false_label);
10436 }
10437 else if (comparison)
10438 do_jump_for_compare (comparison, if_false_label, if_true_label);
10439
10440 if (drop_through_label)
10441 {
10442 /* If do_jump produces code that might be jumped around,
10443 do any stack adjusts from that code, before the place
10444 where control merges in. */
10445 do_pending_stack_adjust ();
10446 emit_label (drop_through_label);
10447 }
10448 }
10449 \f
10450 /* Given a comparison expression EXP for values too wide to be compared
10451 with one insn, test the comparison and jump to the appropriate label.
10452 The code of EXP is ignored; we always test GT if SWAP is 0,
10453 and LT if SWAP is 1. */
10454
10455 static void
10456 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10457 tree exp;
10458 int swap;
10459 rtx if_false_label, if_true_label;
10460 {
10461 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10462 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10463 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10464 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10465 rtx drop_through_label = 0;
10466 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10467 int i;
10468
10469 if (! if_true_label || ! if_false_label)
10470 drop_through_label = gen_label_rtx ();
10471 if (! if_true_label)
10472 if_true_label = drop_through_label;
10473 if (! if_false_label)
10474 if_false_label = drop_through_label;
10475
10476 /* Compare a word at a time, high order first. */
10477 for (i = 0; i < nwords; i++)
10478 {
10479 rtx comp;
10480 rtx op0_word, op1_word;
10481
10482 if (WORDS_BIG_ENDIAN)
10483 {
10484 op0_word = operand_subword_force (op0, i, mode);
10485 op1_word = operand_subword_force (op1, i, mode);
10486 }
10487 else
10488 {
10489 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10490 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10491 }
10492
10493 /* All but high-order word must be compared as unsigned. */
10494 comp = compare_from_rtx (op0_word, op1_word,
10495 (unsignedp || i > 0) ? GTU : GT,
10496 unsignedp, word_mode, NULL_RTX, 0);
10497 if (comp == const_true_rtx)
10498 emit_jump (if_true_label);
10499 else if (comp != const0_rtx)
10500 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10501
10502 /* Consider lower words only if these are equal. */
10503 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10504 NULL_RTX, 0);
10505 if (comp == const_true_rtx)
10506 emit_jump (if_false_label);
10507 else if (comp != const0_rtx)
10508 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10509 }
10510
10511 if (if_false_label)
10512 emit_jump (if_false_label);
10513 if (drop_through_label)
10514 emit_label (drop_through_label);
10515 }
10516
10517 /* Compare OP0 with OP1, word at a time, in mode MODE.
10518 UNSIGNEDP says to do unsigned comparison.
10519 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10520
10521 void
10522 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10523 enum machine_mode mode;
10524 int unsignedp;
10525 rtx op0, op1;
10526 rtx if_false_label, if_true_label;
10527 {
10528 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10529 rtx drop_through_label = 0;
10530 int i;
10531
10532 if (! if_true_label || ! if_false_label)
10533 drop_through_label = gen_label_rtx ();
10534 if (! if_true_label)
10535 if_true_label = drop_through_label;
10536 if (! if_false_label)
10537 if_false_label = drop_through_label;
10538
10539 /* Compare a word at a time, high order first. */
10540 for (i = 0; i < nwords; i++)
10541 {
10542 rtx comp;
10543 rtx op0_word, op1_word;
10544
10545 if (WORDS_BIG_ENDIAN)
10546 {
10547 op0_word = operand_subword_force (op0, i, mode);
10548 op1_word = operand_subword_force (op1, i, mode);
10549 }
10550 else
10551 {
10552 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10553 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10554 }
10555
10556 /* All but high-order word must be compared as unsigned. */
10557 comp = compare_from_rtx (op0_word, op1_word,
10558 (unsignedp || i > 0) ? GTU : GT,
10559 unsignedp, word_mode, NULL_RTX, 0);
10560 if (comp == const_true_rtx)
10561 emit_jump (if_true_label);
10562 else if (comp != const0_rtx)
10563 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10564
10565 /* Consider lower words only if these are equal. */
10566 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10567 NULL_RTX, 0);
10568 if (comp == const_true_rtx)
10569 emit_jump (if_false_label);
10570 else if (comp != const0_rtx)
10571 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10572 }
10573
10574 if (if_false_label)
10575 emit_jump (if_false_label);
10576 if (drop_through_label)
10577 emit_label (drop_through_label);
10578 }
10579
10580 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10581 with one insn, test the comparison and jump to the appropriate label. */
10582
10583 static void
10584 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10585 tree exp;
10586 rtx if_false_label, if_true_label;
10587 {
10588 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10589 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10590 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10591 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10592 int i;
10593 rtx drop_through_label = 0;
10594
10595 if (! if_false_label)
10596 drop_through_label = if_false_label = gen_label_rtx ();
10597
10598 for (i = 0; i < nwords; i++)
10599 {
10600 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10601 operand_subword_force (op1, i, mode),
10602 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10603 word_mode, NULL_RTX, 0);
10604 if (comp == const_true_rtx)
10605 emit_jump (if_false_label);
10606 else if (comp != const0_rtx)
10607 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10608 }
10609
10610 if (if_true_label)
10611 emit_jump (if_true_label);
10612 if (drop_through_label)
10613 emit_label (drop_through_label);
10614 }
10615 \f
10616 /* Jump according to whether OP0 is 0.
10617 We assume that OP0 has an integer mode that is too wide
10618 for the available compare insns. */
10619
10620 static void
10621 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10622 rtx op0;
10623 rtx if_false_label, if_true_label;
10624 {
10625 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10626 int i;
10627 rtx drop_through_label = 0;
10628
10629 if (! if_false_label)
10630 drop_through_label = if_false_label = gen_label_rtx ();
10631
10632 for (i = 0; i < nwords; i++)
10633 {
10634 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10635 GET_MODE (op0)),
10636 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10637 if (comp == const_true_rtx)
10638 emit_jump (if_false_label);
10639 else if (comp != const0_rtx)
10640 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10641 }
10642
10643 if (if_true_label)
10644 emit_jump (if_true_label);
10645 if (drop_through_label)
10646 emit_label (drop_through_label);
10647 }
10648
10649 /* Given a comparison expression in rtl form, output conditional branches to
10650 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10651
10652 static void
10653 do_jump_for_compare (comparison, if_false_label, if_true_label)
10654 rtx comparison, if_false_label, if_true_label;
10655 {
10656 if (if_true_label)
10657 {
10658 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10659 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10660 else
10661 abort ();
10662
10663 if (if_false_label)
10664 emit_jump (if_false_label);
10665 }
10666 else if (if_false_label)
10667 {
10668 rtx insn;
10669 rtx prev = get_last_insn ();
10670 rtx branch = 0;
10671
10672 /* Output the branch with the opposite condition. Then try to invert
10673 what is generated. If more than one insn is a branch, or if the
10674 branch is not the last insn written, abort. If we can't invert
10675 the branch, emit make a true label, redirect this jump to that,
10676 emit a jump to the false label and define the true label. */
10677
10678 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10679 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10680 else
10681 abort ();
10682
10683 /* Here we get the first insn that was just emitted. It used to be the
10684 case that, on some machines, emitting the branch would discard
10685 the previous compare insn and emit a replacement. This isn't
10686 done anymore, but abort if we see that PREV is deleted. */
10687
10688 if (prev == 0)
10689 insn = get_insns ();
10690 else if (INSN_DELETED_P (prev))
10691 abort ();
10692 else
10693 insn = NEXT_INSN (prev);
10694
10695 for (; insn; insn = NEXT_INSN (insn))
10696 if (GET_CODE (insn) == JUMP_INSN)
10697 {
10698 if (branch)
10699 abort ();
10700 branch = insn;
10701 }
10702
10703 if (branch != get_last_insn ())
10704 abort ();
10705
10706 JUMP_LABEL (branch) = if_false_label;
10707 if (! invert_jump (branch, if_false_label))
10708 {
10709 if_true_label = gen_label_rtx ();
10710 redirect_jump (branch, if_true_label);
10711 emit_jump (if_false_label);
10712 emit_label (if_true_label);
10713 }
10714 }
10715 }
10716 \f
10717 /* Generate code for a comparison expression EXP
10718 (including code to compute the values to be compared)
10719 and set (CC0) according to the result.
10720 SIGNED_CODE should be the rtx operation for this comparison for
10721 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10722
10723 We force a stack adjustment unless there are currently
10724 things pushed on the stack that aren't yet used. */
10725
10726 static rtx
10727 compare (exp, signed_code, unsigned_code)
10728 register tree exp;
10729 enum rtx_code signed_code, unsigned_code;
10730 {
10731 register rtx op0
10732 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10733 register rtx op1
10734 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10735 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10736 register enum machine_mode mode = TYPE_MODE (type);
10737 int unsignedp = TREE_UNSIGNED (type);
10738 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10739
10740 #ifdef HAVE_canonicalize_funcptr_for_compare
10741 /* If function pointers need to be "canonicalized" before they can
10742 be reliably compared, then canonicalize them. */
10743 if (HAVE_canonicalize_funcptr_for_compare
10744 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10745 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10746 == FUNCTION_TYPE))
10747 {
10748 rtx new_op0 = gen_reg_rtx (mode);
10749
10750 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10751 op0 = new_op0;
10752 }
10753
10754 if (HAVE_canonicalize_funcptr_for_compare
10755 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10756 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10757 == FUNCTION_TYPE))
10758 {
10759 rtx new_op1 = gen_reg_rtx (mode);
10760
10761 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10762 op1 = new_op1;
10763 }
10764 #endif
10765
10766 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10767 ((mode == BLKmode)
10768 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10769 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10770 }
10771
10772 /* Like compare but expects the values to compare as two rtx's.
10773 The decision as to signed or unsigned comparison must be made by the caller.
10774
10775 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10776 compared.
10777
10778 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10779 size of MODE should be used. */
10780
10781 rtx
10782 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10783 register rtx op0, op1;
10784 enum rtx_code code;
10785 int unsignedp;
10786 enum machine_mode mode;
10787 rtx size;
10788 int align;
10789 {
10790 rtx tem;
10791
10792 /* If one operand is constant, make it the second one. Only do this
10793 if the other operand is not constant as well. */
10794
10795 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10796 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10797 {
10798 tem = op0;
10799 op0 = op1;
10800 op1 = tem;
10801 code = swap_condition (code);
10802 }
10803
10804 if (flag_force_mem)
10805 {
10806 op0 = force_not_mem (op0);
10807 op1 = force_not_mem (op1);
10808 }
10809
10810 do_pending_stack_adjust ();
10811
10812 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10813 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10814 return tem;
10815
10816 #if 0
10817 /* There's no need to do this now that combine.c can eliminate lots of
10818 sign extensions. This can be less efficient in certain cases on other
10819 machines. */
10820
10821 /* If this is a signed equality comparison, we can do it as an
10822 unsigned comparison since zero-extension is cheaper than sign
10823 extension and comparisons with zero are done as unsigned. This is
10824 the case even on machines that can do fast sign extension, since
10825 zero-extension is easier to combine with other operations than
10826 sign-extension is. If we are comparing against a constant, we must
10827 convert it to what it would look like unsigned. */
10828 if ((code == EQ || code == NE) && ! unsignedp
10829 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10830 {
10831 if (GET_CODE (op1) == CONST_INT
10832 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10833 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10834 unsignedp = 1;
10835 }
10836 #endif
10837
10838 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10839
10840 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10841 }
10842 \f
10843 /* Generate code to calculate EXP using a store-flag instruction
10844 and return an rtx for the result. EXP is either a comparison
10845 or a TRUTH_NOT_EXPR whose operand is a comparison.
10846
10847 If TARGET is nonzero, store the result there if convenient.
10848
10849 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10850 cheap.
10851
10852 Return zero if there is no suitable set-flag instruction
10853 available on this machine.
10854
10855 Once expand_expr has been called on the arguments of the comparison,
10856 we are committed to doing the store flag, since it is not safe to
10857 re-evaluate the expression. We emit the store-flag insn by calling
10858 emit_store_flag, but only expand the arguments if we have a reason
10859 to believe that emit_store_flag will be successful. If we think that
10860 it will, but it isn't, we have to simulate the store-flag with a
10861 set/jump/set sequence. */
10862
10863 static rtx
10864 do_store_flag (exp, target, mode, only_cheap)
10865 tree exp;
10866 rtx target;
10867 enum machine_mode mode;
10868 int only_cheap;
10869 {
10870 enum rtx_code code;
10871 tree arg0, arg1, type;
10872 tree tem;
10873 enum machine_mode operand_mode;
10874 int invert = 0;
10875 int unsignedp;
10876 rtx op0, op1;
10877 enum insn_code icode;
10878 rtx subtarget = target;
10879 rtx result, label, pattern, jump_pat;
10880
10881 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10882 result at the end. We can't simply invert the test since it would
10883 have already been inverted if it were valid. This case occurs for
10884 some floating-point comparisons. */
10885
10886 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10887 invert = 1, exp = TREE_OPERAND (exp, 0);
10888
10889 arg0 = TREE_OPERAND (exp, 0);
10890 arg1 = TREE_OPERAND (exp, 1);
10891 type = TREE_TYPE (arg0);
10892 operand_mode = TYPE_MODE (type);
10893 unsignedp = TREE_UNSIGNED (type);
10894
10895 /* We won't bother with BLKmode store-flag operations because it would mean
10896 passing a lot of information to emit_store_flag. */
10897 if (operand_mode == BLKmode)
10898 return 0;
10899
10900 /* We won't bother with store-flag operations involving function pointers
10901 when function pointers must be canonicalized before comparisons. */
10902 #ifdef HAVE_canonicalize_funcptr_for_compare
10903 if (HAVE_canonicalize_funcptr_for_compare
10904 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10905 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10906 == FUNCTION_TYPE))
10907 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10908 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10909 == FUNCTION_TYPE))))
10910 return 0;
10911 #endif
10912
10913 STRIP_NOPS (arg0);
10914 STRIP_NOPS (arg1);
10915
10916 /* Get the rtx comparison code to use. We know that EXP is a comparison
10917 operation of some type. Some comparisons against 1 and -1 can be
10918 converted to comparisons with zero. Do so here so that the tests
10919 below will be aware that we have a comparison with zero. These
10920 tests will not catch constants in the first operand, but constants
10921 are rarely passed as the first operand. */
10922
10923 switch (TREE_CODE (exp))
10924 {
10925 case EQ_EXPR:
10926 code = EQ;
10927 break;
10928 case NE_EXPR:
10929 code = NE;
10930 break;
10931 case LT_EXPR:
10932 if (integer_onep (arg1))
10933 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10934 else
10935 code = unsignedp ? LTU : LT;
10936 break;
10937 case LE_EXPR:
10938 if (! unsignedp && integer_all_onesp (arg1))
10939 arg1 = integer_zero_node, code = LT;
10940 else
10941 code = unsignedp ? LEU : LE;
10942 break;
10943 case GT_EXPR:
10944 if (! unsignedp && integer_all_onesp (arg1))
10945 arg1 = integer_zero_node, code = GE;
10946 else
10947 code = unsignedp ? GTU : GT;
10948 break;
10949 case GE_EXPR:
10950 if (integer_onep (arg1))
10951 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10952 else
10953 code = unsignedp ? GEU : GE;
10954 break;
10955 default:
10956 abort ();
10957 }
10958
10959 /* Put a constant second. */
10960 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10961 {
10962 tem = arg0; arg0 = arg1; arg1 = tem;
10963 code = swap_condition (code);
10964 }
10965
10966 /* If this is an equality or inequality test of a single bit, we can
10967 do this by shifting the bit being tested to the low-order bit and
10968 masking the result with the constant 1. If the condition was EQ,
10969 we xor it with 1. This does not require an scc insn and is faster
10970 than an scc insn even if we have it. */
10971
10972 if ((code == NE || code == EQ)
10973 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10974 && integer_pow2p (TREE_OPERAND (arg0, 1))
10975 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10976 {
10977 tree inner = TREE_OPERAND (arg0, 0);
10978 HOST_WIDE_INT tem;
10979 int bitnum;
10980 int ops_unsignedp;
10981
10982 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10983 NULL_RTX, VOIDmode, 0));
10984 /* In this case, immed_double_const will sign extend the value to make
10985 it look the same on the host and target. We must remove the
10986 sign-extension before calling exact_log2, since exact_log2 will
10987 fail for negative values. */
10988 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
10989 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
10990 /* We don't use the obvious constant shift to generate the mask,
10991 because that generates compiler warnings when BITS_PER_WORD is
10992 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10993 code is unreachable in that case. */
10994 tem = tem & GET_MODE_MASK (word_mode);
10995 bitnum = exact_log2 (tem);
10996
10997 /* If INNER is a right shift of a constant and it plus BITNUM does
10998 not overflow, adjust BITNUM and INNER. */
10999
11000 if (TREE_CODE (inner) == RSHIFT_EXPR
11001 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11002 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11003 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11004 < TYPE_PRECISION (type)))
11005 {
11006 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11007 inner = TREE_OPERAND (inner, 0);
11008 }
11009
11010 /* If we are going to be able to omit the AND below, we must do our
11011 operations as unsigned. If we must use the AND, we have a choice.
11012 Normally unsigned is faster, but for some machines signed is. */
11013 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11014 #ifdef LOAD_EXTEND_OP
11015 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11016 #else
11017 : 1
11018 #endif
11019 );
11020
11021 if (subtarget == 0 || GET_CODE (subtarget) != REG
11022 || GET_MODE (subtarget) != operand_mode
11023 || ! safe_from_p (subtarget, inner))
11024 subtarget = 0;
11025
11026 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11027
11028 if (bitnum != 0)
11029 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11030 size_int (bitnum), subtarget, ops_unsignedp);
11031
11032 if (GET_MODE (op0) != mode)
11033 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11034
11035 if ((code == EQ && ! invert) || (code == NE && invert))
11036 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11037 ops_unsignedp, OPTAB_LIB_WIDEN);
11038
11039 /* Put the AND last so it can combine with more things. */
11040 if (bitnum != TYPE_PRECISION (type) - 1)
11041 op0 = expand_and (op0, const1_rtx, subtarget);
11042
11043 return op0;
11044 }
11045
11046 /* Now see if we are likely to be able to do this. Return if not. */
11047 if (! can_compare_p (operand_mode))
11048 return 0;
11049 icode = setcc_gen_code[(int) code];
11050 if (icode == CODE_FOR_nothing
11051 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11052 {
11053 /* We can only do this if it is one of the special cases that
11054 can be handled without an scc insn. */
11055 if ((code == LT && integer_zerop (arg1))
11056 || (! only_cheap && code == GE && integer_zerop (arg1)))
11057 ;
11058 else if (BRANCH_COST >= 0
11059 && ! only_cheap && (code == NE || code == EQ)
11060 && TREE_CODE (type) != REAL_TYPE
11061 && ((abs_optab->handlers[(int) operand_mode].insn_code
11062 != CODE_FOR_nothing)
11063 || (ffs_optab->handlers[(int) operand_mode].insn_code
11064 != CODE_FOR_nothing)))
11065 ;
11066 else
11067 return 0;
11068 }
11069
11070 preexpand_calls (exp);
11071 if (subtarget == 0 || GET_CODE (subtarget) != REG
11072 || GET_MODE (subtarget) != operand_mode
11073 || ! safe_from_p (subtarget, arg1))
11074 subtarget = 0;
11075
11076 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11077 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11078
11079 if (target == 0)
11080 target = gen_reg_rtx (mode);
11081
11082 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11083 because, if the emit_store_flag does anything it will succeed and
11084 OP0 and OP1 will not be used subsequently. */
11085
11086 result = emit_store_flag (target, code,
11087 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11088 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11089 operand_mode, unsignedp, 1);
11090
11091 if (result)
11092 {
11093 if (invert)
11094 result = expand_binop (mode, xor_optab, result, const1_rtx,
11095 result, 0, OPTAB_LIB_WIDEN);
11096 return result;
11097 }
11098
11099 /* If this failed, we have to do this with set/compare/jump/set code. */
11100 if (GET_CODE (target) != REG
11101 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11102 target = gen_reg_rtx (GET_MODE (target));
11103
11104 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11105 result = compare_from_rtx (op0, op1, code, unsignedp,
11106 operand_mode, NULL_RTX, 0);
11107 if (GET_CODE (result) == CONST_INT)
11108 return (((result == const0_rtx && ! invert)
11109 || (result != const0_rtx && invert))
11110 ? const0_rtx : const1_rtx);
11111
11112 label = gen_label_rtx ();
11113 if (bcc_gen_fctn[(int) code] == 0)
11114 abort ();
11115
11116 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11117 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11118 emit_label (label);
11119
11120 return target;
11121 }
11122 \f
11123 /* Generate a tablejump instruction (used for switch statements). */
11124
11125 #ifdef HAVE_tablejump
11126
11127 /* INDEX is the value being switched on, with the lowest value
11128 in the table already subtracted.
11129 MODE is its expected mode (needed if INDEX is constant).
11130 RANGE is the length of the jump table.
11131 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11132
11133 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11134 index value is out of range. */
11135
11136 void
11137 do_tablejump (index, mode, range, table_label, default_label)
11138 rtx index, range, table_label, default_label;
11139 enum machine_mode mode;
11140 {
11141 register rtx temp, vector;
11142
11143 /* Do an unsigned comparison (in the proper mode) between the index
11144 expression and the value which represents the length of the range.
11145 Since we just finished subtracting the lower bound of the range
11146 from the index expression, this comparison allows us to simultaneously
11147 check that the original index expression value is both greater than
11148 or equal to the minimum value of the range and less than or equal to
11149 the maximum value of the range. */
11150
11151 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11152 emit_jump_insn (gen_bgtu (default_label));
11153
11154 /* If index is in range, it must fit in Pmode.
11155 Convert to Pmode so we can index with it. */
11156 if (mode != Pmode)
11157 index = convert_to_mode (Pmode, index, 1);
11158
11159 /* Don't let a MEM slip thru, because then INDEX that comes
11160 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11161 and break_out_memory_refs will go to work on it and mess it up. */
11162 #ifdef PIC_CASE_VECTOR_ADDRESS
11163 if (flag_pic && GET_CODE (index) != REG)
11164 index = copy_to_mode_reg (Pmode, index);
11165 #endif
11166
11167 /* If flag_force_addr were to affect this address
11168 it could interfere with the tricky assumptions made
11169 about addresses that contain label-refs,
11170 which may be valid only very near the tablejump itself. */
11171 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11172 GET_MODE_SIZE, because this indicates how large insns are. The other
11173 uses should all be Pmode, because they are addresses. This code
11174 could fail if addresses and insns are not the same size. */
11175 index = gen_rtx (PLUS, Pmode,
11176 gen_rtx (MULT, Pmode, index,
11177 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11178 gen_rtx (LABEL_REF, Pmode, table_label));
11179 #ifdef PIC_CASE_VECTOR_ADDRESS
11180 if (flag_pic)
11181 index = PIC_CASE_VECTOR_ADDRESS (index);
11182 else
11183 #endif
11184 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11185 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11186 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11187 RTX_UNCHANGING_P (vector) = 1;
11188 convert_move (temp, vector, 0);
11189
11190 emit_jump_insn (gen_tablejump (temp, table_label));
11191
11192 #ifndef CASE_VECTOR_PC_RELATIVE
11193 /* If we are generating PIC code or if the table is PC-relative, the
11194 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11195 if (! flag_pic)
11196 emit_barrier ();
11197 #endif
11198 }
11199
11200 #endif /* HAVE_tablejump */
11201
11202
11203 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11204 to that value is on the top of the stack. The resulting type is TYPE, and
11205 the source declaration is DECL. */
11206
11207 void
11208 bc_load_memory (type, decl)
11209 tree type, decl;
11210 {
11211 enum bytecode_opcode opcode;
11212
11213
11214 /* Bit fields are special. We only know about signed and
11215 unsigned ints, and enums. The latter are treated as
11216 signed integers. */
11217
11218 if (DECL_BIT_FIELD (decl))
11219 if (TREE_CODE (type) == ENUMERAL_TYPE
11220 || TREE_CODE (type) == INTEGER_TYPE)
11221 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11222 else
11223 abort ();
11224 else
11225 /* See corresponding comment in bc_store_memory(). */
11226 if (TYPE_MODE (type) == BLKmode
11227 || TYPE_MODE (type) == VOIDmode)
11228 return;
11229 else
11230 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11231
11232 if (opcode == neverneverland)
11233 abort ();
11234
11235 bc_emit_bytecode (opcode);
11236
11237 #ifdef DEBUG_PRINT_CODE
11238 fputc ('\n', stderr);
11239 #endif
11240 }
11241
11242
11243 /* Store the contents of the second stack slot to the address in the
11244 top stack slot. DECL is the declaration of the destination and is used
11245 to determine whether we're dealing with a bitfield. */
11246
11247 void
11248 bc_store_memory (type, decl)
11249 tree type, decl;
11250 {
11251 enum bytecode_opcode opcode;
11252
11253
11254 if (DECL_BIT_FIELD (decl))
11255 {
11256 if (TREE_CODE (type) == ENUMERAL_TYPE
11257 || TREE_CODE (type) == INTEGER_TYPE)
11258 opcode = sstoreBI;
11259 else
11260 abort ();
11261 }
11262 else
11263 if (TYPE_MODE (type) == BLKmode)
11264 {
11265 /* Copy structure. This expands to a block copy instruction, storeBLK.
11266 In addition to the arguments expected by the other store instructions,
11267 it also expects a type size (SImode) on top of the stack, which is the
11268 structure size in size units (usually bytes). The two first arguments
11269 are already on the stack; so we just put the size on level 1. For some
11270 other languages, the size may be variable, this is why we don't encode
11271 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11272
11273 bc_expand_expr (TYPE_SIZE (type));
11274 opcode = storeBLK;
11275 }
11276 else
11277 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11278
11279 if (opcode == neverneverland)
11280 abort ();
11281
11282 bc_emit_bytecode (opcode);
11283
11284 #ifdef DEBUG_PRINT_CODE
11285 fputc ('\n', stderr);
11286 #endif
11287 }
11288
11289
11290 /* Allocate local stack space sufficient to hold a value of the given
11291 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11292 integral power of 2. A special case is locals of type VOID, which
11293 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11294 remapped into the corresponding attribute of SI. */
11295
11296 rtx
11297 bc_allocate_local (size, alignment)
11298 int size, alignment;
11299 {
11300 rtx retval;
11301 int byte_alignment;
11302
11303 if (size < 0)
11304 abort ();
11305
11306 /* Normalize size and alignment */
11307 if (!size)
11308 size = UNITS_PER_WORD;
11309
11310 if (alignment < BITS_PER_UNIT)
11311 byte_alignment = 1 << (INT_ALIGN - 1);
11312 else
11313 /* Align */
11314 byte_alignment = alignment / BITS_PER_UNIT;
11315
11316 if (local_vars_size & (byte_alignment - 1))
11317 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11318
11319 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11320 local_vars_size += size;
11321
11322 return retval;
11323 }
11324
11325
11326 /* Allocate variable-sized local array. Variable-sized arrays are
11327 actually pointers to the address in memory where they are stored. */
11328
11329 rtx
11330 bc_allocate_variable_array (size)
11331 tree size;
11332 {
11333 rtx retval;
11334 const int ptralign = (1 << (PTR_ALIGN - 1));
11335
11336 /* Align pointer */
11337 if (local_vars_size & ptralign)
11338 local_vars_size += ptralign - (local_vars_size & ptralign);
11339
11340 /* Note down local space needed: pointer to block; also return
11341 dummy rtx */
11342
11343 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11344 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11345 return retval;
11346 }
11347
11348
11349 /* Push the machine address for the given external variable offset. */
11350
11351 void
11352 bc_load_externaddr (externaddr)
11353 rtx externaddr;
11354 {
11355 bc_emit_bytecode (constP);
11356 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11357 BYTECODE_BC_LABEL (externaddr)->offset);
11358
11359 #ifdef DEBUG_PRINT_CODE
11360 fputc ('\n', stderr);
11361 #endif
11362 }
11363
11364
11365 /* Like above, but expects an IDENTIFIER. */
11366
11367 void
11368 bc_load_externaddr_id (id, offset)
11369 tree id;
11370 int offset;
11371 {
11372 if (!IDENTIFIER_POINTER (id))
11373 abort ();
11374
11375 bc_emit_bytecode (constP);
11376 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11377
11378 #ifdef DEBUG_PRINT_CODE
11379 fputc ('\n', stderr);
11380 #endif
11381 }
11382
11383
11384 /* Push the machine address for the given local variable offset. */
11385
11386 void
11387 bc_load_localaddr (localaddr)
11388 rtx localaddr;
11389 {
11390 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11391 }
11392
11393
11394 /* Push the machine address for the given parameter offset.
11395 NOTE: offset is in bits. */
11396
11397 void
11398 bc_load_parmaddr (parmaddr)
11399 rtx parmaddr;
11400 {
11401 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11402 / BITS_PER_UNIT));
11403 }
11404
11405
11406 /* Convert a[i] into *(a + i). */
11407
11408 tree
11409 bc_canonicalize_array_ref (exp)
11410 tree exp;
11411 {
11412 tree type = TREE_TYPE (exp);
11413 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11414 TREE_OPERAND (exp, 0));
11415 tree index = TREE_OPERAND (exp, 1);
11416
11417
11418 /* Convert the integer argument to a type the same size as a pointer
11419 so the multiply won't overflow spuriously. */
11420
11421 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11422 index = convert (type_for_size (POINTER_SIZE, 0), index);
11423
11424 /* The array address isn't volatile even if the array is.
11425 (Of course this isn't terribly relevant since the bytecode
11426 translator treats nearly everything as volatile anyway.) */
11427 TREE_THIS_VOLATILE (array_adr) = 0;
11428
11429 return build1 (INDIRECT_REF, type,
11430 fold (build (PLUS_EXPR,
11431 TYPE_POINTER_TO (type),
11432 array_adr,
11433 fold (build (MULT_EXPR,
11434 TYPE_POINTER_TO (type),
11435 index,
11436 size_in_bytes (type))))));
11437 }
11438
11439
11440 /* Load the address of the component referenced by the given
11441 COMPONENT_REF expression.
11442
11443 Returns innermost lvalue. */
11444
11445 tree
11446 bc_expand_component_address (exp)
11447 tree exp;
11448 {
11449 tree tem, chain;
11450 enum machine_mode mode;
11451 int bitpos = 0;
11452 HOST_WIDE_INT SIval;
11453
11454
11455 tem = TREE_OPERAND (exp, 1);
11456 mode = DECL_MODE (tem);
11457
11458
11459 /* Compute cumulative bit offset for nested component refs
11460 and array refs, and find the ultimate containing object. */
11461
11462 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11463 {
11464 if (TREE_CODE (tem) == COMPONENT_REF)
11465 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11466 else
11467 if (TREE_CODE (tem) == ARRAY_REF
11468 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11469 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11470
11471 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11472 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11473 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11474 else
11475 break;
11476 }
11477
11478 bc_expand_expr (tem);
11479
11480
11481 /* For bitfields also push their offset and size */
11482 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11483 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
11484 else
11485 if (SIval = bitpos / BITS_PER_UNIT)
11486 bc_emit_instruction (addconstPSI, SIval);
11487
11488 return (TREE_OPERAND (exp, 1));
11489 }
11490
11491
11492 /* Emit code to push two SI constants */
11493
11494 void
11495 bc_push_offset_and_size (offset, size)
11496 HOST_WIDE_INT offset, size;
11497 {
11498 bc_emit_instruction (constSI, offset);
11499 bc_emit_instruction (constSI, size);
11500 }
11501
11502
11503 /* Emit byte code to push the address of the given lvalue expression to
11504 the stack. If it's a bit field, we also push offset and size info.
11505
11506 Returns innermost component, which allows us to determine not only
11507 its type, but also whether it's a bitfield. */
11508
11509 tree
11510 bc_expand_address (exp)
11511 tree exp;
11512 {
11513 /* Safeguard */
11514 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11515 return (exp);
11516
11517
11518 switch (TREE_CODE (exp))
11519 {
11520 case ARRAY_REF:
11521
11522 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11523
11524 case COMPONENT_REF:
11525
11526 return (bc_expand_component_address (exp));
11527
11528 case INDIRECT_REF:
11529
11530 bc_expand_expr (TREE_OPERAND (exp, 0));
11531
11532 /* For variable-sized types: retrieve pointer. Sometimes the
11533 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11534 also make sure we have an operand, just in case... */
11535
11536 if (TREE_OPERAND (exp, 0)
11537 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11538 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11539 bc_emit_instruction (loadP);
11540
11541 /* If packed, also return offset and size */
11542 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11543
11544 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11545 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11546
11547 return (TREE_OPERAND (exp, 0));
11548
11549 case FUNCTION_DECL:
11550
11551 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11552 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11553 break;
11554
11555 case PARM_DECL:
11556
11557 bc_load_parmaddr (DECL_RTL (exp));
11558
11559 /* For variable-sized types: retrieve pointer */
11560 if (TYPE_SIZE (TREE_TYPE (exp))
11561 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11562 bc_emit_instruction (loadP);
11563
11564 /* If packed, also return offset and size */
11565 if (DECL_BIT_FIELD (exp))
11566 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11567 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11568
11569 break;
11570
11571 case RESULT_DECL:
11572
11573 bc_emit_instruction (returnP);
11574 break;
11575
11576 case VAR_DECL:
11577
11578 #if 0
11579 if (BYTECODE_LABEL (DECL_RTL (exp)))
11580 bc_load_externaddr (DECL_RTL (exp));
11581 #endif
11582
11583 if (DECL_EXTERNAL (exp))
11584 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11585 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11586 else
11587 bc_load_localaddr (DECL_RTL (exp));
11588
11589 /* For variable-sized types: retrieve pointer */
11590 if (TYPE_SIZE (TREE_TYPE (exp))
11591 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11592 bc_emit_instruction (loadP);
11593
11594 /* If packed, also return offset and size */
11595 if (DECL_BIT_FIELD (exp))
11596 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11597 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11598
11599 break;
11600
11601 case STRING_CST:
11602 {
11603 rtx r;
11604
11605 bc_emit_bytecode (constP);
11606 r = output_constant_def (exp);
11607 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11608
11609 #ifdef DEBUG_PRINT_CODE
11610 fputc ('\n', stderr);
11611 #endif
11612 }
11613 break;
11614
11615 default:
11616
11617 abort();
11618 break;
11619 }
11620
11621 /* Most lvalues don't have components. */
11622 return (exp);
11623 }
11624
11625
11626 /* Emit a type code to be used by the runtime support in handling
11627 parameter passing. The type code consists of the machine mode
11628 plus the minimal alignment shifted left 8 bits. */
11629
11630 tree
11631 bc_runtime_type_code (type)
11632 tree type;
11633 {
11634 int val;
11635
11636 switch (TREE_CODE (type))
11637 {
11638 case VOID_TYPE:
11639 case INTEGER_TYPE:
11640 case REAL_TYPE:
11641 case COMPLEX_TYPE:
11642 case ENUMERAL_TYPE:
11643 case POINTER_TYPE:
11644 case RECORD_TYPE:
11645
11646 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11647 break;
11648
11649 case ERROR_MARK:
11650
11651 val = 0;
11652 break;
11653
11654 default:
11655
11656 abort ();
11657 }
11658 return build_int_2 (val, 0);
11659 }
11660
11661
11662 /* Generate constructor label */
11663
11664 char *
11665 bc_gen_constr_label ()
11666 {
11667 static int label_counter;
11668 static char label[20];
11669
11670 sprintf (label, "*LR%d", label_counter++);
11671
11672 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11673 }
11674
11675
11676 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11677 expand the constructor data as static data, and push a pointer to it.
11678 The pointer is put in the pointer table and is retrieved by a constP
11679 bytecode instruction. We then loop and store each constructor member in
11680 the corresponding component. Finally, we return the original pointer on
11681 the stack. */
11682
11683 void
11684 bc_expand_constructor (constr)
11685 tree constr;
11686 {
11687 char *l;
11688 HOST_WIDE_INT ptroffs;
11689 rtx constr_rtx;
11690
11691
11692 /* Literal constructors are handled as constants, whereas
11693 non-literals are evaluated and stored element by element
11694 into the data segment. */
11695
11696 /* Allocate space in proper segment and push pointer to space on stack.
11697 */
11698
11699 l = bc_gen_constr_label ();
11700
11701 if (TREE_CONSTANT (constr))
11702 {
11703 text_section ();
11704
11705 bc_emit_const_labeldef (l);
11706 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11707 }
11708 else
11709 {
11710 data_section ();
11711
11712 bc_emit_data_labeldef (l);
11713 bc_output_data_constructor (constr);
11714 }
11715
11716
11717 /* Add reference to pointer table and recall pointer to stack;
11718 this code is common for both types of constructors: literals
11719 and non-literals. */
11720
11721 ptroffs = bc_define_pointer (l);
11722 bc_emit_instruction (constP, ptroffs);
11723
11724 /* This is all that has to be done if it's a literal. */
11725 if (TREE_CONSTANT (constr))
11726 return;
11727
11728
11729 /* At this point, we have the pointer to the structure on top of the stack.
11730 Generate sequences of store_memory calls for the constructor. */
11731
11732 /* constructor type is structure */
11733 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11734 {
11735 register tree elt;
11736
11737 /* If the constructor has fewer fields than the structure,
11738 clear the whole structure first. */
11739
11740 if (list_length (CONSTRUCTOR_ELTS (constr))
11741 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11742 {
11743 bc_emit_instruction (duplicate);
11744 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11745 bc_emit_instruction (clearBLK);
11746 }
11747
11748 /* Store each element of the constructor into the corresponding
11749 field of TARGET. */
11750
11751 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11752 {
11753 register tree field = TREE_PURPOSE (elt);
11754 register enum machine_mode mode;
11755 int bitsize;
11756 int bitpos;
11757 int unsignedp;
11758
11759 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11760 mode = DECL_MODE (field);
11761 unsignedp = TREE_UNSIGNED (field);
11762
11763 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11764
11765 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11766 /* The alignment of TARGET is
11767 at least what its type requires. */
11768 VOIDmode, 0,
11769 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11770 int_size_in_bytes (TREE_TYPE (constr)));
11771 }
11772 }
11773 else
11774
11775 /* Constructor type is array */
11776 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11777 {
11778 register tree elt;
11779 register int i;
11780 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11781 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11782 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11783 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11784
11785 /* If the constructor has fewer fields than the structure,
11786 clear the whole structure first. */
11787
11788 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11789 {
11790 bc_emit_instruction (duplicate);
11791 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11792 bc_emit_instruction (clearBLK);
11793 }
11794
11795
11796 /* Store each element of the constructor into the corresponding
11797 element of TARGET, determined by counting the elements. */
11798
11799 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11800 elt;
11801 elt = TREE_CHAIN (elt), i++)
11802 {
11803 register enum machine_mode mode;
11804 int bitsize;
11805 int bitpos;
11806 int unsignedp;
11807
11808 mode = TYPE_MODE (elttype);
11809 bitsize = GET_MODE_BITSIZE (mode);
11810 unsignedp = TREE_UNSIGNED (elttype);
11811
11812 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11813 /* * TYPE_SIZE_UNIT (elttype) */ );
11814
11815 bc_store_field (elt, bitsize, bitpos, mode,
11816 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11817 /* The alignment of TARGET is
11818 at least what its type requires. */
11819 VOIDmode, 0,
11820 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11821 int_size_in_bytes (TREE_TYPE (constr)));
11822 }
11823
11824 }
11825 }
11826
11827
11828 /* Store the value of EXP (an expression tree) into member FIELD of
11829 structure at address on stack, which has type TYPE, mode MODE and
11830 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11831 structure.
11832
11833 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11834 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11835
11836 void
11837 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11838 value_mode, unsignedp, align, total_size)
11839 int bitsize, bitpos;
11840 enum machine_mode mode;
11841 tree field, exp, type;
11842 enum machine_mode value_mode;
11843 int unsignedp;
11844 int align;
11845 int total_size;
11846 {
11847
11848 /* Expand expression and copy pointer */
11849 bc_expand_expr (exp);
11850 bc_emit_instruction (over);
11851
11852
11853 /* If the component is a bit field, we cannot use addressing to access
11854 it. Use bit-field techniques to store in it. */
11855
11856 if (DECL_BIT_FIELD (field))
11857 {
11858 bc_store_bit_field (bitpos, bitsize, unsignedp);
11859 return;
11860 }
11861 else
11862 /* Not bit field */
11863 {
11864 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11865
11866 /* Advance pointer to the desired member */
11867 if (offset)
11868 bc_emit_instruction (addconstPSI, offset);
11869
11870 /* Store */
11871 bc_store_memory (type, field);
11872 }
11873 }
11874
11875
11876 /* Store SI/SU in bitfield */
11877
11878 void
11879 bc_store_bit_field (offset, size, unsignedp)
11880 int offset, size, unsignedp;
11881 {
11882 /* Push bitfield offset and size */
11883 bc_push_offset_and_size (offset, size);
11884
11885 /* Store */
11886 bc_emit_instruction (sstoreBI);
11887 }
11888
11889
11890 /* Load SI/SU from bitfield */
11891
11892 void
11893 bc_load_bit_field (offset, size, unsignedp)
11894 int offset, size, unsignedp;
11895 {
11896 /* Push bitfield offset and size */
11897 bc_push_offset_and_size (offset, size);
11898
11899 /* Load: sign-extend if signed, else zero-extend */
11900 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11901 }
11902
11903
11904 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11905 (adjust stack pointer upwards), negative means add that number of
11906 levels (adjust the stack pointer downwards). Only positive values
11907 normally make sense. */
11908
11909 void
11910 bc_adjust_stack (nlevels)
11911 int nlevels;
11912 {
11913 switch (nlevels)
11914 {
11915 case 0:
11916 break;
11917
11918 case 2:
11919 bc_emit_instruction (drop);
11920
11921 case 1:
11922 bc_emit_instruction (drop);
11923 break;
11924
11925 default:
11926
11927 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11928 stack_depth -= nlevels;
11929 }
11930
11931 #if defined (VALIDATE_STACK_FOR_BC)
11932 VALIDATE_STACK_FOR_BC ();
11933 #endif
11934 }
This page took 0.5791 seconds and 4 git commands to generate.