1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "machmode.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "obstack.h"
27 #include "flags.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "except.h"
31 #include "function.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
34 #include "expr.h"
35 #include "insn-config.h"
36 #include "recog.h"
37 #include "output.h"
38 #include "typeclass.h"
39
40 #include "bytecode.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
43 #include "bc-optab.h"
44 #include "bc-emit.h"
45
46
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
48
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
51
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
54
55 #ifdef PUSH_ROUNDING
56
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
59 #endif
60
61 #endif
62
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
70
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
73
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
80 int cse_not_expected;
81
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
86
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust;
90
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop;
96
97 /* A list of all cleanups which belong to the arguments of
98 function calls being expanded by expand_call. */
99 tree cleanups_this_call;
100
101 /* When temporaries are created by TARGET_EXPRs, they are created at
102 this level of temp_slot_level, so that they can remain allocated
103 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
104 of TARGET_EXPRs. */
105 int target_temp_slot_level;
106
107 /* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs
109 returned. */
110 static rtx saveregs_value;
111
112 /* Similarly for __builtin_apply_args. */
113 static rtx apply_args_value;
114
115 /* This structure is used by move_by_pieces to describe the move to
116 be performed. */
117
118 struct move_by_pieces
119 {
120 rtx to;
121 rtx to_addr;
122 int autinc_to;
123 int explicit_inc_to;
124 int to_struct;
125 rtx from;
126 rtx from_addr;
127 int autinc_from;
128 int explicit_inc_from;
129 int from_struct;
130 int len;
131 int offset;
132 int reverse;
133 };
134
135 /* This structure is used by clear_by_pieces to describe the clear to
136 be performed. */
137
138 struct clear_by_pieces
139 {
140 rtx to;
141 rtx to_addr;
142 int autinc_to;
143 int explicit_inc_to;
144 int to_struct;
145 int len;
146 int offset;
147 int reverse;
148 };
149
150 /* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
153
154 extern int local_vars_size;
155 extern int stack_depth;
156 extern int max_stack_depth;
157 extern struct obstack permanent_obstack;
158 extern rtx arg_pointer_save_area;
159
160 static rtx enqueue_insn PROTO((rtx, rtx));
161 static int queued_subexp_p PROTO((rtx));
162 static void init_queue PROTO((void));
163 static void move_by_pieces PROTO((rtx, rtx, int, int));
164 static int move_by_pieces_ninsns PROTO((unsigned int, int));
165 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
166 struct move_by_pieces *));
167 static void clear_by_pieces PROTO((rtx, int, int));
168 static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
169 struct clear_by_pieces *));
170 static int is_zeros_p PROTO((tree));
171 static int mostly_zeros_p PROTO((tree));
172 static void store_constructor PROTO((tree, rtx, int));
173 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
174 enum machine_mode, int, int, int));
175 static int get_inner_unaligned_p PROTO((tree));
176 static tree save_noncopied_parts PROTO((tree, tree));
177 static tree init_noncopied_parts PROTO((tree, tree));
178 static int safe_from_p PROTO((rtx, tree));
179 static int fixed_type_p PROTO((tree));
180 static rtx var_rtx PROTO((tree));
181 static int get_pointer_alignment PROTO((tree, unsigned));
182 static tree string_constant PROTO((tree, tree *));
183 static tree c_strlen PROTO((tree));
184 static rtx expand_builtin PROTO((tree, rtx, rtx,
185 enum machine_mode, int));
186 static int apply_args_size PROTO((void));
187 static int apply_result_size PROTO((void));
188 static rtx result_vector PROTO((int, rtx));
189 static rtx expand_builtin_apply_args PROTO((void));
190 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
191 static void expand_builtin_return PROTO((rtx));
192 static rtx expand_increment PROTO((tree, int, int));
193 void bc_expand_increment PROTO((struct increment_operator *, tree));
194 rtx bc_allocate_local PROTO((int, int));
195 void bc_store_memory PROTO((tree, tree));
196 tree bc_expand_component_address PROTO((tree));
197 tree bc_expand_address PROTO((tree));
198 void bc_expand_constructor PROTO((tree));
199 void bc_adjust_stack PROTO((int));
200 tree bc_canonicalize_array_ref PROTO((tree));
201 void bc_load_memory PROTO((tree, tree));
202 void bc_load_externaddr PROTO((rtx));
203 void bc_load_externaddr_id PROTO((tree, int));
204 void bc_load_localaddr PROTO((rtx));
205 void bc_load_parmaddr PROTO((rtx));
206 static void preexpand_calls PROTO((tree));
207 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
208 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
209 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
210 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
211 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
212 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
213 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
214 static tree defer_cleanups_to PROTO((tree));
215 extern tree truthvalue_conversion PROTO((tree));
216
217 /* Record for each mode whether we can move a register directly to or
218 from an object of that mode in memory. If we can't, we won't try
219 to use that mode directly when accessing a field of that mode. */
220
221 static char direct_load[NUM_MACHINE_MODES];
222 static char direct_store[NUM_MACHINE_MODES];
223
224 /* MOVE_RATIO is the number of move instructions that is better than
225 a block move. */
226
227 #ifndef MOVE_RATIO
228 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
229 #define MOVE_RATIO 2
230 #else
231 /* A value of around 6 would minimize code size; infinity would minimize
232 execution time. */
233 #define MOVE_RATIO 15
234 #endif
235 #endif
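/* For example, a target whose block-move pattern is cheap could put a
   hypothetical

	#define MOVE_RATIO 3

   in its tm.h, so that any copy needing more than 3 scalar move insns
   is emitted as a movstr pattern or a library call instead.  */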
236
237 /* This array records the insn_code of insns to perform block moves. */
238 enum insn_code movstr_optab[NUM_MACHINE_MODES];
239
240 /* This array records the insn_code of insns to perform block clears. */
241 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
242
243 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
244
245 #ifndef SLOW_UNALIGNED_ACCESS
246 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
247 #endif
248
249 /* Register mappings for target machines without register windows. */
250 #ifndef INCOMING_REGNO
251 #define INCOMING_REGNO(OUT) (OUT)
252 #endif
253 #ifndef OUTGOING_REGNO
254 #define OUTGOING_REGNO(IN) (IN)
255 #endif
256 \f
257 /* Maps used to convert modes to const, load, and store bytecodes. */
258 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
259 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
260 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
261
262 /* Initialize maps used to convert modes to const, load, and store
263 bytecodes. */
264
265 void
266 bc_init_mode_to_opcode_maps ()
267 {
268 int mode;
269
270 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
271 mode_to_const_map[mode] =
272 mode_to_load_map[mode] =
273 mode_to_store_map[mode] = neverneverland;
274
275 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
276 mode_to_const_map[(int) SYM] = CONST; \
277 mode_to_load_map[(int) SYM] = LOAD; \
278 mode_to_store_map[(int) SYM] = STORE;
279
280 #include "modemap.def"
281 #undef DEF_MODEMAP
282 }
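/* Each line of modemap.def is one DEF_MODEMAP invocation.  As an
   illustrative sketch (the opcode names here are hypothetical), an
   entry such as

	DEF_MODEMAP (SImode, beSI, beuSI, constSI, loadSI, storeSI)

   would expand, under the definition above, into the assignment
   mode_to_const_map[(int) SImode] = constSI, and likewise for the
   load and store maps.  */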
283 \f
284 /* This is run once per compilation to set up which modes can be used
285 directly in memory and to initialize the block move optab. */
286
287 void
288 init_expr_once ()
289 {
290 rtx insn, pat;
291 enum machine_mode mode;
292 /* Try indexing by frame ptr and try by stack ptr.
293 It is known that on the Convex the stack ptr isn't a valid index.
294 With luck, one or the other is valid on any machine. */
295 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
296 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
297
298 start_sequence ();
299 insn = emit_insn (gen_rtx (SET, 0, 0));
300 pat = PATTERN (insn);
301
302 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
303 mode = (enum machine_mode) ((int) mode + 1))
304 {
305 int regno;
306 rtx reg;
307 int num_clobbers;
308
309 direct_load[(int) mode] = direct_store[(int) mode] = 0;
310 PUT_MODE (mem, mode);
311 PUT_MODE (mem1, mode);
312
313 /* See if there is some register that can be used in this mode and
314 directly loaded or stored from memory. */
315
316 if (mode != VOIDmode && mode != BLKmode)
317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
318 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
319 regno++)
320 {
321 if (! HARD_REGNO_MODE_OK (regno, mode))
322 continue;
323
324 reg = gen_rtx (REG, mode, regno);
325
326 SET_SRC (pat) = mem;
327 SET_DEST (pat) = reg;
328 if (recog (pat, insn, &num_clobbers) >= 0)
329 direct_load[(int) mode] = 1;
330
331 SET_SRC (pat) = mem1;
332 SET_DEST (pat) = reg;
333 if (recog (pat, insn, &num_clobbers) >= 0)
334 direct_load[(int) mode] = 1;
335
336 SET_SRC (pat) = reg;
337 SET_DEST (pat) = mem;
338 if (recog (pat, insn, &num_clobbers) >= 0)
339 direct_store[(int) mode] = 1;
340
341 SET_SRC (pat) = reg;
342 SET_DEST (pat) = mem1;
343 if (recog (pat, insn, &num_clobbers) >= 0)
344 direct_store[(int) mode] = 1;
345 }
346 }
347
348 end_sequence ();
349 }
350
351 /* This is run at the start of compiling a function. */
352
353 void
354 init_expr ()
355 {
356 init_queue ();
357
358 pending_stack_adjust = 0;
359 inhibit_defer_pop = 0;
360 cleanups_this_call = 0;
361 saveregs_value = 0;
362 apply_args_value = 0;
363 forced_labels = 0;
364 }
365
366 /* Save all variables describing the current status into the structure *P.
367 This is used before starting a nested function. */
368
369 void
370 save_expr_status (p)
371 struct function *p;
372 {
373 /* Instead of saving the postincrement queue, empty it. */
374 emit_queue ();
375
376 p->pending_stack_adjust = pending_stack_adjust;
377 p->inhibit_defer_pop = inhibit_defer_pop;
378 p->cleanups_this_call = cleanups_this_call;
379 p->saveregs_value = saveregs_value;
380 p->apply_args_value = apply_args_value;
381 p->forced_labels = forced_labels;
382
383 pending_stack_adjust = 0;
384 inhibit_defer_pop = 0;
385 cleanups_this_call = 0;
386 saveregs_value = 0;
387 apply_args_value = 0;
388 forced_labels = 0;
389 }
390
391 /* Restore all variables describing the current status from the structure *P.
392 This is used after a nested function. */
393
394 void
395 restore_expr_status (p)
396 struct function *p;
397 {
398 pending_stack_adjust = p->pending_stack_adjust;
399 inhibit_defer_pop = p->inhibit_defer_pop;
400 cleanups_this_call = p->cleanups_this_call;
401 saveregs_value = p->saveregs_value;
402 apply_args_value = p->apply_args_value;
403 forced_labels = p->forced_labels;
404 }
405 \f
406 /* Manage the queue of increment instructions to be output
407 for POSTINCREMENT_EXPR expressions, etc. */
408
409 static rtx pending_chain;
410
411 /* Queue up to increment (or change) VAR later. BODY says how:
412 BODY should be the same thing you would pass to emit_insn
413 to increment right away. It will go to emit_insn later on.
414
415 The value is a QUEUED expression to be used in place of VAR
416 where you want to guarantee the pre-incrementation value of VAR. */
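/* A minimal sketch (hypothetical rtx V): to queue the increment
   V = V + 1, one would write

	rtx old = enqueue_insn (v, gen_move_insn (v, plus_constant (v, 1)));

   and then use OLD wherever the pre-increment value is needed; the
   increment itself is not emitted until emit_queue is called.  */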
417
418 static rtx
419 enqueue_insn (var, body)
420 rtx var, body;
421 {
422 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
423 var, NULL_RTX, NULL_RTX, body, pending_chain);
424 return pending_chain;
425 }
426
427 /* Use protect_from_queue to convert a QUEUED expression
428 into something that you can put immediately into an instruction.
429 If the queued incrementation has not happened yet,
430 protect_from_queue returns the variable itself.
431 If the incrementation has happened, protect_from_queue returns a temp
432 that contains a copy of the old value of the variable.
433
434 Any time an rtx which might possibly be a QUEUED is to be put
435 into an instruction, it must be passed through protect_from_queue first.
436 QUEUED expressions are not meaningful in instructions.
437
438 Do not pass a value through protect_from_queue and then hold
439 on to it for a while before putting it in an instruction!
440 If the queue is flushed in between, incorrect code will result. */
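/* As an illustrative sketch (hypothetical variable names, and SImode
   standing for whatever mode the load needs), code holding a
   possibly-QUEUED rtx P that wants to read through it immediately
   would do roughly

	rtx addr = protect_from_queue (p, 0);
	emit_insn (gen_move_insn (target, gen_rtx (MEM, SImode, addr)));
	emit_queue ();

   so the load sees the pre-increment value of P.  */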
441
442 rtx
443 protect_from_queue (x, modify)
444 register rtx x;
445 int modify;
446 {
447 register RTX_CODE code = GET_CODE (x);
448
449 #if 0 /* A QUEUED can hang around after the queue is forced out. */
450 /* Shortcut for most common case. */
451 if (pending_chain == 0)
452 return x;
453 #endif
454
455 if (code != QUEUED)
456 {
457 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
458 use of autoincrement. Make a copy of the contents of the memory
459 location rather than a copy of the address, but not if the value is
460 of mode BLKmode. Don't modify X in place since it might be
461 shared. */
462 if (code == MEM && GET_MODE (x) != BLKmode
463 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
464 {
465 register rtx y = XEXP (x, 0);
466 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
467
468 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
469 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
470 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
471
472 if (QUEUED_INSN (y))
473 {
474 register rtx temp = gen_reg_rtx (GET_MODE (new));
475 emit_insn_before (gen_move_insn (temp, new),
476 QUEUED_INSN (y));
477 return temp;
478 }
479 return new;
480 }
481 /* Otherwise, recursively protect the subexpressions of all
482 the kinds of rtx's that can contain a QUEUED. */
483 if (code == MEM)
484 {
485 rtx tem = protect_from_queue (XEXP (x, 0), 0);
486 if (tem != XEXP (x, 0))
487 {
488 x = copy_rtx (x);
489 XEXP (x, 0) = tem;
490 }
491 }
492 else if (code == PLUS || code == MULT)
493 {
494 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
495 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
496 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
497 {
498 x = copy_rtx (x);
499 XEXP (x, 0) = new0;
500 XEXP (x, 1) = new1;
501 }
502 }
503 return x;
504 }
505 /* If the increment has not happened, use the variable itself. */
506 if (QUEUED_INSN (x) == 0)
507 return QUEUED_VAR (x);
508 /* If the increment has happened and a pre-increment copy exists,
509 use that copy. */
510 if (QUEUED_COPY (x) != 0)
511 return QUEUED_COPY (x);
512 /* The increment has happened but we haven't set up a pre-increment copy.
513 Set one up now, and use it. */
514 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
515 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
516 QUEUED_INSN (x));
517 return QUEUED_COPY (x);
518 }
519
520 /* Return nonzero if X contains a QUEUED expression:
521 if it contains anything that will be altered by a queued increment.
522 We handle only combinations of MEM, PLUS, MINUS and MULT operators
523 since memory addresses generally contain only those. */
524
525 static int
526 queued_subexp_p (x)
527 rtx x;
528 {
529 register enum rtx_code code = GET_CODE (x);
530 switch (code)
531 {
532 case QUEUED:
533 return 1;
534 case MEM:
535 return queued_subexp_p (XEXP (x, 0));
536 case MULT:
537 case PLUS:
538 case MINUS:
539 return queued_subexp_p (XEXP (x, 0))
540 || queued_subexp_p (XEXP (x, 1));
541 }
542 return 0;
543 }
544
545 /* Perform all the pending incrementations. */
546
547 void
548 emit_queue ()
549 {
550 register rtx p;
551 while (p = pending_chain)
552 {
553 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
554 pending_chain = QUEUED_NEXT (p);
555 }
556 }
557
558 static void
559 init_queue ()
560 {
561 if (pending_chain)
562 abort ();
563 }
564 \f
565 /* Copy data from FROM to TO, where the machine modes are not the same.
566 Both modes may be integer, or both may be floating.
567 UNSIGNEDP should be nonzero if FROM is an unsigned type.
568 This causes zero-extension instead of sign-extension. */
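/* A minimal usage sketch (hypothetical SImode register R): to widen R
   into a fresh DImode register with zero-extension,

	rtx wide = gen_reg_rtx (DImode);
	convert_move (wide, r, 1);

   Passing 0 for UNSIGNEDP would request sign-extension instead.  */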
569
570 void
571 convert_move (to, from, unsignedp)
572 register rtx to, from;
573 int unsignedp;
574 {
575 enum machine_mode to_mode = GET_MODE (to);
576 enum machine_mode from_mode = GET_MODE (from);
577 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
578 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
579 enum insn_code code;
580 rtx libcall;
581
582 /* rtx code for making an equivalent value. */
583 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
584
585 to = protect_from_queue (to, 1);
586 from = protect_from_queue (from, 0);
587
588 if (to_real != from_real)
589 abort ();
590
591 /* If FROM is a SUBREG that indicates that we have already done at least
592 the required extension, strip it. We don't handle such SUBREGs as
593 TO here. */
594
595 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
596 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
597 >= GET_MODE_SIZE (to_mode))
598 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
599 from = gen_lowpart (to_mode, from), from_mode = to_mode;
600
601 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
602 abort ();
603
604 if (to_mode == from_mode
605 || (from_mode == VOIDmode && CONSTANT_P (from)))
606 {
607 emit_move_insn (to, from);
608 return;
609 }
610
611 if (to_real)
612 {
613 rtx value;
614
615 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
616 {
617 /* Try converting directly if the insn is supported. */
618 if ((code = can_extend_p (to_mode, from_mode, 0))
619 != CODE_FOR_nothing)
620 {
621 emit_unop_insn (code, to, from, UNKNOWN);
622 return;
623 }
624 }
625
626 #ifdef HAVE_trunchfqf2
627 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
628 {
629 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
630 return;
631 }
632 #endif
633 #ifdef HAVE_truncsfqf2
634 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
637 return;
638 }
639 #endif
640 #ifdef HAVE_truncdfqf2
641 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
644 return;
645 }
646 #endif
647 #ifdef HAVE_truncxfqf2
648 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
649 {
650 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
651 return;
652 }
653 #endif
654 #ifdef HAVE_trunctfqf2
655 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
656 {
657 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
658 return;
659 }
660 #endif
661
662 #ifdef HAVE_trunctqfhf2
663 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
664 {
665 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669 #ifdef HAVE_truncsfhf2
670 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
671 {
672 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
673 return;
674 }
675 #endif
676 #ifdef HAVE_truncdfhf2
677 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
678 {
679 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
680 return;
681 }
682 #endif
683 #ifdef HAVE_truncxfhf2
684 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
685 {
686 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
687 return;
688 }
689 #endif
690 #ifdef HAVE_trunctfhf2
691 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
692 {
693 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
694 return;
695 }
696 #endif
697
698 #ifdef HAVE_truncsftqf2
699 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
700 {
701 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
702 return;
703 }
704 #endif
705 #ifdef HAVE_truncdftqf2
706 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
707 {
708 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
709 return;
710 }
711 #endif
712 #ifdef HAVE_truncxftqf2
713 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
714 {
715 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
716 return;
717 }
718 #endif
719 #ifdef HAVE_trunctftqf2
720 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
721 {
722 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
723 return;
724 }
725 #endif
726
727 #ifdef HAVE_truncdfsf2
728 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
729 {
730 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
731 return;
732 }
733 #endif
734 #ifdef HAVE_truncxfsf2
735 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
736 {
737 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
738 return;
739 }
740 #endif
741 #ifdef HAVE_trunctfsf2
742 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
743 {
744 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
745 return;
746 }
747 #endif
748 #ifdef HAVE_truncxfdf2
749 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
750 {
751 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
752 return;
753 }
754 #endif
755 #ifdef HAVE_trunctfdf2
756 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
757 {
758 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
759 return;
760 }
761 #endif
762
763 libcall = (rtx) 0;
764 switch (from_mode)
765 {
766 case SFmode:
767 switch (to_mode)
768 {
769 case DFmode:
770 libcall = extendsfdf2_libfunc;
771 break;
772
773 case XFmode:
774 libcall = extendsfxf2_libfunc;
775 break;
776
777 case TFmode:
778 libcall = extendsftf2_libfunc;
779 break;
780 }
781 break;
782
783 case DFmode:
784 switch (to_mode)
785 {
786 case SFmode:
787 libcall = truncdfsf2_libfunc;
788 break;
789
790 case XFmode:
791 libcall = extenddfxf2_libfunc;
792 break;
793
794 case TFmode:
795 libcall = extenddftf2_libfunc;
796 break;
797 }
798 break;
799
800 case XFmode:
801 switch (to_mode)
802 {
803 case SFmode:
804 libcall = truncxfsf2_libfunc;
805 break;
806
807 case DFmode:
808 libcall = truncxfdf2_libfunc;
809 break;
810 }
811 break;
812
813 case TFmode:
814 switch (to_mode)
815 {
816 case SFmode:
817 libcall = trunctfsf2_libfunc;
818 break;
819
820 case DFmode:
821 libcall = trunctfdf2_libfunc;
822 break;
823 }
824 break;
825 }
826
827 if (libcall == (rtx) 0)
828 /* This conversion is not implemented yet. */
829 abort ();
830
831 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
832 1, from, from_mode);
833 emit_move_insn (to, value);
834 return;
835 }
836
837 /* Now both modes are integers. */
838
839 /* Handle expanding beyond a word. */
840 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
841 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
842 {
843 rtx insns;
844 rtx lowpart;
845 rtx fill_value;
846 rtx lowfrom;
847 int i;
848 enum machine_mode lowpart_mode;
849 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
850
851 /* Try converting directly if the insn is supported. */
852 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
853 != CODE_FOR_nothing)
854 {
855 /* If FROM is a SUBREG, put it into a register. Do this
856 so that we always generate the same set of insns for
857 better cse'ing; if an intermediate assignment occurred,
858 we won't be doing the operation directly on the SUBREG. */
859 if (optimize > 0 && GET_CODE (from) == SUBREG)
860 from = force_reg (from_mode, from);
861 emit_unop_insn (code, to, from, equiv_code);
862 return;
863 }
864 /* Next, try converting via full word. */
865 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
866 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
867 != CODE_FOR_nothing))
868 {
869 if (GET_CODE (to) == REG)
870 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
871 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
872 emit_unop_insn (code, to,
873 gen_lowpart (word_mode, to), equiv_code);
874 return;
875 }
876
877 /* No special multiword conversion insn; do it by hand. */
878 start_sequence ();
879
880 /* Since we will turn this into a no-conflict block, we must ensure
881 that the source does not overlap the target. */
882
883 if (reg_overlap_mentioned_p (to, from))
884 from = force_reg (from_mode, from);
885
886 /* Get a copy of FROM widened to a word, if necessary. */
887 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
888 lowpart_mode = word_mode;
889 else
890 lowpart_mode = from_mode;
891
892 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
893
894 lowpart = gen_lowpart (lowpart_mode, to);
895 emit_move_insn (lowpart, lowfrom);
896
897 /* Compute the value to put in each remaining word. */
898 if (unsignedp)
899 fill_value = const0_rtx;
900 else
901 {
902 #ifdef HAVE_slt
903 if (HAVE_slt
904 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
905 && STORE_FLAG_VALUE == -1)
906 {
907 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
908 lowpart_mode, 0, 0);
909 fill_value = gen_reg_rtx (word_mode);
910 emit_insn (gen_slt (fill_value));
911 }
912 else
913 #endif
914 {
915 fill_value
916 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
917 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
918 NULL_RTX, 0);
919 fill_value = convert_to_mode (word_mode, fill_value, 1);
920 }
921 }
922
923 /* Fill the remaining words. */
924 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
925 {
926 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
927 rtx subword = operand_subword (to, index, 1, to_mode);
928
929 if (subword == 0)
930 abort ();
931
932 if (fill_value != subword)
933 emit_move_insn (subword, fill_value);
934 }
935
936 insns = get_insns ();
937 end_sequence ();
938
939 emit_no_conflict_block (insns, to, from, NULL_RTX,
940 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
941 return;
942 }
943
944 /* Truncating multi-word to a word or less. */
945 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
946 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
947 {
948 if (!((GET_CODE (from) == MEM
949 && ! MEM_VOLATILE_P (from)
950 && direct_load[(int) to_mode]
951 && ! mode_dependent_address_p (XEXP (from, 0)))
952 || GET_CODE (from) == REG
953 || GET_CODE (from) == SUBREG))
954 from = force_reg (from_mode, from);
955 convert_move (to, gen_lowpart (word_mode, from), 0);
956 return;
957 }
958
959 /* Handle pointer conversion.  */ /* SPEE 900220 */
960 if (to_mode == PSImode)
961 {
962 if (from_mode != SImode)
963 from = convert_to_mode (SImode, from, unsignedp);
964
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2)
967 {
968 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
969 return;
970 }
971 #endif /* HAVE_truncsipsi2 */
972 abort ();
973 }
974
975 if (from_mode == PSImode)
976 {
977 if (to_mode != SImode)
978 {
979 from = convert_to_mode (SImode, from, unsignedp);
980 from_mode = SImode;
981 }
982 else
983 {
984 #ifdef HAVE_extendpsisi2
985 if (HAVE_extendpsisi2)
986 {
987 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
988 return;
989 }
990 #endif /* HAVE_extendpsisi2 */
991 abort ();
992 }
993 }
994
995 if (to_mode == PDImode)
996 {
997 if (from_mode != DImode)
998 from = convert_to_mode (DImode, from, unsignedp);
999
1000 #ifdef HAVE_truncdipdi2
1001 if (HAVE_truncdipdi2)
1002 {
1003 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1004 return;
1005 }
1006 #endif /* HAVE_truncdipdi2 */
1007 abort ();
1008 }
1009
1010 if (from_mode == PDImode)
1011 {
1012 if (to_mode != DImode)
1013 {
1014 from = convert_to_mode (DImode, from, unsignedp);
1015 from_mode = DImode;
1016 }
1017 else
1018 {
1019 #ifdef HAVE_extendpdidi2
1020 if (HAVE_extendpdidi2)
1021 {
1022 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1023 return;
1024 }
1025 #endif /* HAVE_extendpdidi2 */
1026 abort ();
1027 }
1028 }
1029
1030 /* Now follow all the conversions between integers
1031 no more than a word long. */
1032
1033 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1034 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1035 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1036 GET_MODE_BITSIZE (from_mode)))
1037 {
1038 if (!((GET_CODE (from) == MEM
1039 && ! MEM_VOLATILE_P (from)
1040 && direct_load[(int) to_mode]
1041 && ! mode_dependent_address_p (XEXP (from, 0)))
1042 || GET_CODE (from) == REG
1043 || GET_CODE (from) == SUBREG))
1044 from = force_reg (from_mode, from);
1045 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1046 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1047 from = copy_to_reg (from);
1048 emit_move_insn (to, gen_lowpart (to_mode, from));
1049 return;
1050 }
1051
1052 /* Handle extension. */
1053 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1054 {
1055 /* Convert directly if that works. */
1056 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1057 != CODE_FOR_nothing)
1058 {
1059 emit_unop_insn (code, to, from, equiv_code);
1060 return;
1061 }
1062 else
1063 {
1064 enum machine_mode intermediate;
1065
1066 /* Search for a mode to convert via. */
1067 for (intermediate = from_mode; intermediate != VOIDmode;
1068 intermediate = GET_MODE_WIDER_MODE (intermediate))
1069 if (((can_extend_p (to_mode, intermediate, unsignedp)
1070 != CODE_FOR_nothing)
1071 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1072 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode), GET_MODE_BITSIZE (intermediate))))
1073 && (can_extend_p (intermediate, from_mode, unsignedp)
1074 != CODE_FOR_nothing))
1075 {
1076 convert_move (to, convert_to_mode (intermediate, from,
1077 unsignedp), unsignedp);
1078 return;
1079 }
1080
1081 /* No suitable intermediate mode. */
1082 abort ();
1083 }
1084 }
1085
1086 /* Support special truncate insns for certain modes. */
1087
1088 if (from_mode == DImode && to_mode == SImode)
1089 {
1090 #ifdef HAVE_truncdisi2
1091 if (HAVE_truncdisi2)
1092 {
1093 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1094 return;
1095 }
1096 #endif
1097 convert_move (to, force_reg (from_mode, from), unsignedp);
1098 return;
1099 }
1100
1101 if (from_mode == DImode && to_mode == HImode)
1102 {
1103 #ifdef HAVE_truncdihi2
1104 if (HAVE_truncdihi2)
1105 {
1106 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1107 return;
1108 }
1109 #endif
1110 convert_move (to, force_reg (from_mode, from), unsignedp);
1111 return;
1112 }
1113
1114 if (from_mode == DImode && to_mode == QImode)
1115 {
1116 #ifdef HAVE_truncdiqi2
1117 if (HAVE_truncdiqi2)
1118 {
1119 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1120 return;
1121 }
1122 #endif
1123 convert_move (to, force_reg (from_mode, from), unsignedp);
1124 return;
1125 }
1126
1127 if (from_mode == SImode && to_mode == HImode)
1128 {
1129 #ifdef HAVE_truncsihi2
1130 if (HAVE_truncsihi2)
1131 {
1132 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1133 return;
1134 }
1135 #endif
1136 convert_move (to, force_reg (from_mode, from), unsignedp);
1137 return;
1138 }
1139
1140 if (from_mode == SImode && to_mode == QImode)
1141 {
1142 #ifdef HAVE_truncsiqi2
1143 if (HAVE_truncsiqi2)
1144 {
1145 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1146 return;
1147 }
1148 #endif
1149 convert_move (to, force_reg (from_mode, from), unsignedp);
1150 return;
1151 }
1152
1153 if (from_mode == HImode && to_mode == QImode)
1154 {
1155 #ifdef HAVE_trunchiqi2
1156 if (HAVE_trunchiqi2)
1157 {
1158 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1159 return;
1160 }
1161 #endif
1162 convert_move (to, force_reg (from_mode, from), unsignedp);
1163 return;
1164 }
1165
1166 if (from_mode == TImode && to_mode == DImode)
1167 {
1168 #ifdef HAVE_trunctidi2
1169 if (HAVE_trunctidi2)
1170 {
1171 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1172 return;
1173 }
1174 #endif
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1176 return;
1177 }
1178
1179 if (from_mode == TImode && to_mode == SImode)
1180 {
1181 #ifdef HAVE_trunctisi2
1182 if (HAVE_trunctisi2)
1183 {
1184 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1185 return;
1186 }
1187 #endif
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1189 return;
1190 }
1191
1192 if (from_mode == TImode && to_mode == HImode)
1193 {
1194 #ifdef HAVE_trunctihi2
1195 if (HAVE_trunctihi2)
1196 {
1197 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1198 return;
1199 }
1200 #endif
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1202 return;
1203 }
1204
1205 if (from_mode == TImode && to_mode == QImode)
1206 {
1207 #ifdef HAVE_trunctiqi2
1208 if (HAVE_trunctiqi2)
1209 {
1210 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1211 return;
1212 }
1213 #endif
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1215 return;
1216 }
1217
1218 /* Handle truncation of volatile memrefs, and so on;
1219 the things that couldn't be truncated directly,
1220 and for which there was no special instruction. */
1221 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1222 {
1223 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1224 emit_move_insn (to, temp);
1225 return;
1226 }
1227
1228 /* Mode combination is not recognized. */
1229 abort ();
1230 }
1231
1232 /* Return an rtx for a value that would result
1233 from converting X to mode MODE.
1234 Both X and MODE may be floating, or both integer.
1235 UNSIGNEDP is nonzero if X is an unsigned value.
1236 This can be done by referring to a part of X in place
1237 or by copying to a new temporary with conversion.
1238
1239 This function *must not* call protect_from_queue
1240 except when putting X into an insn (in which case convert_move does it). */
1241
1242 rtx
1243 convert_to_mode (mode, x, unsignedp)
1244 enum machine_mode mode;
1245 rtx x;
1246 int unsignedp;
1247 {
1248 return convert_modes (mode, VOIDmode, x, unsignedp);
1249 }
1250
1251 /* Return an rtx for a value that would result
1252 from converting X from mode OLDMODE to mode MODE.
1253 Both modes may be floating, or both integer.
1254 UNSIGNEDP is nonzero if X is an unsigned value.
1255
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1258
1259 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1260
1261 This function *must not* call protect_from_queue
1262 except when putting X into an insn (in which case convert_move does it). */
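/* A worked example, assuming a host where HOST_BITS_PER_WIDE_INT is 32
   and DImode is 64 bits wide:

	convert_modes (DImode, SImode, GEN_INT (-1), 1)

   must produce the DImode constant 0xffffffff, not -1; the CONST_INT
   special case below builds that value with immed_double_const rather
   than letting gen_lowpart sign-extend it.  */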
1263
1264 rtx
1265 convert_modes (mode, oldmode, x, unsignedp)
1266 enum machine_mode mode, oldmode;
1267 rtx x;
1268 int unsignedp;
1269 {
1270 register rtx temp;
1271
1272 /* If FROM is a SUBREG that indicates that we have already done at least
1273 the required extension, strip it. */
1274
1275 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1276 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1277 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1278 x = gen_lowpart (mode, x);
1279
1280 if (GET_MODE (x) != VOIDmode)
1281 oldmode = GET_MODE (x);
1282
1283 if (mode == oldmode)
1284 return x;
1285
1286 /* There is one case that we must handle specially: If we are converting
1287 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1288 we are to interpret the constant as unsigned, gen_lowpart will do
1289 the wrong if the constant appears negative. What we want to do is
1290 make the high-order word of the constant zero, not all ones. */
1291
1292 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1293 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1294 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1295 {
1296 HOST_WIDE_INT val = INTVAL (x);
1297
1298 if (oldmode != VOIDmode
1299 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1300 {
1301 int width = GET_MODE_BITSIZE (oldmode);
1302
1303 /* We need to zero extend VAL. */
1304 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1305 }
1306
1307 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1308 }
1309
1310 /* We can do this with a gen_lowpart if both desired and current modes
1311 are integer, and this is either a constant integer, a register, or a
1312 non-volatile MEM. Except for the constant case where MODE is no
1313 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1314
1315 if ((GET_CODE (x) == CONST_INT
1316 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1317 || (GET_MODE_CLASS (mode) == MODE_INT
1318 && GET_MODE_CLASS (oldmode) == MODE_INT
1319 && (GET_CODE (x) == CONST_DOUBLE
1320 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1321 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1322 && direct_load[(int) mode])
1323 || (GET_CODE (x) == REG
1324 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1325 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1326 {
1327 /* ?? If we don't know OLDMODE, we have to assume here that
1328 X does not need sign- or zero-extension. This may not be
1329 the case, but it's the best we can do. */
1330 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1331 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1332 {
1333 HOST_WIDE_INT val = INTVAL (x);
1334 int width = GET_MODE_BITSIZE (oldmode);
1335
1336 /* We must sign or zero-extend in this case. Start by
1337 zero-extending, then sign extend if we need to. */
1338 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1339 if (! unsignedp
1340 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1341 val |= (HOST_WIDE_INT) (-1) << width;
1342
1343 return GEN_INT (val);
1344 }
1345
1346 return gen_lowpart (mode, x);
1347 }
1348
1349 temp = gen_reg_rtx (mode);
1350 convert_move (temp, x, unsignedp);
1351 return temp;
1352 }
1353 \f
1354 /* Generate several move instructions to copy LEN bytes
1355 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1356 The caller must pass FROM and TO
1357 through protect_from_queue before calling.
1358 ALIGN (in bytes) is maximum alignment we can assume. */
1359
1360 static void
1361 move_by_pieces (to, from, len, align)
1362 rtx to, from;
1363 int len, align;
1364 {
1365 struct move_by_pieces data;
1366 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1367 int max_size = MOVE_MAX + 1;
1368
1369 data.offset = 0;
1370 data.to_addr = to_addr;
1371 data.from_addr = from_addr;
1372 data.to = to;
1373 data.from = from;
1374 data.autinc_to
1375 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1376 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1377 data.autinc_from
1378 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1379 || GET_CODE (from_addr) == POST_INC
1380 || GET_CODE (from_addr) == POST_DEC);
1381
1382 data.explicit_inc_from = 0;
1383 data.explicit_inc_to = 0;
1384 data.reverse
1385 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1386 if (data.reverse) data.offset = len;
1387 data.len = len;
1388
1389 data.to_struct = MEM_IN_STRUCT_P (to);
1390 data.from_struct = MEM_IN_STRUCT_P (from);
1391
1392 /* If copying requires more than two move insns,
1393 copy addresses to registers (to make displacements shorter)
1394 and use post-increment if available. */
1395 if (!(data.autinc_from && data.autinc_to)
1396 && move_by_pieces_ninsns (len, align) > 2)
1397 {
1398 #ifdef HAVE_PRE_DECREMENT
1399 if (data.reverse && ! data.autinc_from)
1400 {
1401 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1402 data.autinc_from = 1;
1403 data.explicit_inc_from = -1;
1404 }
1405 #endif
1406 #ifdef HAVE_POST_INCREMENT
1407 if (! data.autinc_from)
1408 {
1409 data.from_addr = copy_addr_to_reg (from_addr);
1410 data.autinc_from = 1;
1411 data.explicit_inc_from = 1;
1412 }
1413 #endif
1414 if (!data.autinc_from && CONSTANT_P (from_addr))
1415 data.from_addr = copy_addr_to_reg (from_addr);
1416 #ifdef HAVE_PRE_DECREMENT
1417 if (data.reverse && ! data.autinc_to)
1418 {
1419 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1420 data.autinc_to = 1;
1421 data.explicit_inc_to = -1;
1422 }
1423 #endif
1424 #ifdef HAVE_POST_INCREMENT
1425 if (! data.reverse && ! data.autinc_to)
1426 {
1427 data.to_addr = copy_addr_to_reg (to_addr);
1428 data.autinc_to = 1;
1429 data.explicit_inc_to = 1;
1430 }
1431 #endif
1432 if (!data.autinc_to && CONSTANT_P (to_addr))
1433 data.to_addr = copy_addr_to_reg (to_addr);
1434 }
1435
1436 if (! SLOW_UNALIGNED_ACCESS
1437 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1438 align = MOVE_MAX;
1439
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes. */
1442
1443 while (max_size > 1)
1444 {
1445 enum machine_mode mode = VOIDmode, tmode;
1446 enum insn_code icode;
1447
1448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1449 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1450 if (GET_MODE_SIZE (tmode) < max_size)
1451 mode = tmode;
1452
1453 if (mode == VOIDmode)
1454 break;
1455
1456 icode = mov_optab->handlers[(int) mode].insn_code;
1457 if (icode != CODE_FOR_nothing
1458 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1459 GET_MODE_SIZE (mode)))
1460 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1461
1462 max_size = GET_MODE_SIZE (mode);
1463 }
1464
1465 /* The code above should have handled everything. */
1466 if (data.len > 0)
1467 abort ();
1468 }
1469
1470 /* Return number of insns required to move L bytes by pieces.
1471 ALIGN (in bytes) is maximum alignment we can assume. */
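/* For example, on a hypothetical target with MOVE_MAX == 4, a 10-byte
   block with ALIGN == 4 costs two SImode moves plus one HImode move,
   so the function returns 3; emit_block_move compares that count
   against MOVE_RATIO.  */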
1472
1473 static int
1474 move_by_pieces_ninsns (l, align)
1475 unsigned int l;
1476 int align;
1477 {
1478 register int n_insns = 0;
1479 int max_size = MOVE_MAX + 1;
1480
1481 if (! SLOW_UNALIGNED_ACCESS
1482 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1483 align = MOVE_MAX;
1484
1485 while (max_size > 1)
1486 {
1487 enum machine_mode mode = VOIDmode, tmode;
1488 enum insn_code icode;
1489
1490 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1491 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1492 if (GET_MODE_SIZE (tmode) < max_size)
1493 mode = tmode;
1494
1495 if (mode == VOIDmode)
1496 break;
1497
1498 icode = mov_optab->handlers[(int) mode].insn_code;
1499 if (icode != CODE_FOR_nothing
1500 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1501 GET_MODE_SIZE (mode)))
1502 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1503
1504 max_size = GET_MODE_SIZE (mode);
1505 }
1506
1507 return n_insns;
1508 }
1509
1510 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1511 with move instructions for mode MODE. GENFUN is the gen_... function
1512 to make a move insn for that mode. DATA has all the other info. */
1513
1514 static void
1515 move_by_pieces_1 (genfun, mode, data)
1516 rtx (*genfun) ();
1517 enum machine_mode mode;
1518 struct move_by_pieces *data;
1519 {
1520 register int size = GET_MODE_SIZE (mode);
1521 register rtx to1, from1;
1522
1523 while (data->len >= size)
1524 {
1525 if (data->reverse) data->offset -= size;
1526
1527 to1 = (data->autinc_to
1528 ? gen_rtx (MEM, mode, data->to_addr)
1529 : change_address (data->to, mode,
1530 plus_constant (data->to_addr, data->offset)));
1531 MEM_IN_STRUCT_P (to1) = data->to_struct;
1532 from1 =
1533 (data->autinc_from
1534 ? gen_rtx (MEM, mode, data->from_addr)
1535 : change_address (data->from, mode,
1536 plus_constant (data->from_addr, data->offset)));
1537 MEM_IN_STRUCT_P (from1) = data->from_struct;
1538
1539 #ifdef HAVE_PRE_DECREMENT
1540 if (data->explicit_inc_to < 0)
1541 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1542 if (data->explicit_inc_from < 0)
1543 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1544 #endif
1545
1546 emit_insn ((*genfun) (to1, from1));
1547 #ifdef HAVE_POST_INCREMENT
1548 if (data->explicit_inc_to > 0)
1549 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1550 if (data->explicit_inc_from > 0)
1551 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1552 #endif
1553
1554 if (! data->reverse) data->offset += size;
1555
1556 data->len -= size;
1557 }
1558 }
1559 \f
1560 /* Emit code to move a block Y to a block X.
1561 This may be done with string-move instructions,
1562 with multiple scalar move instructions, or with a library call.
1563
1564 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1565 with mode BLKmode.
1566 SIZE is an rtx that says how long they are.
1567 ALIGN is the maximum alignment we can assume they have,
1568 measured in bytes. */
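/* A minimal usage sketch (hypothetical BLKmode MEM rtx's DST and SRC):

	emit_block_move (dst, src, GEN_INT (16), 4);

   Since the size is a small constant, this is expanded inline by
   move_by_pieces when that takes fewer than MOVE_RATIO insns;
   otherwise a movstr pattern or a library call is used.  */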
1569
1570 void
1571 emit_block_move (x, y, size, align)
1572 rtx x, y;
1573 rtx size;
1574 int align;
1575 {
1576 if (GET_MODE (x) != BLKmode)
1577 abort ();
1578
1579 if (GET_MODE (y) != BLKmode)
1580 abort ();
1581
1582 x = protect_from_queue (x, 1);
1583 y = protect_from_queue (y, 0);
1584 size = protect_from_queue (size, 0);
1585
1586 if (GET_CODE (x) != MEM)
1587 abort ();
1588 if (GET_CODE (y) != MEM)
1589 abort ();
1590 if (size == 0)
1591 abort ();
1592
1593 if (GET_CODE (size) == CONST_INT
1594 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1595 move_by_pieces (x, y, INTVAL (size), align);
1596 else
1597 {
1598 /* Try the most limited insn first, because there's no point
1599 including more than one in the machine description unless
1600 the more limited one has some advantage. */
1601
1602 rtx opalign = GEN_INT (align);
1603 enum machine_mode mode;
1604
1605 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1606 mode = GET_MODE_WIDER_MODE (mode))
1607 {
1608 enum insn_code code = movstr_optab[(int) mode];
1609
1610 if (code != CODE_FOR_nothing
1611 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1612 here because if SIZE is less than the mode mask, as it is
1613 returned by the macro, it will definitely be less than the
1614 actual mode mask. */
1615 && ((GET_CODE (size) == CONST_INT
1616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1617 <= GET_MODE_MASK (mode)))
1618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1619 && (insn_operand_predicate[(int) code][0] == 0
1620 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1621 && (insn_operand_predicate[(int) code][1] == 0
1622 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1623 && (insn_operand_predicate[(int) code][3] == 0
1624 || (*insn_operand_predicate[(int) code][3]) (opalign,
1625 VOIDmode)))
1626 {
1627 rtx op2;
1628 rtx last = get_last_insn ();
1629 rtx pat;
1630
1631 op2 = convert_to_mode (mode, size, 1);
1632 if (insn_operand_predicate[(int) code][2] != 0
1633 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1634 op2 = copy_to_mode_reg (mode, op2);
1635
1636 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1637 if (pat)
1638 {
1639 emit_insn (pat);
1640 return;
1641 }
1642 else
1643 delete_insns_since (last);
1644 }
1645 }
1646
1647 #ifdef TARGET_MEM_FUNCTIONS
1648 emit_library_call (memcpy_libfunc, 0,
1649 VOIDmode, 3, XEXP (x, 0), Pmode,
1650 XEXP (y, 0), Pmode,
1651 convert_to_mode (TYPE_MODE (sizetype), size,
1652 TREE_UNSIGNED (sizetype)),
1653 TYPE_MODE (sizetype));
1654 #else
1655 emit_library_call (bcopy_libfunc, 0,
1656 VOIDmode, 3, XEXP (y, 0), Pmode,
1657 XEXP (x, 0), Pmode,
1658 convert_to_mode (TYPE_MODE (integer_type_node), size,
1659 TREE_UNSIGNED (integer_type_node)),
1660 TYPE_MODE (integer_type_node));
1661 #endif
1662 }
1663 }
1664 \f
1665 /* Copy all or part of a value X into registers starting at REGNO.
1666 The number of registers to be filled is NREGS. */
1667
1668 void
1669 move_block_to_reg (regno, x, nregs, mode)
1670 int regno;
1671 rtx x;
1672 int nregs;
1673 enum machine_mode mode;
1674 {
1675 int i;
1676 rtx pat, last;
1677
1678 if (nregs == 0)
1679 return;
1680
1681 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1682 x = validize_mem (force_const_mem (mode, x));
1683
1684 /* See if the machine can do this with a load multiple insn. */
1685 #ifdef HAVE_load_multiple
1686 if (HAVE_load_multiple)
1687 {
1688 last = get_last_insn ();
1689 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1690 GEN_INT (nregs));
1691 if (pat)
1692 {
1693 emit_insn (pat);
1694 return;
1695 }
1696 else
1697 delete_insns_since (last);
1698 }
1699 #endif
1700
1701 for (i = 0; i < nregs; i++)
1702 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1703 operand_subword_force (x, i, mode));
1704 }
1705
1706 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1707 The number of registers to be filled is NREGS. SIZE indicates the number
1708 of bytes in the object X. */
1709
1710
1711 void
1712 move_block_from_reg (regno, x, nregs, size)
1713 int regno;
1714 rtx x;
1715 int nregs;
1716 int size;
1717 {
1718 int i;
1719 rtx pat, last;
1720
1721 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1722 to the left before storing to memory. */
1723 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1724 {
1725 rtx tem = operand_subword (x, 0, 1, BLKmode);
1726 rtx shift;
1727
1728 if (tem == 0)
1729 abort ();
1730
1731 shift = expand_shift (LSHIFT_EXPR, word_mode,
1732 gen_rtx (REG, word_mode, regno),
1733 build_int_2 ((UNITS_PER_WORD - size)
1734 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1735 emit_move_insn (tem, shift);
1736 return;
1737 }
1738
1739 /* See if the machine can do this with a store multiple insn. */
1740 #ifdef HAVE_store_multiple
1741 if (HAVE_store_multiple)
1742 {
1743 last = get_last_insn ();
1744 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1745 GEN_INT (nregs));
1746 if (pat)
1747 {
1748 emit_insn (pat);
1749 return;
1750 }
1751 else
1752 delete_insns_since (last);
1753 }
1754 #endif
1755
1756 for (i = 0; i < nregs; i++)
1757 {
1758 rtx tem = operand_subword (x, i, 1, BLKmode);
1759
1760 if (tem == 0)
1761 abort ();
1762
1763 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1764 }
1765 }
1766
1767 /* Emit code to move a block Y to a block X, where X is non-consecutive
1768 registers represented by a PARALLEL. */
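/* Each element of the PARALLEL pairs a target register with the byte
   offset of its piece within Y; for example (hypothetical register
   numbers), a DImode value split across two SImode registers could be
   described as

	(parallel [(expr_list (reg:SI 3) (const_int 0))
		   (expr_list (reg:SI 4) (const_int 4))])  */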
1769
1770 void
1771 emit_group_load (x, y)
1772 rtx x, y;
1773 {
1774 rtx target_reg, source;
1775 int i;
1776
1777 if (GET_CODE (x) != PARALLEL)
1778 abort ();
1779
1780 /* Check for a NULL entry, used to indicate that the parameter goes
1781 both on the stack and in registers. */
1782 if (XEXP (XVECEXP (x, 0, 0), 0))
1783 i = 0;
1784 else
1785 i = 1;
1786
1787 for (; i < XVECLEN (x, 0); i++)
1788 {
1789 rtx element = XVECEXP (x, 0, i);
1790
1791 target_reg = XEXP (element, 0);
1792
1793 if (GET_CODE (y) == MEM)
1794 source = change_address (y, GET_MODE (target_reg),
1795 plus_constant (XEXP (y, 0),
1796 INTVAL (XEXP (element, 1))));
1797 else if (XEXP (element, 1) == const0_rtx)
1798 {
1799 if (GET_MODE (target_reg) == GET_MODE (y))
1800 source = y;
1801 /* Allow for the target_reg to be smaller than the input register
1802 to allow for AIX with 4 DF arguments after a single SI arg. The
1803 last DF argument will only load 1 word into the integer registers,
1804 but load a DF value into the float registers. */
1805 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1806 <= GET_MODE_SIZE (GET_MODE (y)))
1807 source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
1808 else
1809 abort ();
1810 }
1811 else
1812 abort ();
1813
1814 emit_move_insn (target_reg, source);
1815 }
1816 }
1817
1818 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1819 registers represented by a PARALLEL. */
1820
1821 void
1822 emit_group_store (x, y)
1823 rtx x, y;
1824 {
1825 rtx source_reg, target;
1826 int i;
1827
1828 if (GET_CODE (y) != PARALLEL)
1829 abort ();
1830
1831 /* Check for a NULL entry, used to indicate that the parameter goes
1832 both on the stack and in registers. */
1833 if (XEXP (XVECEXP (y, 0, 0), 0))
1834 i = 0;
1835 else
1836 i = 1;
1837
1838 for (; i < XVECLEN (y, 0); i++)
1839 {
1840 rtx element = XVECEXP (y, 0, i);
1841
1842 source_reg = XEXP (element, 0);
1843
1844 if (GET_CODE (x) == MEM)
1845 target = change_address (x, GET_MODE (source_reg),
1846 plus_constant (XEXP (x, 0),
1847 INTVAL (XEXP (element, 1))));
1848 else if (XEXP (element, 1) == const0_rtx)
1849 {
1850 target = x;
1851 if (GET_MODE (target) != GET_MODE (source_reg))
1852 target = gen_lowpart (GET_MODE (source_reg), target);
1853 }
1854 else
1855 abort ();
1856
1857 emit_move_insn (target, source_reg);
1858 }
1859 }
1860
1861 /* Add a USE expression for REG to the (possibly empty) list pointed
1862 to by CALL_FUSAGE. REG must denote a hard register. */
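/* For example, a caller passing the static chain in a fixed register
   would record that use (hypothetical sketch, assuming the target
   defines STATIC_CHAIN_REGNUM) as

	use_reg (&call_fusage, gen_rtx (REG, Pmode, STATIC_CHAIN_REGNUM));

   before attaching CALL_FUSAGE to the call insn.  */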
1863
1864 void
1865 use_reg (call_fusage, reg)
1866 rtx *call_fusage, reg;
1867 {
1868 if (GET_CODE (reg) != REG
1869 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1870 abort();
1871
1872 *call_fusage
1873 = gen_rtx (EXPR_LIST, VOIDmode,
1874 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1875 }
1876
1877 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1878 starting at REGNO. All of these registers must be hard registers. */
1879
1880 void
1881 use_regs (call_fusage, regno, nregs)
1882 rtx *call_fusage;
1883 int regno;
1884 int nregs;
1885 {
1886 int i;
1887
1888 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1889 abort ();
1890
1891 for (i = 0; i < nregs; i++)
1892 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1893 }
1894
1895 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1896 PARALLEL REGS. This is for calls that pass values in multiple
1897 non-contiguous locations. The Irix 6 ABI has examples of this. */
1898
1899 void
1900 use_group_regs (call_fusage, regs)
1901 rtx *call_fusage;
1902 rtx regs;
1903 {
1904 int i;
1905
1906 /* Check for a NULL entry, used to indicate that the parameter goes
1907 both on the stack and in registers. */
1908 if (XEXP (XVECEXP (regs, 0, 0), 0))
1909 i = 0;
1910 else
1911 i = 1;
1912
1913 for (; i < XVECLEN (regs, 0); i++)
1914 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1915 }
1916 \f
1917 /* Generate several move instructions to clear LEN bytes of block TO.
1918 (A MEM rtx with BLKmode). The caller must pass TO through
1919 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1920 we can assume. */
1921
1922 static void
1923 clear_by_pieces (to, len, align)
1924 rtx to;
1925 int len, align;
1926 {
1927 struct clear_by_pieces data;
1928 rtx to_addr = XEXP (to, 0);
1929 int max_size = MOVE_MAX + 1;
1930
1931 data.offset = 0;
1932 data.to_addr = to_addr;
1933 data.to = to;
1934 data.autinc_to
1935 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1936 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1937
1938 data.explicit_inc_to = 0;
1939 data.reverse
1940 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1941 if (data.reverse) data.offset = len;
1942 data.len = len;
1943
1944 data.to_struct = MEM_IN_STRUCT_P (to);
1945
1946 /* If clearing requires more than two move insns,
1947 copy the address to a register (to make displacements shorter)
1948 and use post-increment if available. */
1949 if (!data.autinc_to
1950 && move_by_pieces_ninsns (len, align) > 2)
1951 {
1952 #ifdef HAVE_PRE_DECREMENT
1953 if (data.reverse && ! data.autinc_to)
1954 {
1955 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1956 data.autinc_to = 1;
1957 data.explicit_inc_to = -1;
1958 }
1959 #endif
1960 #ifdef HAVE_POST_INCREMENT
1961 if (! data.reverse && ! data.autinc_to)
1962 {
1963 data.to_addr = copy_addr_to_reg (to_addr);
1964 data.autinc_to = 1;
1965 data.explicit_inc_to = 1;
1966 }
1967 #endif
1968 if (!data.autinc_to && CONSTANT_P (to_addr))
1969 data.to_addr = copy_addr_to_reg (to_addr);
1970 }
1971
1972 if (! SLOW_UNALIGNED_ACCESS
1973 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1974 align = MOVE_MAX;
1975
1976 /* First move what we can in the largest integer mode, then go to
1977 successively smaller modes. */
1978
1979 while (max_size > 1)
1980 {
1981 enum machine_mode mode = VOIDmode, tmode;
1982 enum insn_code icode;
1983
1984 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1985 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1986 if (GET_MODE_SIZE (tmode) < max_size)
1987 mode = tmode;
1988
1989 if (mode == VOIDmode)
1990 break;
1991
1992 icode = mov_optab->handlers[(int) mode].insn_code;
1993 if (icode != CODE_FOR_nothing
1994 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1995 GET_MODE_SIZE (mode)))
1996 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
1997
1998 max_size = GET_MODE_SIZE (mode);
1999 }
2000
2001 /* The code above should have handled everything. */
2002 if (data.len != 0)
2003 abort ();
2004 }
2005
2006 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2007 with move instructions for mode MODE. GENFUN is the gen_... function
2008 to make a move insn for that mode. DATA has all the other info. */
2009
2010 static void
2011 clear_by_pieces_1 (genfun, mode, data)
2012 rtx (*genfun) ();
2013 enum machine_mode mode;
2014 struct clear_by_pieces *data;
2015 {
2016 register int size = GET_MODE_SIZE (mode);
2017 register rtx to1;
2018
2019 while (data->len >= size)
2020 {
2021 if (data->reverse) data->offset -= size;
2022
2023 to1 = (data->autinc_to
2024 ? gen_rtx (MEM, mode, data->to_addr)
2025 : change_address (data->to, mode,
2026 plus_constant (data->to_addr, data->offset)));
2027 MEM_IN_STRUCT_P (to1) = data->to_struct;
2028
2029 #ifdef HAVE_PRE_DECREMENT
2030 if (data->explicit_inc_to < 0)
2031 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2032 #endif
2033
2034 emit_insn ((*genfun) (to1, const0_rtx));
2035 #ifdef HAVE_POST_INCREMENT
2036 if (data->explicit_inc_to > 0)
2037 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2038 #endif
2039
2040 if (! data->reverse) data->offset += size;
2041
2042 data->len -= size;
2043 }
2044 }
2045 \f
2046 /* Write zeros through the storage of OBJECT.
2047 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2048 the maximum alignment we can assume it has, measured in bytes. */
2049
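/* Dispatch sketch: a small constant SIZE is handled by clear_by_pieces;
   otherwise we try the target's clrstr patterns from the narrowest mode
   up, and finally fall back to a library call (memset or bzero,
   depending on TARGET_MEM_FUNCTIONS).  */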
2050 void
2051 clear_storage (object, size, align)
2052 rtx object;
2053 rtx size;
2054 int align;
2055 {
2056 if (GET_MODE (object) == BLKmode)
2057 {
2058 object = protect_from_queue (object, 1);
2059 size = protect_from_queue (size, 0);
2060
2061 if (GET_CODE (size) == CONST_INT
2062 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2063 clear_by_pieces (object, INTVAL (size), align);
2064
2065 else
2066 {
2067 /* Try the most limited insn first, because there's no point
2068 including more than one in the machine description unless
2069 the more limited one has some advantage. */
2070
2071 rtx opalign = GEN_INT (align);
2072 enum machine_mode mode;
2073
2074 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2075 mode = GET_MODE_WIDER_MODE (mode))
2076 {
2077 enum insn_code code = clrstr_optab[(int) mode];
2078
2079 if (code != CODE_FOR_nothing
2080 /* We don't need MODE to be narrower than
2081 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2082 the mode mask, as it is returned by the macro, it will
2083 definitely be less than the actual mode mask. */
2084 && ((GET_CODE (size) == CONST_INT
2085 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2086 <= GET_MODE_MASK (mode)))
2087 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2088 && (insn_operand_predicate[(int) code][0] == 0
2089 || (*insn_operand_predicate[(int) code][0]) (object,
2090 BLKmode))
2091 && (insn_operand_predicate[(int) code][2] == 0
2092 || (*insn_operand_predicate[(int) code][2]) (opalign,
2093 VOIDmode)))
2094 {
2095 rtx op1;
2096 rtx last = get_last_insn ();
2097 rtx pat;
2098
2099 op1 = convert_to_mode (mode, size, 1);
2100 if (insn_operand_predicate[(int) code][1] != 0
2101 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2102 mode))
2103 op1 = copy_to_mode_reg (mode, op1);
2104
2105 pat = GEN_FCN ((int) code) (object, op1, opalign);
2106 if (pat)
2107 {
2108 emit_insn (pat);
2109 return;
2110 }
2111 else
2112 delete_insns_since (last);
2113 }
2114 }
2115
2116
2117 #ifdef TARGET_MEM_FUNCTIONS
2118 emit_library_call (memset_libfunc, 0,
2119 VOIDmode, 3,
2120 XEXP (object, 0), Pmode,
2121 const0_rtx, TYPE_MODE (integer_type_node),
2122 convert_to_mode (TYPE_MODE (sizetype),
2123 size, TREE_UNSIGNED (sizetype)),
2124 TYPE_MODE (sizetype));
2125 #else
2126 emit_library_call (bzero_libfunc, 0,
2127 VOIDmode, 2,
2128 XEXP (object, 0), Pmode,
2129 convert_to_mode (TYPE_MODE (integer_type_node),
2130 size,
2131 TREE_UNSIGNED (integer_type_node)),
2132 TYPE_MODE (integer_type_node));
2133 #endif
2134 }
2135 }
2136 else
2137 emit_move_insn (object, const0_rtx);
2138 }
2139
2140 /* Generate code to copy Y into X.
2141 Both Y and X must have the same mode, except that
2142 Y can be a constant with VOIDmode.
2143 This mode cannot be BLKmode; use emit_block_move for that.
2144
2145 Return the last instruction emitted. */
2146
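/* Usage sketch (hypothetical pseudos): copying one SImode register to
   another is simply

       emit_move_insn (dest_reg, src_reg);

   a constant source that is not a legitimate operand is first forced
   into the constant pool by the code below.  */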
2147 rtx
2148 emit_move_insn (x, y)
2149 rtx x, y;
2150 {
2151 enum machine_mode mode = GET_MODE (x);
2152
2153 x = protect_from_queue (x, 1);
2154 y = protect_from_queue (y, 0);
2155
2156 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2157 abort ();
2158
2159 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2160 y = force_const_mem (mode, y);
2161
2162 /* If X or Y are memory references, verify that their addresses are valid
2163 for the machine. */
2164 if (GET_CODE (x) == MEM
2165 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2166 && ! push_operand (x, GET_MODE (x)))
2167 || (flag_force_addr
2168 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2169 x = change_address (x, VOIDmode, XEXP (x, 0));
2170
2171 if (GET_CODE (y) == MEM
2172 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2173 || (flag_force_addr
2174 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2175 y = change_address (y, VOIDmode, XEXP (y, 0));
2176
2177 if (mode == BLKmode)
2178 abort ();
2179
2180 return emit_move_insn_1 (x, y);
2181 }
2182
2183 /* Low level part of emit_move_insn.
2184 Called just like emit_move_insn, but assumes X and Y
2185 are basically valid. */
2186
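/* When MODE has no move pattern, the work is split: complex values are
   moved as real and imaginary halves, and other multi-word values are
   moved word by word, with a CLOBBER first so flow knows the old value
   of X dies.  */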
2187 rtx
2188 emit_move_insn_1 (x, y)
2189 rtx x, y;
2190 {
2191 enum machine_mode mode = GET_MODE (x);
2192 enum machine_mode submode;
2193 enum mode_class class = GET_MODE_CLASS (mode);
2194 int i;
2195
2196 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2197 return
2198 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2199
2200 /* Expand complex moves by moving real part and imag part, if possible. */
2201 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2202 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2203 * BITS_PER_UNIT),
2204 (class == MODE_COMPLEX_INT
2205 ? MODE_INT : MODE_FLOAT),
2206 0))
2207 && (mov_optab->handlers[(int) submode].insn_code
2208 != CODE_FOR_nothing))
2209 {
2210 /* Don't split destination if it is a stack push. */
2211 int stack = push_operand (x, GET_MODE (x));
2212 rtx insns;
2213
2214 /* If this is a stack push, push the highpart first, so it
2215 will be in the argument order.
2216
2217 In that case, change_address is used only to convert
2218 the mode, not to change the address. */
2219 if (stack)
2220 {
2221 /* Note that the real part always precedes the imag part in memory
2222 regardless of machine's endianness. */
2223 #ifdef STACK_GROWS_DOWNWARD
2224 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2225 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2226 gen_imagpart (submode, y)));
2227 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2228 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2229 gen_realpart (submode, y)));
2230 #else
2231 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2232 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2233 gen_realpart (submode, y)));
2234 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2235 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2236 gen_imagpart (submode, y)));
2237 #endif
2238 }
2239 else
2240 {
2241 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2242 (gen_realpart (submode, x), gen_realpart (submode, y)));
2243 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2244 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2245 }
2246
2247 return get_last_insn ();
2248 }
2249
2250 /* This will handle any multi-word mode that lacks a move_insn pattern.
2251 However, you will get better code if you define such patterns,
2252 even if they must turn into multiple assembler instructions. */
2253 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2254 {
2255 rtx last_insn = 0;
2256 rtx insns;
2257
2258 #ifdef PUSH_ROUNDING
2259
2260 /* If X is a push on the stack, do the push now and replace
2261 X with a reference to the stack pointer. */
2262 if (push_operand (x, GET_MODE (x)))
2263 {
2264 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2265 x = change_address (x, VOIDmode, stack_pointer_rtx);
2266 }
2267 #endif
2268
2269 /* Show the output dies here. */
2270 if (x != y)
2271 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2272
2273 for (i = 0;
2274 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2275 i++)
2276 {
2277 rtx xpart = operand_subword (x, i, 1, mode);
2278 rtx ypart = operand_subword (y, i, 1, mode);
2279
2280 /* If we can't get a part of Y, put Y into memory if it is a
2281 constant. Otherwise, force it into a register. If we still
2282 can't get a part of Y, abort. */
2283 if (ypart == 0 && CONSTANT_P (y))
2284 {
2285 y = force_const_mem (mode, y);
2286 ypart = operand_subword (y, i, 1, mode);
2287 }
2288 else if (ypart == 0)
2289 ypart = operand_subword_force (y, i, mode);
2290
2291 if (xpart == 0 || ypart == 0)
2292 abort ();
2293
2294 last_insn = emit_move_insn (xpart, ypart);
2295 }
2296
2297 return last_insn;
2298 }
2299 else
2300 abort ();
2301 }
2302 \f
2303 /* Pushing data onto the stack. */
2304
2305 /* Push a block of length SIZE (perhaps variable)
2306 and return an rtx to address the beginning of the block.
2307 Note that it is not possible for the value returned to be a QUEUED.
2308 The value may be virtual_outgoing_args_rtx.
2309
2310 EXTRA is the number of bytes of padding to push in addition to SIZE.
2311 BELOW nonzero means this padding comes at low addresses;
2312 otherwise, the padding comes at high addresses. */
2313
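/* Example (constant size): push_block (GEN_INT (16), 0, 0) adjusts the
   stack by 16 bytes; on a downward-growing stack the returned address
   is simply virtual_outgoing_args_rtx.  */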
2314 rtx
2315 push_block (size, extra, below)
2316 rtx size;
2317 int extra, below;
2318 {
2319 register rtx temp;
2320
2321 size = convert_modes (Pmode, ptr_mode, size, 1);
2322 if (CONSTANT_P (size))
2323 anti_adjust_stack (plus_constant (size, extra));
2324 else if (GET_CODE (size) == REG && extra == 0)
2325 anti_adjust_stack (size);
2326 else
2327 {
2328 rtx temp = copy_to_mode_reg (Pmode, size);
2329 if (extra != 0)
2330 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2331 temp, 0, OPTAB_LIB_WIDEN);
2332 anti_adjust_stack (temp);
2333 }
2334
2335 #ifdef STACK_GROWS_DOWNWARD
2336 temp = virtual_outgoing_args_rtx;
2337 if (extra != 0 && below)
2338 temp = plus_constant (temp, extra);
2339 #else
2340 if (GET_CODE (size) == CONST_INT)
2341 temp = plus_constant (virtual_outgoing_args_rtx,
2342 - INTVAL (size) - (below ? 0 : extra));
2343 else if (extra != 0 && !below)
2344 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2345 negate_rtx (Pmode, plus_constant (size, extra)));
2346 else
2347 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2348 negate_rtx (Pmode, size));
2349 #endif
2350
2351 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2352 }
2353
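/* Return a push rtx such as (pre_dec (reg sp)), according to
   STACK_PUSH_CODE; callers wrap it in a MEM to form a push
   destination.  */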
2354 rtx
2355 gen_push_operand ()
2356 {
2357 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2358 }
2359
2360 /* Generate code to push X onto the stack, assuming it has mode MODE and
2361 type TYPE.
2362 MODE is redundant except when X is a CONST_INT (since they don't
2363 carry mode info).
2364 SIZE is an rtx for the size of data to be copied (in bytes),
2365 needed only if X is BLKmode.
2366
2367 ALIGN (in bytes) is the maximum alignment we can assume.
2368
2369 If PARTIAL and REG are both nonzero, then copy that many of the first
2370 words of X into registers starting with REG, and push the rest of X.
2371 The amount of space pushed is decreased by PARTIAL words,
2372 rounded *down* to a multiple of PARM_BOUNDARY.
2373 REG must be a hard register in this case.
2374 If REG is zero but PARTIAL is not, take all other actions for an
2375 argument partially in registers, but do not actually load any
2376 registers.
2377
2378 EXTRA is the amount in bytes of extra space to leave next to this arg.
2379 This is ignored if an argument block has already been allocated.
2380
2381 On a machine that lacks real push insns, ARGS_ADDR is the address of
2382 the bottom of the argument block for this call. We use indexing off there
2383 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2384 argument block has not been preallocated.
2385
2386 ARGS_SO_FAR is the size of args previously pushed for this call. */
2387
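/* Illustrative call (all values hypothetical): pushing a 24-byte BLKmode
   argument whose first two words travel in registers starting at hard
   register 4 might look like

       emit_push_insn (x, BLKmode, type, GEN_INT (24), align, 2,
                       gen_rtx (REG, SImode, 4), 0, args_addr, args_so_far);

   only the part not covered by the two register words is copied to the
   stack; the register part is loaded at the `ret:' label below.  */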
2388 void
2389 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2390 args_addr, args_so_far)
2391 register rtx x;
2392 enum machine_mode mode;
2393 tree type;
2394 rtx size;
2395 int align;
2396 int partial;
2397 rtx reg;
2398 int extra;
2399 rtx args_addr;
2400 rtx args_so_far;
2401 {
2402 rtx xinner;
2403 enum direction stack_direction
2404 #ifdef STACK_GROWS_DOWNWARD
2405 = downward;
2406 #else
2407 = upward;
2408 #endif
2409
2410 /* Decide where to pad the argument: `downward' for below,
2411 `upward' for above, or `none' for don't pad it.
2412 Default is below for small data on big-endian machines; else above. */
2413 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2414
2415 /* If we're placing part of X into a register and part of X onto
2416 the stack, indicate that the entire register is clobbered to
2417 keep flow from thinking the unused part of the register is live. */
2418 if (partial > 0 && reg != 0)
2419 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2420
2421 /* Invert direction if stack is post-update. */
2422 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2423 if (where_pad != none)
2424 where_pad = (where_pad == downward ? upward : downward);
2425
2426 xinner = x = protect_from_queue (x, 0);
2427
2428 if (mode == BLKmode)
2429 {
2430 /* Copy a block into the stack, entirely or partially. */
2431
2432 register rtx temp;
2433 int used = partial * UNITS_PER_WORD;
2434 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2435 int skip;
2436
2437 if (size == 0)
2438 abort ();
2439
2440 used -= offset;
2441
2442 /* USED is now the # of bytes we need not copy to the stack
2443 because registers will take care of them. */
2444
2445 if (partial != 0)
2446 xinner = change_address (xinner, BLKmode,
2447 plus_constant (XEXP (xinner, 0), used));
2448
2449 /* If the partial register-part of the arg counts in its stack size,
2450 skip the part of stack space corresponding to the registers.
2451 Otherwise, start copying to the beginning of the stack space,
2452 by setting SKIP to 0. */
2453 #ifndef REG_PARM_STACK_SPACE
2454 skip = 0;
2455 #else
2456 skip = used;
2457 #endif
2458
2459 #ifdef PUSH_ROUNDING
2460 /* Do it with several push insns if that doesn't take lots of insns
2461 and if there is no difficulty with push insns that skip bytes
2462 on the stack for alignment purposes. */
2463 if (args_addr == 0
2464 && GET_CODE (size) == CONST_INT
2465 && skip == 0
2466 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2467 < MOVE_RATIO)
2468 /* Here we avoid the case of a structure whose weak alignment
2469 forces many pushes of a small amount of data,
2470 and such small pushes do rounding that causes trouble. */
2471 && ((! SLOW_UNALIGNED_ACCESS)
2472 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2473 || PUSH_ROUNDING (align) == align)
2474 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2475 {
2476 /* Push padding now if padding above and stack grows down,
2477 or if padding below and stack grows up.
2478 But if space already allocated, this has already been done. */
2479 if (extra && args_addr == 0
2480 && where_pad != none && where_pad != stack_direction)
2481 anti_adjust_stack (GEN_INT (extra));
2482
2483 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2484 INTVAL (size) - used, align);
2485 }
2486 else
2487 #endif /* PUSH_ROUNDING */
2488 {
2489 /* Otherwise make space on the stack and copy the data
2490 to the address of that space. */
2491
2492 /* Deduct words put into registers from the size we must copy. */
2493 if (partial != 0)
2494 {
2495 if (GET_CODE (size) == CONST_INT)
2496 size = GEN_INT (INTVAL (size) - used);
2497 else
2498 size = expand_binop (GET_MODE (size), sub_optab, size,
2499 GEN_INT (used), NULL_RTX, 0,
2500 OPTAB_LIB_WIDEN);
2501 }
2502
2503 /* Get the address of the stack space.
2504 In this case, we do not deal with EXTRA separately.
2505 A single stack adjust will do. */
2506 if (! args_addr)
2507 {
2508 temp = push_block (size, extra, where_pad == downward);
2509 extra = 0;
2510 }
2511 else if (GET_CODE (args_so_far) == CONST_INT)
2512 temp = memory_address (BLKmode,
2513 plus_constant (args_addr,
2514 skip + INTVAL (args_so_far)));
2515 else
2516 temp = memory_address (BLKmode,
2517 plus_constant (gen_rtx (PLUS, Pmode,
2518 args_addr, args_so_far),
2519 skip));
2520
2521 /* TEMP is the address of the block. Copy the data there. */
2522 if (GET_CODE (size) == CONST_INT
2523 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2524 < MOVE_RATIO))
2525 {
2526 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2527 INTVAL (size), align);
2528 goto ret;
2529 }
2530 /* Try the most limited insn first, because there's no point
2531 including more than one in the machine description unless
2532 the more limited one has some advantage. */
2533 #ifdef HAVE_movstrqi
2534 if (HAVE_movstrqi
2535 && GET_CODE (size) == CONST_INT
2536 && ((unsigned) INTVAL (size)
2537 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2538 {
2539 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2540 xinner, size, GEN_INT (align));
2541 if (pat != 0)
2542 {
2543 emit_insn (pat);
2544 goto ret;
2545 }
2546 }
2547 #endif
2548 #ifdef HAVE_movstrhi
2549 if (HAVE_movstrhi
2550 && GET_CODE (size) == CONST_INT
2551 && ((unsigned) INTVAL (size)
2552 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2553 {
2554 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2555 xinner, size, GEN_INT (align));
2556 if (pat != 0)
2557 {
2558 emit_insn (pat);
2559 goto ret;
2560 }
2561 }
2562 #endif
2563 #ifdef HAVE_movstrsi
2564 if (HAVE_movstrsi)
2565 {
2566 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2567 xinner, size, GEN_INT (align));
2568 if (pat != 0)
2569 {
2570 emit_insn (pat);
2571 goto ret;
2572 }
2573 }
2574 #endif
2575 #ifdef HAVE_movstrdi
2576 if (HAVE_movstrdi)
2577 {
2578 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2579 xinner, size, GEN_INT (align));
2580 if (pat != 0)
2581 {
2582 emit_insn (pat);
2583 goto ret;
2584 }
2585 }
2586 #endif
2587
2588 #ifndef ACCUMULATE_OUTGOING_ARGS
2589 /* If the source is referenced relative to the stack pointer,
2590 copy it to another register to stabilize it. We do not need
2591 to do this if we know that we won't be changing sp. */
2592
2593 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2594 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2595 temp = copy_to_reg (temp);
2596 #endif
2597
2598 /* Make inhibit_defer_pop nonzero around the library call
2599 to force it to pop the bcopy-arguments right away. */
2600 NO_DEFER_POP;
2601 #ifdef TARGET_MEM_FUNCTIONS
2602 emit_library_call (memcpy_libfunc, 0,
2603 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2604 convert_to_mode (TYPE_MODE (sizetype),
2605 size, TREE_UNSIGNED (sizetype)),
2606 TYPE_MODE (sizetype));
2607 #else
2608 emit_library_call (bcopy_libfunc, 0,
2609 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2610 convert_to_mode (TYPE_MODE (integer_type_node),
2611 size,
2612 TREE_UNSIGNED (integer_type_node)),
2613 TYPE_MODE (integer_type_node));
2614 #endif
2615 OK_DEFER_POP;
2616 }
2617 }
2618 else if (partial > 0)
2619 {
2620 /* Scalar partly in registers. */
2621
2622 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2623 int i;
2624 int not_stack;
2625 /* # words of start of argument
2626 that we must make space for but need not store. */
2627 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2628 int args_offset = INTVAL (args_so_far);
2629 int skip;
2630
2631 /* Push padding now if padding above and stack grows down,
2632 or if padding below and stack grows up.
2633 But if space already allocated, this has already been done. */
2634 if (extra && args_addr == 0
2635 && where_pad != none && where_pad != stack_direction)
2636 anti_adjust_stack (GEN_INT (extra));
2637
2638 /* If we make space by pushing it, we might as well push
2639 the real data. Otherwise, we can leave OFFSET nonzero
2640 and leave the space uninitialized. */
2641 if (args_addr == 0)
2642 offset = 0;
2643
2644 /* Now NOT_STACK gets the number of words that we don't need to
2645 allocate on the stack. */
2646 not_stack = partial - offset;
2647
2648 /* If the partial register-part of the arg counts in its stack size,
2649 skip the part of stack space corresponding to the registers.
2650 Otherwise, start copying to the beginning of the stack space,
2651 by setting SKIP to 0. */
2652 #ifndef REG_PARM_STACK_SPACE
2653 skip = 0;
2654 #else
2655 skip = not_stack;
2656 #endif
2657
2658 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2659 x = validize_mem (force_const_mem (mode, x));
2660
2661 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2662 SUBREGs of such registers are not allowed. */
2663 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2664 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2665 x = copy_to_reg (x);
2666
2667 /* Loop over all the words allocated on the stack for this arg. */
2668 /* We can do it by words, because any scalar bigger than a word
2669 has a size a multiple of a word. */
2670 #ifndef PUSH_ARGS_REVERSED
2671 for (i = not_stack; i < size; i++)
2672 #else
2673 for (i = size - 1; i >= not_stack; i--)
2674 #endif
2675 if (i >= not_stack + offset)
2676 emit_push_insn (operand_subword_force (x, i, mode),
2677 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2678 0, args_addr,
2679 GEN_INT (args_offset + ((i - not_stack + skip)
2680 * UNITS_PER_WORD)));
2681 }
2682 else
2683 {
2684 rtx addr;
2685
2686 /* Push padding now if padding above and stack grows down,
2687 or if padding below and stack grows up.
2688 But if space already allocated, this has already been done. */
2689 if (extra && args_addr == 0
2690 && where_pad != none && where_pad != stack_direction)
2691 anti_adjust_stack (GEN_INT (extra));
2692
2693 #ifdef PUSH_ROUNDING
2694 if (args_addr == 0)
2695 addr = gen_push_operand ();
2696 else
2697 #endif
2698 if (GET_CODE (args_so_far) == CONST_INT)
2699 addr
2700 = memory_address (mode,
2701 plus_constant (args_addr, INTVAL (args_so_far)));
2702 else
2703 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2704 args_so_far));
2705
2706 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2707 }
2708
2709 ret:
2710 /* If part should go in registers, copy that part
2711 into the appropriate registers. Do this now, at the end,
2712 since mem-to-mem copies above may do function calls. */
2713 if (partial > 0 && reg != 0)
2714 {
2715 /* Handle calls that pass values in multiple non-contiguous locations.
2716 The Irix 6 ABI has examples of this. */
2717 if (GET_CODE (reg) == PARALLEL)
2718 emit_group_load (reg, x);
2719 else
2720 move_block_to_reg (REGNO (reg), x, partial, mode);
2721 }
2722
2723 if (extra && args_addr == 0 && where_pad == stack_direction)
2724 anti_adjust_stack (GEN_INT (extra));
2725 }
2726 \f
2727 /* Expand an assignment that stores the value of FROM into TO.
2728 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2729 (This may contain a QUEUED rtx;
2730 if the value is constant, this rtx is a constant.)
2731 Otherwise, the returned value is NULL_RTX.
2732
2733 SUGGEST_REG is no longer actually used.
2734 It used to mean, copy the value through a register
2735 and return that register, if that is possible.
2736 We now use WANT_VALUE to decide whether to do this. */
2737
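/* Usage sketch: for a C assignment such as `s.f = x', the front end
   arrives here with TO being the COMPONENT_REF for s.f, and the
   bitfield path below uses store_field instead of a plain move.  */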
2738 rtx
2739 expand_assignment (to, from, want_value, suggest_reg)
2740 tree to, from;
2741 int want_value;
2742 int suggest_reg;
2743 {
2744 register rtx to_rtx = 0;
2745 rtx result;
2746
2747 /* Don't crash if the lhs of the assignment was erroneous. */
2748
2749 if (TREE_CODE (to) == ERROR_MARK)
2750 {
2751 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2752 return want_value ? result : NULL_RTX;
2753 }
2754
2755 if (output_bytecode)
2756 {
2757 tree dest_innermost;
2758
2759 bc_expand_expr (from);
2760 bc_emit_instruction (duplicate);
2761
2762 dest_innermost = bc_expand_address (to);
2763
2764 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2765 take care of it here. */
2766
2767 bc_store_memory (TREE_TYPE (to), dest_innermost);
2768 return NULL;
2769 }
2770
2771 /* Assignment of a structure component needs special treatment
2772 if the structure component's rtx is not simply a MEM.
2773 Assignment of an array element at a constant index, and assignment of
2774 an array element in an unaligned packed structure field, has the same
2775 problem. */
2776
2777 if (TREE_CODE (to) == COMPONENT_REF
2778 || TREE_CODE (to) == BIT_FIELD_REF
2779 || (TREE_CODE (to) == ARRAY_REF
2780 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2781 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2782 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2783 {
2784 enum machine_mode mode1;
2785 int bitsize;
2786 int bitpos;
2787 tree offset;
2788 int unsignedp;
2789 int volatilep = 0;
2790 tree tem;
2791 int alignment;
2792
2793 push_temp_slots ();
2794 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2795 &unsignedp, &volatilep, &alignment);
2796
2797 /* If we are going to use store_bit_field and extract_bit_field,
2798 make sure to_rtx will be safe for multiple use. */
2799
2800 if (mode1 == VOIDmode && want_value)
2801 tem = stabilize_reference (tem);
2802
2803 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2804 if (offset != 0)
2805 {
2806 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2807
2808 if (GET_CODE (to_rtx) != MEM)
2809 abort ();
2810 to_rtx = change_address (to_rtx, VOIDmode,
2811 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2812 force_reg (ptr_mode, offset_rtx)));
2813 }
2814 if (volatilep)
2815 {
2816 if (GET_CODE (to_rtx) == MEM)
2817 {
2818 /* When the offset is zero, to_rtx is the address of the
2819 structure we are storing into, and hence may be shared.
2820 We must make a new MEM before setting the volatile bit. */
2821 if (offset == 0)
2822 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2823 MEM_VOLATILE_P (to_rtx) = 1;
2824 }
2825 #if 0 /* This was turned off because, when a field is volatile
2826 in an object which is not volatile, the object may be in a register,
2827 and then we would abort over here. */
2828 else
2829 abort ();
2830 #endif
2831 }
2832
2833 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2834 (want_value
2835 /* Spurious cast makes HPUX compiler happy. */
2836 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2837 : VOIDmode),
2838 unsignedp,
2839 /* Required alignment of containing datum. */
2840 alignment,
2841 int_size_in_bytes (TREE_TYPE (tem)));
2842 preserve_temp_slots (result);
2843 free_temp_slots ();
2844 pop_temp_slots ();
2845
2846 /* If the value is meaningful, convert RESULT to the proper mode.
2847 Otherwise, return nothing. */
2848 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2849 TYPE_MODE (TREE_TYPE (from)),
2850 result,
2851 TREE_UNSIGNED (TREE_TYPE (to)))
2852 : NULL_RTX);
2853 }
2854
2855 /* If the rhs is a function call and its value is not an aggregate,
2856 call the function before we start to compute the lhs.
2857 This is needed for correct code for cases such as
2858 val = setjmp (buf) on machines where reference to val
2859 requires loading up part of an address in a separate insn.
2860
2861 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2862 a promoted variable where the zero- or sign- extension needs to be done.
2863 Handling this in the normal way is safe because no computation is done
2864 before the call. */
2865 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2866 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2867 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2868 {
2869 rtx value;
2870
2871 push_temp_slots ();
2872 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2873 if (to_rtx == 0)
2874 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2875
2876 /* Handle calls that return values in multiple non-contiguous locations.
2877 The Irix 6 ABI has examples of this. */
2878 if (GET_CODE (to_rtx) == PARALLEL)
2879 emit_group_load (to_rtx, value);
2880 else if (GET_MODE (to_rtx) == BLKmode)
2881 emit_block_move (to_rtx, value, expr_size (from),
2882 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2883 else
2884 emit_move_insn (to_rtx, value);
2885 preserve_temp_slots (to_rtx);
2886 free_temp_slots ();
2887 pop_temp_slots ();
2888 return want_value ? to_rtx : NULL_RTX;
2889 }
2890
2891 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2892 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2893
2894 if (to_rtx == 0)
2895 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2896
2897 /* Don't move directly into a return register. */
2898 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2899 {
2900 rtx temp;
2901
2902 push_temp_slots ();
2903 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2904 emit_move_insn (to_rtx, temp);
2905 preserve_temp_slots (to_rtx);
2906 free_temp_slots ();
2907 pop_temp_slots ();
2908 return want_value ? to_rtx : NULL_RTX;
2909 }
2910
2911 /* In case we are returning the contents of an object which overlaps
2912 the place the value is being stored, use a safe function when copying
2913 a value through a pointer into a structure value return block. */
2914 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2915 && current_function_returns_struct
2916 && !current_function_returns_pcc_struct)
2917 {
2918 rtx from_rtx, size;
2919
2920 push_temp_slots ();
2921 size = expr_size (from);
2922 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2923
2924 #ifdef TARGET_MEM_FUNCTIONS
2925 emit_library_call (memcpy_libfunc, 0,
2926 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2927 XEXP (from_rtx, 0), Pmode,
2928 convert_to_mode (TYPE_MODE (sizetype),
2929 size, TREE_UNSIGNED (sizetype)),
2930 TYPE_MODE (sizetype));
2931 #else
2932 emit_library_call (bcopy_libfunc, 0,
2933 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2934 XEXP (to_rtx, 0), Pmode,
2935 convert_to_mode (TYPE_MODE (integer_type_node),
2936 size, TREE_UNSIGNED (integer_type_node)),
2937 TYPE_MODE (integer_type_node));
2938 #endif
2939
2940 preserve_temp_slots (to_rtx);
2941 free_temp_slots ();
2942 pop_temp_slots ();
2943 return want_value ? to_rtx : NULL_RTX;
2944 }
2945
2946 /* Compute FROM and store the value in the rtx we got. */
2947
2948 push_temp_slots ();
2949 result = store_expr (from, to_rtx, want_value);
2950 preserve_temp_slots (result);
2951 free_temp_slots ();
2952 pop_temp_slots ();
2953 return want_value ? result : NULL_RTX;
2954 }
2955
2956 /* Generate code for computing expression EXP,
2957 and storing the value into TARGET.
2958 TARGET may contain a QUEUED rtx.
2959
2960 If WANT_VALUE is nonzero, return a copy of the value
2961 not in TARGET, so that we can be sure to use the proper
2962 value in a containing expression even if TARGET has something
2963 else stored in it. If possible, we copy the value through a pseudo
2964 and return that pseudo. Or, if the value is constant, we try to
2965 return the constant. In some cases, we return a pseudo
2966 copied *from* TARGET.
2967
2968 If the mode is BLKmode then we may return TARGET itself.
2969 It turns out that in BLKmode it doesn't cause a problem,
2970 because C has no operators that could combine two different
2971 assignments into the same BLKmode object with different values
2972 with no sequence point. Will other languages need this to
2973 be more thorough?
2974
2975 If WANT_VALUE is 0, we return NULL, to make sure
2976 to catch quickly any cases where the caller uses the value
2977 and fails to set WANT_VALUE. */
2978
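/* Contract sketch: store_expr (exp, target, 1) stores EXP into TARGET
   and returns an rtx usable as the value of the enclosing expression;
   store_expr (exp, target, 0) just stores and returns NULL_RTX.  */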
2979 rtx
2980 store_expr (exp, target, want_value)
2981 register tree exp;
2982 register rtx target;
2983 int want_value;
2984 {
2985 register rtx temp;
2986 int dont_return_target = 0;
2987
2988 if (TREE_CODE (exp) == COMPOUND_EXPR)
2989 {
2990 /* Perform first part of compound expression, then assign from second
2991 part. */
2992 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2993 emit_queue ();
2994 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2995 }
2996 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2997 {
2998 /* For conditional expression, get safe form of the target. Then
2999 test the condition, doing the appropriate assignment on either
3000 side. This avoids the creation of unnecessary temporaries.
3001 For non-BLKmode, it is more efficient not to do this. */
3002
3003 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3004 rtx flag = NULL_RTX;
3005 tree left_cleanups = NULL_TREE;
3006 tree right_cleanups = NULL_TREE;
3007 tree old_cleanups = cleanups_this_call;
3008
3009 /* Used to save a pointer to the place to put the setting of
3010 the flag that indicates if this side of the conditional was
3011 taken. We backpatch the code, if we find out later that we
3012 have any conditional cleanups that need to be performed. */
3013 rtx dest_right_flag = NULL_RTX;
3014 rtx dest_left_flag = NULL_RTX;
3015
3016 emit_queue ();
3017 target = protect_from_queue (target, 1);
3018
3019 do_pending_stack_adjust ();
3020 NO_DEFER_POP;
3021 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3022 store_expr (TREE_OPERAND (exp, 1), target, 0);
3023 dest_left_flag = get_last_insn ();
3024 /* Handle conditional cleanups, if any. */
3025 left_cleanups = defer_cleanups_to (old_cleanups);
3026 emit_queue ();
3027 emit_jump_insn (gen_jump (lab2));
3028 emit_barrier ();
3029 emit_label (lab1);
3030 store_expr (TREE_OPERAND (exp, 2), target, 0);
3031 dest_right_flag = get_last_insn ();
3032 /* Handle conditional cleanups, if any. */
3033 right_cleanups = defer_cleanups_to (old_cleanups);
3034 emit_queue ();
3035 emit_label (lab2);
3036 OK_DEFER_POP;
3037
3038 /* Add back in any conditional cleanups. */
3039 if (left_cleanups || right_cleanups)
3040 {
3041 tree new_cleanups;
3042 tree cond;
3043 rtx last;
3044
3045 /* Now that we know that a flag is needed, go back and add in the
3046 setting of the flag. */
3047
3048 flag = gen_reg_rtx (word_mode);
3049
3050 /* Do the left side flag. */
3051 last = get_last_insn ();
3052 /* Flag left cleanups as needed. */
3053 emit_move_insn (flag, const1_rtx);
3054 /* ??? deprecated, use sequences instead. */
3055 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
3056
3057 /* Do the right side flag. */
3058 last = get_last_insn ();
3059 /* Flag right cleanups as needed. */
3060 emit_move_insn (flag, const0_rtx);
3061 /* ??? deprecated, use sequences instead. */
3062 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
3063
3064 /* All cleanups must be on the function_obstack. */
3065 push_obstacks_nochange ();
3066 resume_temporary_allocation ();
3067
3068 /* Convert FLAG, which is an rtx, into a tree. */
3069 cond = make_node (RTL_EXPR);
3070 TREE_TYPE (cond) = integer_type_node;
3071 RTL_EXPR_RTL (cond) = flag;
3072 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
3073 cond = save_expr (cond);
3074
3075 if (! left_cleanups)
3076 left_cleanups = integer_zero_node;
3077 if (! right_cleanups)
3078 right_cleanups = integer_zero_node;
3079 new_cleanups = build (COND_EXPR, void_type_node,
3080 truthvalue_conversion (cond),
3081 left_cleanups, right_cleanups);
3082 new_cleanups = fold (new_cleanups);
3083
3084 pop_obstacks ();
3085
3086 /* Now add in the conditionalized cleanups. */
3087 cleanups_this_call
3088 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3089 expand_eh_region_start ();
3090 }
3091 return want_value ? target : NULL_RTX;
3092 }
3093 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3094 && GET_MODE (target) != BLKmode)
3095 /* If target is in memory and caller wants value in a register instead,
3096 arrange that. Pass TARGET as target for expand_expr so that,
3097 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3098 We know expand_expr will not use the target in that case.
3099 Don't do this if TARGET is volatile because we are supposed
3100 to write it and then read it. */
3101 {
3102 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3103 GET_MODE (target), 0);
3104 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3105 temp = copy_to_reg (temp);
3106 dont_return_target = 1;
3107 }
3108 else if (queued_subexp_p (target))
3109 /* If target contains a postincrement, let's not risk
3110 using it as the place to generate the rhs. */
3111 {
3112 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3113 {
3114 /* Expand EXP into a new pseudo. */
3115 temp = gen_reg_rtx (GET_MODE (target));
3116 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3117 }
3118 else
3119 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3120
3121 /* If target is volatile, ANSI requires accessing the value
3122 *from* the target, if it is accessed. So make that happen.
3123 In no case return the target itself. */
3124 if (! MEM_VOLATILE_P (target) && want_value)
3125 dont_return_target = 1;
3126 }
3127 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3128 /* If this is a scalar in a register that is stored in a wider mode
3129 than the declared mode, compute the result into its declared mode
3130 and then convert to the wider mode. Our value is the computed
3131 expression. */
3132 {
3133 /* If we don't want a value, we can do the conversion inside EXP,
3134 which will often result in some optimizations. Do the conversion
3135 in two steps: first change the signedness, if needed, then
3136 the extend. But don't do this if the type of EXP is a subtype
3137 of something else since then the conversion might involve
3138 more than just converting modes. */
3139 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3140 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3141 {
3142 if (TREE_UNSIGNED (TREE_TYPE (exp))
3143 != SUBREG_PROMOTED_UNSIGNED_P (target))
3144 exp
3145 = convert
3146 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3147 TREE_TYPE (exp)),
3148 exp);
3149
3150 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3151 SUBREG_PROMOTED_UNSIGNED_P (target)),
3152 exp);
3153 }
3154
3155 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3156
3157 /* If TEMP is a volatile MEM and we want a result value, make
3158 the access now so it gets done only once. Likewise if
3159 it contains TARGET. */
3160 if (GET_CODE (temp) == MEM && want_value
3161 && (MEM_VOLATILE_P (temp)
3162 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3163 temp = copy_to_reg (temp);
3164
3165 /* If TEMP is a VOIDmode constant, use convert_modes to make
3166 sure that we properly convert it. */
3167 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3168 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3169 TYPE_MODE (TREE_TYPE (exp)), temp,
3170 SUBREG_PROMOTED_UNSIGNED_P (target));
3171
3172 convert_move (SUBREG_REG (target), temp,
3173 SUBREG_PROMOTED_UNSIGNED_P (target));
3174 return want_value ? temp : NULL_RTX;
3175 }
3176 else
3177 {
3178 temp = expand_expr (exp, target, GET_MODE (target), 0);
3179 /* Return TARGET if it's a specified hardware register.
3180 If TARGET is a volatile mem ref, either return TARGET
3181 or return a reg copied *from* TARGET; ANSI requires this.
3182
3183 Otherwise, if TEMP is not TARGET, return TEMP
3184 if it is constant (for efficiency),
3185 or if we really want the correct value. */
3186 if (!(target && GET_CODE (target) == REG
3187 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3188 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3189 && temp != target
3190 && (CONSTANT_P (temp) || want_value))
3191 dont_return_target = 1;
3192 }
3193
3194 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3195 the same as that of TARGET, adjust the constant. This is needed, for
3196 example, in case it is a CONST_DOUBLE and we want only a word-sized
3197 value. */
3198 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3199 && TREE_CODE (exp) != ERROR_MARK
3200 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3201 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3202 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3203
3204 /* If value was not generated in the target, store it there.
3205 Convert the value to TARGET's type first if necessary. */
3206
3207 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3208 {
3209 target = protect_from_queue (target, 1);
3210 if (GET_MODE (temp) != GET_MODE (target)
3211 && GET_MODE (temp) != VOIDmode)
3212 {
3213 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3214 if (dont_return_target)
3215 {
3216 /* In this case, we will return TEMP,
3217 so make sure it has the proper mode.
3218 But don't forget to store the value into TARGET. */
3219 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3220 emit_move_insn (target, temp);
3221 }
3222 else
3223 convert_move (target, temp, unsignedp);
3224 }
3225
3226 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3227 {
3228 /* Handle copying a string constant into an array.
3229 The string constant may be shorter than the array.
3230 So copy just the string's actual length, and clear the rest. */
3231 rtx size;
3232 rtx addr;
3233
3234 /* Get the size of the data type of the string,
3235 which is actually the size of the target. */
3236 size = expr_size (exp);
3237 if (GET_CODE (size) == CONST_INT
3238 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3239 emit_block_move (target, temp, size,
3240 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3241 else
3242 {
3243 /* Compute the size of the data to copy from the string. */
3244 tree copy_size
3245 = size_binop (MIN_EXPR,
3246 make_tree (sizetype, size),
3247 convert (sizetype,
3248 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3249 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3250 VOIDmode, 0);
3251 rtx label = 0;
3252
3253 /* Copy that much. */
3254 emit_block_move (target, temp, copy_size_rtx,
3255 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3256
3257 /* Figure out how much is left in TARGET that we have to clear.
3258 Do all calculations in ptr_mode. */
3259
3260 addr = XEXP (target, 0);
3261 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3262
3263 if (GET_CODE (copy_size_rtx) == CONST_INT)
3264 {
3265 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3266 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3267 }
3268 else
3269 {
3270 addr = force_reg (ptr_mode, addr);
3271 addr = expand_binop (ptr_mode, add_optab, addr,
3272 copy_size_rtx, NULL_RTX, 0,
3273 OPTAB_LIB_WIDEN);
3274
3275 size = expand_binop (ptr_mode, sub_optab, size,
3276 copy_size_rtx, NULL_RTX, 0,
3277 OPTAB_LIB_WIDEN);
3278
3279 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3280 GET_MODE (size), 0, 0);
3281 label = gen_label_rtx ();
3282 emit_jump_insn (gen_blt (label));
3283 }
3284
3285 if (size != const0_rtx)
3286 {
3287 #ifdef TARGET_MEM_FUNCTIONS
3288 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3289 addr, ptr_mode,
3290 const0_rtx, TYPE_MODE (integer_type_node),
3291 convert_to_mode (TYPE_MODE (sizetype),
3292 size,
3293 TREE_UNSIGNED (sizetype)),
3294 TYPE_MODE (sizetype));
3295 #else
3296 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3297 addr, ptr_mode,
3298 convert_to_mode (TYPE_MODE (integer_type_node),
3299 size,
3300 TREE_UNSIGNED (integer_type_node)),
3301 TYPE_MODE (integer_type_node));
3302 #endif
3303 }
3304
3305 if (label)
3306 emit_label (label);
3307 }
3308 }
3309 /* Handle calls that return values in multiple non-contiguous locations.
3310 The Irix 6 ABI has examples of this. */
3311 else if (GET_CODE (target) == PARALLEL)
3312 emit_group_load (target, temp);
3313 else if (GET_MODE (temp) == BLKmode)
3314 emit_block_move (target, temp, expr_size (exp),
3315 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3316 else
3317 emit_move_insn (target, temp);
3318 }
3319
3320 /* If we don't want a value, return NULL_RTX. */
3321 if (! want_value)
3322 return NULL_RTX;
3323
3324 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3325 ??? The latter test doesn't seem to make sense. */
3326 else if (dont_return_target && GET_CODE (temp) != MEM)
3327 return temp;
3328
3329 /* Return TARGET itself if it is a hard register. */
3330 else if (want_value && GET_MODE (target) != BLKmode
3331 && ! (GET_CODE (target) == REG
3332 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3333 return copy_to_reg (target);
3334
3335 else
3336 return target;
3337 }
3338 \f
3339 /* Return 1 if EXP just contains zeros. */
3340
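/* For example, this returns 1 for the C initializers 0, 0.0, and
   { 0, 0.0, { 0 } }, but 0 for { 1, 0 }.  */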
3341 static int
3342 is_zeros_p (exp)
3343 tree exp;
3344 {
3345 tree elt;
3346
3347 switch (TREE_CODE (exp))
3348 {
3349 case CONVERT_EXPR:
3350 case NOP_EXPR:
3351 case NON_LVALUE_EXPR:
3352 return is_zeros_p (TREE_OPERAND (exp, 0));
3353
3354 case INTEGER_CST:
3355 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3356
3357 case COMPLEX_CST:
3358 return
3359 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3360
3361 case REAL_CST:
3362 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3363
3364 case CONSTRUCTOR:
3365 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3366 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3367 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3368 if (! is_zeros_p (TREE_VALUE (elt)))
3369 return 0;
3370
3371 return 1;
3372 }
3373
3374 return 0;
3375 }
3376
3377 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3378
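/* For instance, a constructor with three zero elements out of four
   qualifies, since 4 * 3 >= 3 * 4.  */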
3379 static int
3380 mostly_zeros_p (exp)
3381 tree exp;
3382 {
3383 if (TREE_CODE (exp) == CONSTRUCTOR)
3384 {
3385 int elts = 0, zeros = 0;
3386 tree elt = CONSTRUCTOR_ELTS (exp);
3387 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3388 {
3389 /* If there are no ranges of true bits, it is all zero. */
3390 return elt == NULL_TREE;
3391 }
3392 for (; elt; elt = TREE_CHAIN (elt))
3393 {
3394 /* We do not handle the case where the index is a RANGE_EXPR,
3395 so the statistic will be somewhat inaccurate.
3396 We do make a more accurate count in store_constructor itself,
3397 so since this function is only used for nested array elements,
3398 this should be close enough. */
3399 if (mostly_zeros_p (TREE_VALUE (elt)))
3400 zeros++;
3401 elts++;
3402 }
3403
3404 return 4 * zeros >= 3 * elts;
3405 }
3406
3407 return is_zeros_p (exp);
3408 }
3409 \f
3410 /* Helper function for store_constructor.
3411 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3412 TYPE is the type of the CONSTRUCTOR, not the element type.
3413 CLEARED is as for store_constructor.
3414
3415 This provides a recursive shortcut back to store_constructor when it isn't
3416 necessary to go through store_field. This is so that we can pass through
3417 the cleared field to let store_constructor know that we may not have to
3418 clear a substructure if the outer structure has already been cleared. */
3419
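/* E.g. a byte-aligned nested CONSTRUCTOR recurses into store_constructor
   directly, so clearing done for the outer structure can make clearing
   the inner one unnecessary.  */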
3420 static void
3421 store_constructor_field (target, bitsize, bitpos,
3422 mode, exp, type, cleared)
3423 rtx target;
3424 int bitsize, bitpos;
3425 enum machine_mode mode;
3426 tree exp, type;
3427 int cleared;
3428 {
3429 if (TREE_CODE (exp) == CONSTRUCTOR
3430 && bitpos % BITS_PER_UNIT == 0
3431 /* If we have a non-zero bitpos for a register target, then we just
3432 let store_field do the bitfield handling. This is unlikely to
3433 generate unnecessary clear instructions anyway. */
3434 && (bitpos == 0 || GET_CODE (target) == MEM))
3435 {
3436 if (bitpos != 0)
3437 target = change_address (target, VOIDmode,
3438 plus_constant (XEXP (target, 0),
3439 bitpos / BITS_PER_UNIT));
3440 store_constructor (exp, target, cleared);
3441 }
3442 else
3443 store_field (target, bitsize, bitpos, mode, exp,
3444 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3445 int_size_in_bytes (type));
3446 }
3447
3448 /* Store the value of constructor EXP into the rtx TARGET.
3449 TARGET is either a REG or a MEM.
3450 CLEARED is true if TARGET is known to have been zero'd. */
3451
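/* Sketch: for `struct { int a, b; } s = { 1 };' the constructor has
   fewer elements than the type has fields, so the whole structure is
   cleared below before field A is stored.  */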
3452 static void
3453 store_constructor (exp, target, cleared)
3454 tree exp;
3455 rtx target;
3456 int cleared;
3457 {
3458 tree type = TREE_TYPE (exp);
3459
3460 /* We know our target cannot conflict, since safe_from_p has been called. */
3461 #if 0
3462 /* Don't try copying piece by piece into a hard register
3463 since that is vulnerable to being clobbered by EXP.
3464 Instead, construct in a pseudo register and then copy it all. */
3465 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3466 {
3467 rtx temp = gen_reg_rtx (GET_MODE (target));
3468 store_constructor (exp, temp, 0);
3469 emit_move_insn (target, temp);
3470 return;
3471 }
3472 #endif
3473
3474 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3475 || TREE_CODE (type) == QUAL_UNION_TYPE)
3476 {
3477 register tree elt;
3478
3479 /* Inform later passes that the whole union value is dead. */
3480 if (TREE_CODE (type) == UNION_TYPE
3481 || TREE_CODE (type) == QUAL_UNION_TYPE)
3482 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3483
3484 /* If we are building a static constructor into a register,
3485 set the initial value as zero so we can fold the value into
3486 a constant. But if more than one register is involved,
3487 this probably loses. */
3488 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3489 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3490 {
3491 if (! cleared)
3492 emit_move_insn (target, const0_rtx);
3493
3494 cleared = 1;
3495 }
3496
3497 /* If the constructor has fewer fields than the structure
3498 or if we are initializing the structure to mostly zeros,
3499 clear the whole structure first. */
3500 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3501 != list_length (TYPE_FIELDS (type)))
3502 || mostly_zeros_p (exp))
3503 {
3504 if (! cleared)
3505 clear_storage (target, expr_size (exp),
3506 TYPE_ALIGN (type) / BITS_PER_UNIT);
3507
3508 cleared = 1;
3509 }
3510 else
3511 /* Inform later passes that the old value is dead. */
3512 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3513
3514 /* Store each element of the constructor into
3515 the corresponding field of TARGET. */
3516
3517 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3518 {
3519 register tree field = TREE_PURPOSE (elt);
3520 register enum machine_mode mode;
3521 int bitsize;
3522 int bitpos = 0;
3523 int unsignedp;
3524 tree pos, constant = 0, offset = 0;
3525 rtx to_rtx = target;
3526
3527 /* Just ignore missing fields.
3528 We cleared the whole structure, above,
3529 if any fields are missing. */
3530 if (field == 0)
3531 continue;
3532
3533 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3534 continue;
3535
3536 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3537 unsignedp = TREE_UNSIGNED (field);
3538 mode = DECL_MODE (field);
3539 if (DECL_BIT_FIELD (field))
3540 mode = VOIDmode;
3541
3542 pos = DECL_FIELD_BITPOS (field);
3543 if (TREE_CODE (pos) == INTEGER_CST)
3544 constant = pos;
3545 else if (TREE_CODE (pos) == PLUS_EXPR
3546 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3547 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3548 else
3549 offset = pos;
3550
3551 if (constant)
3552 bitpos = TREE_INT_CST_LOW (constant);
3553
3554 if (offset)
3555 {
3556 rtx offset_rtx;
3557
3558 if (contains_placeholder_p (offset))
3559 offset = build (WITH_RECORD_EXPR, sizetype,
3560 offset, exp);
3561
3562 offset = size_binop (FLOOR_DIV_EXPR, offset,
3563 size_int (BITS_PER_UNIT));
3564
3565 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3566 if (GET_CODE (to_rtx) != MEM)
3567 abort ();
3568
3569 to_rtx
3570 = change_address (to_rtx, VOIDmode,
3571 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3572 force_reg (ptr_mode, offset_rtx)));
3573 }
3574 if (TREE_READONLY (field))
3575 {
3576 if (GET_CODE (to_rtx) == MEM)
3577 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3578 XEXP (to_rtx, 0));
3579 RTX_UNCHANGING_P (to_rtx) = 1;
3580 }
3581
3582 store_constructor_field (to_rtx, bitsize, bitpos,
3583 mode, TREE_VALUE (elt), type, cleared);
3584 }
3585 }
3586 else if (TREE_CODE (type) == ARRAY_TYPE)
3587 {
3588 register tree elt;
3589 register int i;
3590 int need_to_clear;
3591 tree domain = TYPE_DOMAIN (type);
3592 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3593 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3594 tree elttype = TREE_TYPE (type);
3595
3596 /* If the constructor has fewer elements than the array,
3597 clear the whole array first. Similarly if this is a
3598 static constructor of a non-BLKmode object. */
3599 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3600 need_to_clear = 1;
3601 else
3602 {
3603 HOST_WIDE_INT count = 0, zero_count = 0;
3604 need_to_clear = 0;
3605 /* This loop is a more accurate version of the loop in
3606 mostly_zeros_p (it handles RANGE_EXPR in an index).
3607 It is also needed to check for missing elements. */
3608 for (elt = CONSTRUCTOR_ELTS (exp);
3609 elt != NULL_TREE;
3610 elt = TREE_CHAIN (elt))
3611 {
3612 tree index = TREE_PURPOSE (elt);
3613 HOST_WIDE_INT this_node_count;
3614 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3615 {
3616 tree lo_index = TREE_OPERAND (index, 0);
3617 tree hi_index = TREE_OPERAND (index, 1);
3618 if (TREE_CODE (lo_index) != INTEGER_CST
3619 || TREE_CODE (hi_index) != INTEGER_CST)
3620 {
3621 need_to_clear = 1;
3622 break;
3623 }
3624 this_node_count = TREE_INT_CST_LOW (hi_index)
3625 - TREE_INT_CST_LOW (lo_index) + 1;
3626 }
3627 else
3628 this_node_count = 1;
3629 count += this_node_count;
3630 if (mostly_zeros_p (TREE_VALUE (elt)))
3631 zero_count += this_node_count;
3632 }
3633 /* Clear the entire array first if there are any missing elements,
3634 or if the incidence of zero elements is >= 75%. */
3635 if (count < maxelt - minelt + 1
3636 || 4 * zero_count >= 3 * count)
3637 need_to_clear = 1;
3638 }
3639 if (need_to_clear)
3640 {
3641 if (! cleared)
3642 clear_storage (target, expr_size (exp),
3643 TYPE_ALIGN (type) / BITS_PER_UNIT);
3644 cleared = 1;
3645 }
3646 else
3647 /* Inform later passes that the old value is dead. */
3648 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3649
3650 /* Store each element of the constructor into
3651 the corresponding element of TARGET, determined
3652 by counting the elements. */
3653 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3654 elt;
3655 elt = TREE_CHAIN (elt), i++)
3656 {
3657 register enum machine_mode mode;
3658 int bitsize;
3659 int bitpos;
3660 int unsignedp;
3661 tree value = TREE_VALUE (elt);
3662 tree index = TREE_PURPOSE (elt);
3663 rtx xtarget = target;
3664
3665 if (cleared && is_zeros_p (value))
3666 continue;
3667
3668 mode = TYPE_MODE (elttype);
3669 bitsize = GET_MODE_BITSIZE (mode);
3670 unsignedp = TREE_UNSIGNED (elttype);
3671
3672 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3673 {
3674 tree lo_index = TREE_OPERAND (index, 0);
3675 tree hi_index = TREE_OPERAND (index, 1);
3676 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3677 struct nesting *loop;
3678 HOST_WIDE_INT lo, hi, count;
3679 tree position;
3680
3681 /* If the range is constant and "small", unroll the loop (always for a register target; for a memory target, only when there are at most two elements or at most 40 bytes in all). */
3682 if (TREE_CODE (lo_index) == INTEGER_CST
3683 && TREE_CODE (hi_index) == INTEGER_CST
3684 && (lo = TREE_INT_CST_LOW (lo_index),
3685 hi = TREE_INT_CST_LOW (hi_index),
3686 count = hi - lo + 1,
3687 (GET_CODE (target) != MEM
3688 || count <= 2
3689 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3690 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3691 <= 40 * 8))))
3692 {
3693 lo -= minelt; hi -= minelt;
3694 for (; lo <= hi; lo++)
3695 {
3696 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3697 store_constructor_field (target, bitsize, bitpos,
3698 mode, value, type, cleared);
3699 }
3700 }
3701 else
3702 {
3703 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3704 loop_top = gen_label_rtx ();
3705 loop_end = gen_label_rtx ();
3706
3707 unsignedp = TREE_UNSIGNED (domain);
3708
3709 index = build_decl (VAR_DECL, NULL_TREE, domain);
3710
3711 DECL_RTL (index) = index_r
3712 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3713 &unsignedp, 0));
3714
3715 if (TREE_CODE (value) == SAVE_EXPR
3716 && SAVE_EXPR_RTL (value) == 0)
3717 {
3718 /* Make sure value gets expanded once before the
3719 loop. */
3720 expand_expr (value, const0_rtx, VOIDmode, 0);
3721 emit_queue ();
3722 }
3723 store_expr (lo_index, index_r, 0);
3724 loop = expand_start_loop (0);
3725
3726 /* Assign value to element index. */
3727 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3728 size_int (BITS_PER_UNIT));
3729 position = size_binop (MULT_EXPR,
3730 size_binop (MINUS_EXPR, index,
3731 TYPE_MIN_VALUE (domain)),
3732 position);
3733 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3734 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3735 xtarget = change_address (target, mode, addr);
3736 if (TREE_CODE (value) == CONSTRUCTOR)
3737 store_constructor (value, xtarget, cleared);
3738 else
3739 store_expr (value, xtarget, 0);
3740
3741 expand_exit_loop_if_false (loop,
3742 build (LT_EXPR, integer_type_node,
3743 index, hi_index));
3744
3745 expand_increment (build (PREINCREMENT_EXPR,
3746 TREE_TYPE (index),
3747 index, integer_one_node), 0, 0);
3748 expand_end_loop ();
3749 emit_label (loop_end);
3750
3751 /* Needed by stupid register allocation, to extend the
3752 lifetime of pseudo-regs used by target past the end
3753 of the loop. */
3754 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3755 }
3756 }
3757 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3758 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3759 {
3760 rtx pos_rtx, addr;
3761 tree position;
3762
3763 if (index == 0)
3764 index = size_int (i);
3765
3766 if (minelt)
3767 index = size_binop (MINUS_EXPR, index,
3768 TYPE_MIN_VALUE (domain));
3769 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3770 size_int (BITS_PER_UNIT));
3771 position = size_binop (MULT_EXPR, index, position);
3772 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3773 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3774 xtarget = change_address (target, mode, addr);
3775 store_expr (value, xtarget, 0);
3776 }
3777 else
3778 {
3779 if (index != 0)
3780 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3781 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3782 else
3783 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3784 store_constructor_field (target, bitsize, bitpos,
3785 mode, value, type, cleared);
3786 }
3787 }
3788 }
3789 /* set constructor assignments */
3790 else if (TREE_CODE (type) == SET_TYPE)
3791 {
3792 tree elt = CONSTRUCTOR_ELTS (exp);
3793 rtx xtarget = XEXP (target, 0);
3794 int set_word_size = TYPE_ALIGN (type);
3795 int nbytes = int_size_in_bytes (type), nbits;
3796 tree domain = TYPE_DOMAIN (type);
3797 tree domain_min, domain_max, bitlength;
3798
3799 /* The default implementation strategy is to extract the constant
3800 parts of the constructor, use that to initialize the target,
3801 and then "or" in whatever non-constant ranges we need in addition.
3802
3803 If a large set is all zero or all ones, it is
3804 probably better to set it using memset (if available) or bzero.
3805 Also, if a large set has just a single range, it may be
3806 better to first clear the whole set (using bzero/memset)
3807 and then set the bits we want. */
3808
3809 /* Check for all zeros. */
3810 if (elt == NULL_TREE)
3811 {
3812 if (!cleared)
3813 clear_storage (target, expr_size (exp),
3814 TYPE_ALIGN (type) / BITS_PER_UNIT);
3815 return;
3816 }
3817
3818 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3819 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3820 bitlength = size_binop (PLUS_EXPR,
3821 size_binop (MINUS_EXPR, domain_max, domain_min),
3822 size_one_node);
3823
3824 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3825 abort ();
3826 nbits = TREE_INT_CST_LOW (bitlength);
3827
3828 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3829 are "complicated" (more than one range), initialize (the
3830 constant parts) by copying from a constant. */
3831 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3832 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3833 {
3834 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3835 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3836 char *bit_buffer = (char *) alloca (nbits);
3837 HOST_WIDE_INT word = 0;
3838 int bit_pos = 0;
3839 int ibit = 0;
3840 int offset = 0; /* In bytes from beginning of set. */
3841 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
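/* The loop below packs BIT_BUFFER into one target word at a time.
   For example, with a 32-bit word and constant members 0, 3 and 8,
   it ORs in 1 << 0, 1 << 3 and 1 << 8 and moves the single word
   0x109 into the set; with BYTES_BIG_ENDIAN the shift counts are
   mirrored within the word (set_word_size - 1 - bit_pos).  */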
3842 for (;;)
3843 {
3844 if (bit_buffer[ibit])
3845 {
3846 if (BYTES_BIG_ENDIAN)
3847 word |= (1 << (set_word_size - 1 - bit_pos));
3848 else
3849 word |= 1 << bit_pos;
3850 }
3851 bit_pos++; ibit++;
3852 if (bit_pos >= set_word_size || ibit == nbits)
3853 {
3854 if (word != 0 || ! cleared)
3855 {
3856 rtx datum = GEN_INT (word);
3857 rtx to_rtx;
3858 /* The assumption here is that it is safe to use
3859 XEXP if the set is multi-word, but not if
3860 it's single-word. */
3861 if (GET_CODE (target) == MEM)
3862 {
3863 to_rtx = plus_constant (XEXP (target, 0), offset);
3864 to_rtx = change_address (target, mode, to_rtx);
3865 }
3866 else if (offset == 0)
3867 to_rtx = target;
3868 else
3869 abort ();
3870 emit_move_insn (to_rtx, datum);
3871 }
3872 if (ibit == nbits)
3873 break;
3874 word = 0;
3875 bit_pos = 0;
3876 offset += set_word_size / BITS_PER_UNIT;
3877 }
3878 }
3879 }
3880 else if (!cleared)
3881 {
3882 /* Don't bother clearing storage if the set is all ones. */
3883 if (TREE_CHAIN (elt) != NULL_TREE
3884 || (TREE_PURPOSE (elt) == NULL_TREE
3885 ? nbits != 1
3886 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3887 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3888 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3889 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3890 != nbits))))
3891 clear_storage (target, expr_size (exp),
3892 TYPE_ALIGN (type) / BITS_PER_UNIT);
3893 }
3894
3895 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3896 {
3897 /* start of range of element or NULL */
3898 tree startbit = TREE_PURPOSE (elt);
3899 /* end of range of element, or element value */
3900 tree endbit = TREE_VALUE (elt);
3901 HOST_WIDE_INT startb, endb;
3902 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3903
3904 bitlength_rtx = expand_expr (bitlength,
3905 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3906
3907 /* handle non-range tuple element like [ expr ] */
3908 if (startbit == NULL_TREE)
3909 {
3910 startbit = save_expr (endbit);
3911 endbit = startbit;
3912 }
3913 startbit = convert (sizetype, startbit);
3914 endbit = convert (sizetype, endbit);
3915 if (! integer_zerop (domain_min))
3916 {
3917 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3918 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3919 }
3920 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3921 EXPAND_CONST_ADDRESS);
3922 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3923 EXPAND_CONST_ADDRESS);
3924
3925 if (REG_P (target))
3926 {
3927 targetx = assign_stack_temp (GET_MODE (target),
3928 GET_MODE_SIZE (GET_MODE (target)),
3929 0);
3930 emit_move_insn (targetx, target);
3931 }
3932 else if (GET_CODE (target) == MEM)
3933 targetx = target;
3934 else
3935 abort ();
3936
3937 #ifdef TARGET_MEM_FUNCTIONS
3938 /* Optimization: If startbit and endbit are
3939 constants divisible by BITS_PER_UNIT,
3940 call memset instead. */
3941 if (TREE_CODE (startbit) == INTEGER_CST
3942 && TREE_CODE (endbit) == INTEGER_CST
3943 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3944 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3945 {
3946 emit_library_call (memset_libfunc, 0,
3947 VOIDmode, 3,
3948 plus_constant (XEXP (targetx, 0),
3949 startb / BITS_PER_UNIT),
3950 Pmode,
3951 constm1_rtx, TYPE_MODE (integer_type_node),
3952 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3953 TYPE_MODE (sizetype));
3954 }
3955 else
3956 #endif
3957 {
3958 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3959 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3960 bitlength_rtx, TYPE_MODE (sizetype),
3961 startbit_rtx, TYPE_MODE (sizetype),
3962 endbit_rtx, TYPE_MODE (sizetype));
3963 }
3964 if (REG_P (target))
3965 emit_move_insn (target, targetx);
3966 }
3967 }
3968
3969 else
3970 abort ();
3971 }
3972
3973 /* Store the value of EXP (an expression tree)
3974 into a subfield of TARGET which has mode MODE and occupies
3975 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3976 If MODE is VOIDmode, it means that we are storing into a bit-field.
3977
3978 If VALUE_MODE is VOIDmode, return nothing in particular.
3979 UNSIGNEDP is not used in this case.
3980
3981 Otherwise, return an rtx for the value stored. This rtx
3982 has mode VALUE_MODE if that is convenient to do.
3983 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3984
3985 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3986 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3987
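/* As an illustration of this interface: given
   struct s { unsigned int a : 3; unsigned int b : 9; };
   storing into field b typically arrives here with BITSIZE 9,
   BITPOS 3 and MODE VOIDmode, forcing the bit-field path below,
   while storing a whole word-sized member arrives with a real MODE
   and can often use an ordinary memref.  */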
3988 static rtx
3989 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3990 unsignedp, align, total_size)
3991 rtx target;
3992 int bitsize, bitpos;
3993 enum machine_mode mode;
3994 tree exp;
3995 enum machine_mode value_mode;
3996 int unsignedp;
3997 int align;
3998 int total_size;
3999 {
4000 HOST_WIDE_INT width_mask = 0;
4001
4002 if (bitsize < HOST_BITS_PER_WIDE_INT)
4003 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
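/* For example, a BITSIZE of 5 yields a WIDTH_MASK of 0x1f, which
   isolates the low-order bits of the stored field value.  */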
4004
4005 /* If we are storing into an unaligned field of an aligned union that is
4006 in a register, we may have the mode of TARGET being an integer mode but
4007 MODE == BLKmode. In that case, get an aligned object whose size and
4008 alignment are the same as TARGET and store TARGET into it (we can avoid
4009 the store if the field being stored is the entire width of TARGET). Then
4010 call ourselves recursively to store the field into a BLKmode version of
4011 that object. Finally, load from the object into TARGET. This is not
4012 very efficient in general, but should only be slightly more expensive
4013 than the otherwise-required unaligned accesses. Perhaps this can be
4014 cleaned up later. */
4015
4016 if (mode == BLKmode
4017 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4018 {
4019 rtx object = assign_stack_temp (GET_MODE (target),
4020 GET_MODE_SIZE (GET_MODE (target)), 0);
4021 rtx blk_object = copy_rtx (object);
4022
4023 MEM_IN_STRUCT_P (object) = 1;
4024 MEM_IN_STRUCT_P (blk_object) = 1;
4025 PUT_MODE (blk_object, BLKmode);
4026
4027 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4028 emit_move_insn (object, target);
4029
4030 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4031 align, total_size);
4032
4033 /* Even though we aren't returning target, we need to
4034 give it the updated value. */
4035 emit_move_insn (target, object);
4036
4037 return blk_object;
4038 }
4039
4040 /* If the structure is in a register or if the component
4041 is a bit field, we cannot use addressing to access it.
4042 Use bit-field techniques or SUBREG to store in it. */
4043
4044 if (mode == VOIDmode
4045 || (mode != BLKmode && ! direct_store[(int) mode])
4046 || GET_CODE (target) == REG
4047 || GET_CODE (target) == SUBREG
4048 /* If the field isn't aligned enough to store as an ordinary memref,
4049 store it as a bit field. */
4050 || (SLOW_UNALIGNED_ACCESS
4051 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4052 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4053 {
4054 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4055
4056 /* If BITSIZE is narrower than the size of the type of EXP
4057 we will be narrowing TEMP. Normally, what's wanted are the
4058 low-order bits. However, if EXP's type is a record and this
4059 is a big-endian machine, we want the upper BITSIZE bits. */
4060 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4061 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4062 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4063 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4064 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4065 - bitsize),
4066 temp, 1);
4067
4068 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4069 MODE. */
4070 if (mode != VOIDmode && mode != BLKmode
4071 && mode != TYPE_MODE (TREE_TYPE (exp)))
4072 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4073
4074 /* If the modes of TARGET and TEMP are both BLKmode, both
4075 must be in memory and BITPOS must be aligned on a byte
4076 boundary. If so, we simply do a block copy. */
4077 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4078 {
4079 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4080 || bitpos % BITS_PER_UNIT != 0)
4081 abort ();
4082
4083 target = change_address (target, VOIDmode,
4084 plus_constant (XEXP (target, 0),
4085 bitpos / BITS_PER_UNIT));
4086
4087 emit_block_move (target, temp,
4088 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4089 / BITS_PER_UNIT),
4090 1);
4091
4092 return value_mode == VOIDmode ? const0_rtx : target;
4093 }
4094
4095 /* Store the value in the bitfield. */
4096 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4097 if (value_mode != VOIDmode)
4098 {
4099 /* The caller wants an rtx for the value. */
4100 /* If possible, avoid refetching from the bitfield itself. */
4101 if (width_mask != 0
4102 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4103 {
4104 tree count;
4105 enum machine_mode tmode;
4106
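/* An unsigned value is cleaned with a mask; a signed one with a
   left shift followed by an arithmetic right shift.  E.g. for a
   5-bit field held in a 32-bit TMODE, both shift counts are 27,
   which sign-extends bit 4 through the upper bits.  */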
4107 if (unsignedp)
4108 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4109 tmode = GET_MODE (temp);
4110 if (tmode == VOIDmode)
4111 tmode = value_mode;
4112 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4113 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4114 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4115 }
4116 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4117 NULL_RTX, value_mode, 0, align,
4118 total_size);
4119 }
4120 return const0_rtx;
4121 }
4122 else
4123 {
4124 rtx addr = XEXP (target, 0);
4125 rtx to_rtx;
4126
4127 /* If a value is wanted, it must be the lhs,
4128 so make the address stable for multiple use. */
4129
4130 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4131 && ! CONSTANT_ADDRESS_P (addr)
4132 /* A frame-pointer reference is already stable. */
4133 && ! (GET_CODE (addr) == PLUS
4134 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4135 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4136 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4137 addr = copy_to_reg (addr);
4138
4139 /* Now build a reference to just the desired component. */
4140
4141 to_rtx = change_address (target, mode,
4142 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
4143 MEM_IN_STRUCT_P (to_rtx) = 1;
4144
4145 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4146 }
4147 }
4148 \f
4149 /* Return true if any object containing the innermost array is an unaligned
4150 packed structure field. */
4151
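/* An illustrative case: given
   struct s { char c; int a[4]; } __attribute__ ((packed));
   an access to s.a[i] needs int alignment, but the packed record
   containing the array only guarantees byte alignment, so the walk
   below reports 1.  */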
4152 static int
4153 get_inner_unaligned_p (exp)
4154 tree exp;
4155 {
4156 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4157
4158 while (1)
4159 {
4160 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4161 {
4162 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4163 < needed_alignment)
4164 return 1;
4165 }
4166 else if (TREE_CODE (exp) != ARRAY_REF
4167 && TREE_CODE (exp) != NON_LVALUE_EXPR
4168 && ! ((TREE_CODE (exp) == NOP_EXPR
4169 || TREE_CODE (exp) == CONVERT_EXPR)
4170 && (TYPE_MODE (TREE_TYPE (exp))
4171 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4172 break;
4173
4174 exp = TREE_OPERAND (exp, 0);
4175 }
4176
4177 return 0;
4178 }
4179
4180 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4181 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4182 ARRAY_REFs and find the ultimate containing object, which we return.
4183
4184 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4185 bit position, and *PUNSIGNEDP to the signedness of the field.
4186 If the position of the field is variable, we store a tree
4187 giving the variable offset (in units) in *POFFSET.
4188 This offset is in addition to the bit position.
4189 If the position is not variable, we store 0 in *POFFSET.
4190 We set *PALIGNMENT to the alignment in bytes of the address that will be
4191 computed. This is the alignment of the thing we return if *POFFSET
4192 is zero, but it can be less strictly aligned if *POFFSET is nonzero.
4193
4194 If any of the extraction expressions is volatile,
4195 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4196
4197 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4198 is a mode that can be used to access the field. In that case, *PBITSIZE
4199 is redundant.
4200
4201 If the field describes a variable-sized object, *PMODE is set to
4202 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4203 this case, but the address of the object can be found. */
4204
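/* A worked example: for a reference like r.f[i].b, where b is a
   bit-field, the walk below goes b -> f[i] -> r, adding the constant
   bit offsets of b and f into *PBITPOS, folding i * TYPE_SIZE (elt)
   either into *PBITPOS (if I is constant) or into the byte-valued
   *POFFSET tree (if it is not), and finally returns the decl of r.  */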
4205 tree
4206 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4207 punsignedp, pvolatilep, palignment)
4208 tree exp;
4209 int *pbitsize;
4210 int *pbitpos;
4211 tree *poffset;
4212 enum machine_mode *pmode;
4213 int *punsignedp;
4214 int *pvolatilep;
4215 int *palignment;
4216 {
4217 tree orig_exp = exp;
4218 tree size_tree = 0;
4219 enum machine_mode mode = VOIDmode;
4220 tree offset = integer_zero_node;
4221 int alignment = BIGGEST_ALIGNMENT;
4222
4223 if (TREE_CODE (exp) == COMPONENT_REF)
4224 {
4225 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4226 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4227 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4228 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4229 }
4230 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4231 {
4232 size_tree = TREE_OPERAND (exp, 1);
4233 *punsignedp = TREE_UNSIGNED (exp);
4234 }
4235 else
4236 {
4237 mode = TYPE_MODE (TREE_TYPE (exp));
4238 *pbitsize = GET_MODE_BITSIZE (mode);
4239 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4240 }
4241
4242 if (size_tree)
4243 {
4244 if (TREE_CODE (size_tree) != INTEGER_CST)
4245 mode = BLKmode, *pbitsize = -1;
4246 else
4247 *pbitsize = TREE_INT_CST_LOW (size_tree);
4248 }
4249
4250 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4251 and find the ultimate containing object. */
4252
4253 *pbitpos = 0;
4254
4255 while (1)
4256 {
4257 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4258 {
4259 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4260 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4261 : TREE_OPERAND (exp, 2));
4262 tree constant = integer_zero_node, var = pos;
4263
4264 /* If this field hasn't been filled in yet, don't go
4265 past it. This should only happen when folding expressions
4266 made during type construction. */
4267 if (pos == 0)
4268 break;
4269
4270 /* Assume here that the offset is a multiple of a unit.
4271 If not, there should be an explicitly added constant. */
4272 if (TREE_CODE (pos) == PLUS_EXPR
4273 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4274 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4275 else if (TREE_CODE (pos) == INTEGER_CST)
4276 constant = pos, var = integer_zero_node;
4277
4278 *pbitpos += TREE_INT_CST_LOW (constant);
4279 offset = size_binop (PLUS_EXPR, offset,
4280 size_binop (EXACT_DIV_EXPR, var,
4281 size_int (BITS_PER_UNIT)));
4282 }
4283
4284 else if (TREE_CODE (exp) == ARRAY_REF)
4285 {
4286 /* This code is based on the code in case ARRAY_REF in expand_expr
4287 below. We assume here that the size of an array element is
4288 always an integral multiple of BITS_PER_UNIT. */
4289
4290 tree index = TREE_OPERAND (exp, 1);
4291 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4292 tree low_bound
4293 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4294 tree index_type = TREE_TYPE (index);
4295
4296 if (! integer_zerop (low_bound))
4297 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4298
4299 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4300 {
4301 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4302 index);
4303 index_type = TREE_TYPE (index);
4304 }
4305
4306 index = fold (build (MULT_EXPR, index_type, index,
4307 TYPE_SIZE (TREE_TYPE (exp))));
4308
4309 if (TREE_CODE (index) == INTEGER_CST
4310 && TREE_INT_CST_HIGH (index) == 0)
4311 *pbitpos += TREE_INT_CST_LOW (index);
4312 else
4313 offset = size_binop (PLUS_EXPR, offset,
4314 size_binop (FLOOR_DIV_EXPR, index,
4315 size_int (BITS_PER_UNIT)));
4316 }
4317 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4318 && ! ((TREE_CODE (exp) == NOP_EXPR
4319 || TREE_CODE (exp) == CONVERT_EXPR)
4320 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4321 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4322 != UNION_TYPE))
4323 && (TYPE_MODE (TREE_TYPE (exp))
4324 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4325 break;
4326
4327 /* If any reference in the chain is volatile, the effect is volatile. */
4328 if (TREE_THIS_VOLATILE (exp))
4329 *pvolatilep = 1;
4330
4331 /* If the offset is non-constant already, then we can't assume any
4332 alignment more than the alignment here. */
4333 if (! integer_zerop (offset))
4334 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4335
4336 exp = TREE_OPERAND (exp, 0);
4337 }
4338
4339 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4340 alignment = MIN (alignment, DECL_ALIGN (exp));
4341 else if (TREE_TYPE (exp) != 0)
4342 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4343
4344 if (integer_zerop (offset))
4345 offset = 0;
4346
4347 if (offset != 0 && contains_placeholder_p (offset))
4348 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4349
4350 *pmode = mode;
4351 *poffset = offset;
4352 *palignment = alignment / BITS_PER_UNIT;
4353 return exp;
4354 }
4355 \f
4356 /* Given an rtx VALUE that may contain additions and multiplications,
4357 return an equivalent value that just refers to a register or memory.
4358 This is done by generating instructions to perform the arithmetic
4359 and returning a pseudo-register containing the value.
4360
4361 The returned value may be a REG, SUBREG, MEM or constant. */
4362
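/* A typical use: an address computed as
   (plus (mult (reg) (const_int 4)) (reg))
   is not a valid operand on most machines; force_operand emits the
   multiply and the add as insns and returns a pseudo register
   holding the result, which is a valid operand anywhere.  */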
4363 rtx
4364 force_operand (value, target)
4365 rtx value, target;
4366 {
4367 register optab binoptab = 0;
4368 /* Use a temporary to force order of execution of calls to
4369 `force_operand'. */
4370 rtx tmp;
4371 register rtx op2;
4372 /* Use subtarget as the target for operand 0 of a binary operation. */
4373 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4374
4375 if (GET_CODE (value) == PLUS)
4376 binoptab = add_optab;
4377 else if (GET_CODE (value) == MINUS)
4378 binoptab = sub_optab;
4379 else if (GET_CODE (value) == MULT)
4380 {
4381 op2 = XEXP (value, 1);
4382 if (!CONSTANT_P (op2)
4383 && !(GET_CODE (op2) == REG && op2 != subtarget))
4384 subtarget = 0;
4385 tmp = force_operand (XEXP (value, 0), subtarget);
4386 return expand_mult (GET_MODE (value), tmp,
4387 force_operand (op2, NULL_RTX),
4388 target, 0);
4389 }
4390
4391 if (binoptab)
4392 {
4393 op2 = XEXP (value, 1);
4394 if (!CONSTANT_P (op2)
4395 && !(GET_CODE (op2) == REG && op2 != subtarget))
4396 subtarget = 0;
4397 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4398 {
4399 binoptab = add_optab;
4400 op2 = negate_rtx (GET_MODE (value), op2);
4401 }
4402
4403 /* Check for an addition with OP2 a constant integer and our first
4404 operand a PLUS of a virtual register and something else. In that
4405 case, we want to emit the sum of the virtual register and the
4406 constant first and then add the other value. This allows virtual
4407 register instantiation to simply modify the constant rather than
4408 creating another one around this addition. */
4409 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4410 && GET_CODE (XEXP (value, 0)) == PLUS
4411 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4412 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4413 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4414 {
4415 rtx temp = expand_binop (GET_MODE (value), binoptab,
4416 XEXP (XEXP (value, 0), 0), op2,
4417 subtarget, 0, OPTAB_LIB_WIDEN);
4418 return expand_binop (GET_MODE (value), binoptab, temp,
4419 force_operand (XEXP (XEXP (value, 0), 1), 0),
4420 target, 0, OPTAB_LIB_WIDEN);
4421 }
4422
4423 tmp = force_operand (XEXP (value, 0), subtarget);
4424 return expand_binop (GET_MODE (value), binoptab, tmp,
4425 force_operand (op2, NULL_RTX),
4426 target, 0, OPTAB_LIB_WIDEN);
4427 /* We give UNSIGNEDP = 0 to expand_binop
4428 because the only operations we are expanding here are signed ones. */
4429 }
4430 return value;
4431 }
4432 \f
4433 /* Subroutine of expand_expr:
4434 save the non-copied parts (LIST) of an expr (LHS), and return a list
4435 which can restore these values to their previous values,
4436 should something modify their storage. */
4437
4438 static tree
4439 save_noncopied_parts (lhs, list)
4440 tree lhs;
4441 tree list;
4442 {
4443 tree tail;
4444 tree parts = 0;
4445
4446 for (tail = list; tail; tail = TREE_CHAIN (tail))
4447 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4448 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4449 else
4450 {
4451 tree part = TREE_VALUE (tail);
4452 tree part_type = TREE_TYPE (part);
4453 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4454 rtx target = assign_temp (part_type, 0, 1, 1);
4455 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4456 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4457 parts = tree_cons (to_be_saved,
4458 build (RTL_EXPR, part_type, NULL_TREE,
4459 (tree) target),
4460 parts);
4461 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4462 }
4463 return parts;
4464 }
4465
4466 /* Subroutine of expand_expr:
4467 record the non-copied parts (LIST) of an expr (LHS), and return a list
4468 which specifies the initial values of these parts. */
4469
4470 static tree
4471 init_noncopied_parts (lhs, list)
4472 tree lhs;
4473 tree list;
4474 {
4475 tree tail;
4476 tree parts = 0;
4477
4478 for (tail = list; tail; tail = TREE_CHAIN (tail))
4479 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4480 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4481 else
4482 {
4483 tree part = TREE_VALUE (tail);
4484 tree part_type = TREE_TYPE (part);
4485 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4486 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4487 }
4488 return parts;
4489 }
4490
4491 /* Subroutine of expand_expr: return nonzero iff there is no way that
4492 EXP can reference X, which is being modified. */
4493
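/* For example, when expanding the RHS of a = b + a with X set to
   a's rtx, the second operand still references a, so this returns 0
   and the caller must not build the sum directly in X.  */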
4494 static int
4495 safe_from_p (x, exp)
4496 rtx x;
4497 tree exp;
4498 {
4499 rtx exp_rtl = 0;
4500 int i, nops;
4501
4502 if (x == 0
4503 /* If EXP has varying size, we MUST use a target since we currently
4504 have no way of allocating temporaries of variable size
4505 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4506 So we assume here that something at a higher level has prevented a
4507 clash. This is somewhat bogus, but the best we can do. Only
4508 do this when X is BLKmode. */
4509 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4510 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4511 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4512 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4513 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4514 != INTEGER_CST)
4515 && GET_MODE (x) == BLKmode))
4516 return 1;
4517
4518 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4519 find the underlying pseudo. */
4520 if (GET_CODE (x) == SUBREG)
4521 {
4522 x = SUBREG_REG (x);
4523 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4524 return 0;
4525 }
4526
4527 /* If X is a location in the outgoing argument area, it is always safe. */
4528 if (GET_CODE (x) == MEM
4529 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4530 || (GET_CODE (XEXP (x, 0)) == PLUS
4531 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4532 return 1;
4533
4534 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4535 {
4536 case 'd':
4537 exp_rtl = DECL_RTL (exp);
4538 break;
4539
4540 case 'c':
4541 return 1;
4542
4543 case 'x':
4544 if (TREE_CODE (exp) == TREE_LIST)
4545 return ((TREE_VALUE (exp) == 0
4546 || safe_from_p (x, TREE_VALUE (exp)))
4547 && (TREE_CHAIN (exp) == 0
4548 || safe_from_p (x, TREE_CHAIN (exp))));
4549 else
4550 return 0;
4551
4552 case '1':
4553 return safe_from_p (x, TREE_OPERAND (exp, 0));
4554
4555 case '2':
4556 case '<':
4557 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4558 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4559
4560 case 'e':
4561 case 'r':
4562 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4563 the expression. If it is set, we conflict iff we are that rtx or
4564 both are in memory. Otherwise, we check all operands of the
4565 expression recursively. */
4566
4567 switch (TREE_CODE (exp))
4568 {
4569 case ADDR_EXPR:
4570 return (staticp (TREE_OPERAND (exp, 0))
4571 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4572
4573 case INDIRECT_REF:
4574 if (GET_CODE (x) == MEM)
4575 return 0;
4576 break;
4577
4578 case CALL_EXPR:
4579 exp_rtl = CALL_EXPR_RTL (exp);
4580 if (exp_rtl == 0)
4581 {
4582 /* Assume that the call will clobber all hard registers and
4583 all of memory. */
4584 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4585 || GET_CODE (x) == MEM)
4586 return 0;
4587 }
4588
4589 break;
4590
4591 case RTL_EXPR:
4592 /* If a sequence exists, we would have to scan every instruction
4593 in the sequence to see if it was safe. This is probably not
4594 worthwhile. */
4595 if (RTL_EXPR_SEQUENCE (exp))
4596 return 0;
4597
4598 exp_rtl = RTL_EXPR_RTL (exp);
4599 break;
4600
4601 case WITH_CLEANUP_EXPR:
4602 exp_rtl = RTL_EXPR_RTL (exp);
4603 break;
4604
4605 case CLEANUP_POINT_EXPR:
4606 return safe_from_p (x, TREE_OPERAND (exp, 0));
4607
4608 case SAVE_EXPR:
4609 exp_rtl = SAVE_EXPR_RTL (exp);
4610 break;
4611
4612 case BIND_EXPR:
4613 /* The only operand we look at is operand 1. The rest aren't
4614 part of the expression. */
4615 return safe_from_p (x, TREE_OPERAND (exp, 1));
4616
4617 case METHOD_CALL_EXPR:
4618 /* This takes an rtx argument, but shouldn't appear here. */
4619 abort ();
4620 }
4621
4622 /* If we have an rtx, we do not need to scan our operands. */
4623 if (exp_rtl)
4624 break;
4625
4626 nops = tree_code_length[(int) TREE_CODE (exp)];
4627 for (i = 0; i < nops; i++)
4628 if (TREE_OPERAND (exp, i) != 0
4629 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4630 return 0;
4631 }
4632
4633 /* If we have an rtl, find any enclosed object. Then see if we conflict
4634 with it. */
4635 if (exp_rtl)
4636 {
4637 if (GET_CODE (exp_rtl) == SUBREG)
4638 {
4639 exp_rtl = SUBREG_REG (exp_rtl);
4640 if (GET_CODE (exp_rtl) == REG
4641 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4642 return 0;
4643 }
4644
4645 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4646 are memory and EXP is not readonly. */
4647 return ! (rtx_equal_p (x, exp_rtl)
4648 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4649 && ! TREE_READONLY (exp)));
4650 }
4651
4652 /* If we reach here, it is safe. */
4653 return 1;
4654 }
4655
4656 /* Subroutine of expand_expr: return nonzero iff EXP is an
4657 expression whose type is statically determinable. */
4658
4659 static int
4660 fixed_type_p (exp)
4661 tree exp;
4662 {
4663 if (TREE_CODE (exp) == PARM_DECL
4664 || TREE_CODE (exp) == VAR_DECL
4665 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4666 || TREE_CODE (exp) == COMPONENT_REF
4667 || TREE_CODE (exp) == ARRAY_REF)
4668 return 1;
4669 return 0;
4670 }
4671
4672 /* Subroutine of expand_expr: return rtx if EXP is a
4673 variable or parameter; else return 0. */
4674
4675 static rtx
4676 var_rtx (exp)
4677 tree exp;
4678 {
4679 STRIP_NOPS (exp);
4680 switch (TREE_CODE (exp))
4681 {
4682 case PARM_DECL:
4683 case VAR_DECL:
4684 return DECL_RTL (exp);
4685 default:
4686 return 0;
4687 }
4688 }
4689 \f
4690 /* expand_expr: generate code for computing expression EXP.
4691 An rtx for the computed value is returned. The value is never null.
4692 In the case of a void EXP, const0_rtx is returned.
4693
4694 The value may be stored in TARGET if TARGET is nonzero.
4695 TARGET is just a suggestion; callers must assume that
4696 the rtx returned may not be the same as TARGET.
4697
4698 If TARGET is CONST0_RTX, it means that the value will be ignored.
4699
4700 If TMODE is not VOIDmode, it suggests generating the
4701 result in mode TMODE. But this is done only when convenient.
4702 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4703 TMODE is just a suggestion; callers must assume that
4704 the rtx returned may not have mode TMODE.
4705
4706 Note that TARGET may have neither TMODE nor MODE. In that case, it
4707 probably will not be used.
4708
4709 If MODIFIER is EXPAND_SUM then when EXP is an addition
4710 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4711 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4712 products as above, or REG or MEM, or constant.
4713 Ordinarily in such cases we would output mul or add instructions
4714 and then return a pseudo reg containing the sum.
4715
4716 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4717 it also marks a label as absolutely required (it can't be dead).
4718 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4719 This is used for outputting expressions used in initializers.
4720
4721 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4722 with a constant address even if that address is not normally legitimate.
4723 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4724
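/* For example, expanding p + 4 with EXPAND_SUM may simply return
   (plus (reg) (const_int 4)) for the caller to fold into an address,
   where ordinary expansion would emit an add insn and return the
   pseudo register holding its result.  */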
4725 rtx
4726 expand_expr (exp, target, tmode, modifier)
4727 register tree exp;
4728 rtx target;
4729 enum machine_mode tmode;
4730 enum expand_modifier modifier;
4731 {
4732 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4733 This is static so it will be accessible to our recursive callees. */
4734 static tree placeholder_list = 0;
4735 register rtx op0, op1, temp;
4736 tree type = TREE_TYPE (exp);
4737 int unsignedp = TREE_UNSIGNED (type);
4738 register enum machine_mode mode = TYPE_MODE (type);
4739 register enum tree_code code = TREE_CODE (exp);
4740 optab this_optab;
4741 /* Use subtarget as the target for operand 0 of a binary operation. */
4742 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4743 rtx original_target = target;
4744 /* Maybe defer this until we are sure we are not doing bytecode? */
4745 int ignore = (target == const0_rtx
4746 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4747 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4748 || code == COND_EXPR)
4749 && TREE_CODE (type) == VOID_TYPE));
4750 tree context;
4751
4752
4753 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4754 {
4755 bc_expand_expr (exp);
4756 return NULL;
4757 }
4758
4759 /* Don't use hard regs as subtargets, because the combiner
4760 can only handle pseudo regs. */
4761 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4762 subtarget = 0;
4763 /* Avoid subtargets inside loops,
4764 since they hide some invariant expressions. */
4765 if (preserve_subexpressions_p ())
4766 subtarget = 0;
4767
4768 /* If we are going to ignore this result, we need only do something
4769 if there is a side-effect somewhere in the expression. If there
4770 is, short-circuit the most common cases here. Note that we must
4771 not call expand_expr with anything but const0_rtx in case this
4772 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4773
4774 if (ignore)
4775 {
4776 if (! TREE_SIDE_EFFECTS (exp))
4777 return const0_rtx;
4778
4779 /* Ensure we reference a volatile object even if value is ignored. */
4780 if (TREE_THIS_VOLATILE (exp)
4781 && TREE_CODE (exp) != FUNCTION_DECL
4782 && mode != VOIDmode && mode != BLKmode)
4783 {
4784 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4785 if (GET_CODE (temp) == MEM)
4786 temp = copy_to_reg (temp);
4787 return const0_rtx;
4788 }
4789
4790 if (TREE_CODE_CLASS (code) == '1')
4791 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4792 VOIDmode, modifier);
4793 else if (TREE_CODE_CLASS (code) == '2'
4794 || TREE_CODE_CLASS (code) == '<')
4795 {
4796 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4797 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4798 return const0_rtx;
4799 }
4800 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4801 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4802 /* If the second operand has no side effects, just evaluate
4803 the first. */
4804 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4805 VOIDmode, modifier);
4806
4807 target = 0;
4808 }
4809
4810 /* If we will do cse, generate all results into pseudo registers
4811 since 1) that allows cse to find more things
4812 and 2) otherwise cse could produce an insn the machine
4813 cannot support. */
4814
4815 if (! cse_not_expected && mode != BLKmode && target
4816 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4817 target = subtarget;
4818
4819 switch (code)
4820 {
4821 case LABEL_DECL:
4822 {
4823 tree function = decl_function_context (exp);
4824 /* Handle using a label in a containing function. */
4825 if (function != current_function_decl && function != 0)
4826 {
4827 struct function *p = find_function_data (function);
4828 /* Allocate in the memory associated with the function
4829 that the label is in. */
4830 push_obstacks (p->function_obstack,
4831 p->function_maybepermanent_obstack);
4832
4833 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4834 label_rtx (exp), p->forced_labels);
4835 pop_obstacks ();
4836 }
4837 else if (modifier == EXPAND_INITIALIZER)
4838 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4839 label_rtx (exp), forced_labels);
4840 temp = gen_rtx (MEM, FUNCTION_MODE,
4841 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4842 if (function != current_function_decl && function != 0)
4843 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4844 return temp;
4845 }
4846
4847 case PARM_DECL:
4848 if (DECL_RTL (exp) == 0)
4849 {
4850 error_with_decl (exp, "prior parameter's size depends on `%s'");
4851 return CONST0_RTX (mode);
4852 }
4853
4854 /* ... fall through ... */
4855
4856 case VAR_DECL:
4857 /* If a static var's type was incomplete when the decl was written,
4858 but the type is complete now, lay out the decl now. */
4859 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4860 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4861 {
4862 push_obstacks_nochange ();
4863 end_temporary_allocation ();
4864 layout_decl (exp, 0);
4865 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4866 pop_obstacks ();
4867 }
4868
4869 /* ... fall through ... */
4870
4871 case FUNCTION_DECL:
4872 case RESULT_DECL:
4873 if (DECL_RTL (exp) == 0)
4874 abort ();
4875
4876 /* Ensure the variable is marked as used even if it doesn't go through
4877 a parser. If it hasn't been used yet, write out an external
4878 definition. */
4879 if (! TREE_USED (exp))
4880 {
4881 assemble_external (exp);
4882 TREE_USED (exp) = 1;
4883 }
4884
4885 /* Show we haven't gotten RTL for this yet. */
4886 temp = 0;
4887
4888 /* Handle variables inherited from containing functions. */
4889 context = decl_function_context (exp);
4890
4891 /* We treat inline_function_decl as an alias for the current function
4892 because that is the inline function whose vars, types, etc.
4893 are being merged into the current function.
4894 See expand_inline_function. */
4895
4896 if (context != 0 && context != current_function_decl
4897 && context != inline_function_decl
4898 /* If var is static, we don't need a static chain to access it. */
4899 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4900 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4901 {
4902 rtx addr;
4903
4904 /* Mark as non-local and addressable. */
4905 DECL_NONLOCAL (exp) = 1;
4906 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4907 abort ();
4908 mark_addressable (exp);
4909 if (GET_CODE (DECL_RTL (exp)) != MEM)
4910 abort ();
4911 addr = XEXP (DECL_RTL (exp), 0);
4912 if (GET_CODE (addr) == MEM)
4913 addr = gen_rtx (MEM, Pmode,
4914 fix_lexical_addr (XEXP (addr, 0), exp));
4915 else
4916 addr = fix_lexical_addr (addr, exp);
4917 temp = change_address (DECL_RTL (exp), mode, addr);
4918 }
4919
4920 /* This is the case of an array whose size is to be determined
4921 from its initializer, while the initializer is still being parsed.
4922 See expand_decl. */
4923
4924 else if (GET_CODE (DECL_RTL (exp)) == MEM
4925 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4926 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4927 XEXP (DECL_RTL (exp), 0));
4928
4929 /* If DECL_RTL is memory, we are in the normal case and either
4930 the address is not valid or it is not a register and -fforce-addr
4931 is specified, get the address into a register. */
4932
4933 else if (GET_CODE (DECL_RTL (exp)) == MEM
4934 && modifier != EXPAND_CONST_ADDRESS
4935 && modifier != EXPAND_SUM
4936 && modifier != EXPAND_INITIALIZER
4937 && (! memory_address_p (DECL_MODE (exp),
4938 XEXP (DECL_RTL (exp), 0))
4939 || (flag_force_addr
4940 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4941 temp = change_address (DECL_RTL (exp), VOIDmode,
4942 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4943
4944 /* If we got something, return it. But first, set the alignment
4945 if the address is a register. */
4946 if (temp != 0)
4947 {
4948 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4949 mark_reg_pointer (XEXP (temp, 0),
4950 DECL_ALIGN (exp) / BITS_PER_UNIT);
4951
4952 return temp;
4953 }
4954
4955 /* If the mode of DECL_RTL does not match that of the decl, it
4956 must be a promoted value. We return a SUBREG of the wanted mode,
4957 but mark it so that we know that it was already extended. */
4958
4959 if (GET_CODE (DECL_RTL (exp)) == REG
4960 && GET_MODE (DECL_RTL (exp)) != mode)
4961 {
4962 /* Get the signedness used for this variable. Ensure we get the
4963 same mode we got when the variable was declared. */
4964 if (GET_MODE (DECL_RTL (exp))
4965 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4966 abort ();
4967
4968 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4969 SUBREG_PROMOTED_VAR_P (temp) = 1;
4970 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4971 return temp;
4972 }
4973
4974 return DECL_RTL (exp);
4975
4976 case INTEGER_CST:
4977 return immed_double_const (TREE_INT_CST_LOW (exp),
4978 TREE_INT_CST_HIGH (exp),
4979 mode);
4980
4981 case CONST_DECL:
4982 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4983
4984 case REAL_CST:
4985 /* If optimized, generate immediate CONST_DOUBLE
4986 which will be turned into memory by reload if necessary.
4987
4988 We used to force a register so that loop.c could see it. But
4989 this does not allow gen_* patterns to perform optimizations with
4990 the constants. It also produces two insns in cases like "x = 1.0;".
4991 On most machines, floating-point constants are not permitted in
4992 many insns, so we'd end up copying it to a register in any case.
4993
4994 Now, we do the copying in expand_binop, if appropriate. */
4995 return immed_real_const (exp);
4996
4997 case COMPLEX_CST:
4998 case STRING_CST:
4999 if (! TREE_CST_RTL (exp))
5000 output_constant_def (exp);
5001
5002 /* TREE_CST_RTL probably contains a constant address.
5003 On RISC machines where a constant address isn't valid,
5004 make some insns to get that address into a register. */
5005 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5006 && modifier != EXPAND_CONST_ADDRESS
5007 && modifier != EXPAND_INITIALIZER
5008 && modifier != EXPAND_SUM
5009 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5010 || (flag_force_addr
5011 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5012 return change_address (TREE_CST_RTL (exp), VOIDmode,
5013 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5014 return TREE_CST_RTL (exp);
5015
5016 case SAVE_EXPR:
5017 context = decl_function_context (exp);
5018
5019 /* We treat inline_function_decl as an alias for the current function
5020 because that is the inline function whose vars, types, etc.
5021 are being merged into the current function.
5022 See expand_inline_function. */
5023 if (context == current_function_decl || context == inline_function_decl)
5024 context = 0;
5025
5026 /* If this is non-local, handle it. */
5027 if (context)
5028 {
5029 temp = SAVE_EXPR_RTL (exp);
5030 if (temp && GET_CODE (temp) == REG)
5031 {
5032 put_var_into_stack (exp);
5033 temp = SAVE_EXPR_RTL (exp);
5034 }
5035 if (temp == 0 || GET_CODE (temp) != MEM)
5036 abort ();
5037 return change_address (temp, mode,
5038 fix_lexical_addr (XEXP (temp, 0), exp));
5039 }
5040 if (SAVE_EXPR_RTL (exp) == 0)
5041 {
5042 if (mode == VOIDmode)
5043 temp = const0_rtx;
5044 else
5045 temp = assign_temp (type, 0, 0, 0);
5046
5047 SAVE_EXPR_RTL (exp) = temp;
5048 if (!optimize && GET_CODE (temp) == REG)
5049 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5050 save_expr_regs);
5051
5052 /* If the mode of TEMP does not match that of the expression, it
5053 must be a promoted value. We pass store_expr a SUBREG of the
5054 wanted mode but mark it so that we know that it was already
5055 extended. Note that `unsignedp' was modified above in
5056 this case. */
5057
5058 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5059 {
5060 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5061 SUBREG_PROMOTED_VAR_P (temp) = 1;
5062 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5063 }
5064
5065 if (temp == const0_rtx)
5066 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5067 else
5068 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5069 }
5070
5071 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5072 must be a promoted value. We return a SUBREG of the wanted mode,
5073 but mark it so that we know that it was already extended. */
5074
5075 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5076 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5077 {
5078 /* Compute the signedness and make the proper SUBREG. */
5079 promote_mode (type, mode, &unsignedp, 0);
5080 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5081 SUBREG_PROMOTED_VAR_P (temp) = 1;
5082 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5083 return temp;
5084 }
5085
5086 return SAVE_EXPR_RTL (exp);
5087
5088 case UNSAVE_EXPR:
5089 {
5090 rtx temp;
5091 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5092 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5093 return temp;
5094 }
5095
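/* These two codes cooperate for self-referential sizes, e.g. an
   Ada-like record whose array component's bound is another field of
   the same record: the size tree contains a PLACEHOLDER_EXPR, and a
   WITH_RECORD_EXPR supplies the record object whose fields the
   placeholder should read.  See tree.def.  */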
5096 case PLACEHOLDER_EXPR:
5097 /* If there is an object on the head of the placeholder list,
5098 see if some object in its references is of type TYPE. For
5099 further information, see tree.def. */
5100 if (placeholder_list)
5101 {
5102 tree object;
5103 tree old_list = placeholder_list;
5104
5105 for (object = TREE_PURPOSE (placeholder_list);
5106 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5107 != TYPE_MAIN_VARIANT (type))
5108 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
5109 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
5110 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
5111 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
5112 object = TREE_OPERAND (object, 0))
5113 ;
5114
5115 if (object != 0
5116 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5117 == TYPE_MAIN_VARIANT (type)))
5118 {
5119 /* Expand this object skipping the list entries before
5120 it was found in case it is also a PLACEHOLDER_EXPR.
5121 In that case, we want to translate it using subsequent
5122 entries. */
5123 placeholder_list = TREE_CHAIN (placeholder_list);
5124 temp = expand_expr (object, original_target, tmode, modifier);
5125 placeholder_list = old_list;
5126 return temp;
5127 }
5128 }
5129
5130 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5131 abort ();
5132
5133 case WITH_RECORD_EXPR:
5134 /* Put the object on the placeholder list, expand our first operand,
5135 and pop the list. */
5136 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5137 placeholder_list);
5138 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5139 tmode, modifier);
5140 placeholder_list = TREE_CHAIN (placeholder_list);
5141 return target;
5142
5143 case EXIT_EXPR:
5144 expand_exit_loop_if_false (NULL_PTR,
5145 invert_truthvalue (TREE_OPERAND (exp, 0)));
5146 return const0_rtx;
5147
5148 case LOOP_EXPR:
5149 push_temp_slots ();
5150 expand_start_loop (1);
5151 expand_expr_stmt (TREE_OPERAND (exp, 0));
5152 expand_end_loop ();
5153 pop_temp_slots ();
5154
5155 return const0_rtx;
5156
5157 case BIND_EXPR:
5158 {
5159 tree vars = TREE_OPERAND (exp, 0);
5160 int vars_need_expansion = 0;
5161
5162 /* Need to open a binding contour here because
5163 if there are any cleanups they must be contained here. */
5164 expand_start_bindings (0);
5165
5166 /* Mark the corresponding BLOCK for output in its proper place. */
5167 if (TREE_OPERAND (exp, 2) != 0
5168 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5169 insert_block (TREE_OPERAND (exp, 2));
5170
5171 /* If VARS have not yet been expanded, expand them now. */
5172 while (vars)
5173 {
5174 if (DECL_RTL (vars) == 0)
5175 {
5176 vars_need_expansion = 1;
5177 expand_decl (vars);
5178 }
5179 expand_decl_init (vars);
5180 vars = TREE_CHAIN (vars);
5181 }
5182
5183 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5184
5185 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5186
5187 return temp;
5188 }
5189
5190 case RTL_EXPR:
5191 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5192 abort ();
5193 emit_insns (RTL_EXPR_SEQUENCE (exp));
5194 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5195 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5196 free_temps_for_rtl_expr (exp);
5197 return RTL_EXPR_RTL (exp);
5198
5199 case CONSTRUCTOR:
5200 /* If we don't need the result, just ensure we evaluate any
5201 subexpressions. */
5202 if (ignore)
5203 {
5204 tree elt;
5205 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5206 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5207 return const0_rtx;
5208 }
5209
5210 /* All elts simple constants => refer to a constant in memory. But
5211 if this is a non-BLKmode mode, let it store a field at a time
5212 since that should make a CONST_INT or CONST_DOUBLE when we
5213 fold. Likewise, if we have a target we can use, it is best to
5214 store directly into the target unless the type is large enough
5215 that memcpy will be used. If we are making an initializer and
5216 all operands are constant, put it in memory as well. */
5217 else if ((TREE_STATIC (exp)
5218 && ((mode == BLKmode
5219 && ! (target != 0 && safe_from_p (target, exp)))
5220 || TREE_ADDRESSABLE (exp)
5221 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5222 && (move_by_pieces_ninsns
5223 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5224 TYPE_ALIGN (type) / BITS_PER_UNIT)
5225 > MOVE_RATIO)
5226 && ! mostly_zeros_p (exp))))
5227 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5228 {
5229 rtx constructor = output_constant_def (exp);
5230 if (modifier != EXPAND_CONST_ADDRESS
5231 && modifier != EXPAND_INITIALIZER
5232 && modifier != EXPAND_SUM
5233 && (! memory_address_p (GET_MODE (constructor),
5234 XEXP (constructor, 0))
5235 || (flag_force_addr
5236 && GET_CODE (XEXP (constructor, 0)) != REG)))
5237 constructor = change_address (constructor, VOIDmode,
5238 XEXP (constructor, 0));
5239 return constructor;
5240 }
5241
5242 else
5243 {
5244 /* Handle calls that pass values in multiple non-contiguous
5245 locations. The Irix 6 ABI has examples of this. */
5246 if (target == 0 || ! safe_from_p (target, exp)
5247 || GET_CODE (target) == PARALLEL)
5248 {
5249 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5250 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5251 else
5252 target = assign_temp (type, 0, 1, 1);
5253 }
5254
5255 if (TREE_READONLY (exp))
5256 {
5257 if (GET_CODE (target) == MEM)
5258 target = change_address (target, GET_MODE (target),
5259 XEXP (target, 0));
5260 RTX_UNCHANGING_P (target) = 1;
5261 }
5262
5263 store_constructor (exp, target, 0);
5264 return target;
5265 }
5266
5267 case INDIRECT_REF:
5268 {
5269 tree exp1 = TREE_OPERAND (exp, 0);
5270 tree exp2;
5271
5272 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5273 op0 = memory_address (mode, op0);
5274
5275 temp = gen_rtx (MEM, mode, op0);
5276 /* If address was computed by addition,
5277 mark this as an element of an aggregate. */
5278 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5279 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5280 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5281 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5282 || (TREE_CODE (exp1) == ADDR_EXPR
5283 && (exp2 = TREE_OPERAND (exp1, 0))
5284 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5285 MEM_IN_STRUCT_P (temp) = 1;
5286 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5287
5288 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5289 here, because, in C and C++, the fact that a location is accessed
5290 through a pointer to const does not mean that the value there can
5291 never change. Languages where it can never change should
5292 also set TREE_STATIC. */
5293 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5294 return temp;
5295 }
5296
5297 case ARRAY_REF:
5298 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5299 abort ();
5300
5301 {
5302 tree array = TREE_OPERAND (exp, 0);
5303 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5304 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5305 tree index = TREE_OPERAND (exp, 1);
5306 tree index_type = TREE_TYPE (index);
5307 int i;
5308
5309 if (TREE_CODE (low_bound) != INTEGER_CST
5310 && contains_placeholder_p (low_bound))
5311 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5312
5313 /* Optimize the special-case of a zero lower bound.
5314
5315 We convert the low_bound to sizetype to avoid some problems
5316 with constant folding. (E.g. suppose the lower bound is 1,
5317 and its mode is QI. Without the conversion, (ARRAY
5318 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5319 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5320
5321 But sizetype isn't quite right either (especially if
5322 the lowbound is negative). FIXME */
5323
5324 if (! integer_zerop (low_bound))
5325 index = fold (build (MINUS_EXPR, index_type, index,
5326 convert (sizetype, low_bound)));
5327
5328 if ((TREE_CODE (index) != INTEGER_CST
5329 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5330 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5331 {
5332 /* Nonconstant array index or nonconstant element size, and
5333 not an array in an unaligned (packed) structure field.
5334 Generate the tree for *(&array+index) and expand that,
5335 except do it in a language-independent way
5336 and don't complain about non-lvalue arrays.
5337 `mark_addressable' should already have been called
5338 for any array for which this case will be reached. */
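/* Schematically, the tree built below for ARRAY[INDEX] is
   *(&ARRAY + (TYPE *) (INDEX * sizeof (TYPE))), where TYPE is the
   possibly const- or volatile-qualified element type and the
   multiply is done in INDEX's own type.  */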
5339
5340 /* Don't forget the const or volatile flag from the array
5341 element. */
5342 tree variant_type = build_type_variant (type,
5343 TREE_READONLY (exp),
5344 TREE_THIS_VOLATILE (exp));
5345 tree array_adr = build1 (ADDR_EXPR,
5346 build_pointer_type (variant_type), array);
5347 tree elt;
5348 tree size = size_in_bytes (type);
5349
5350 /* Convert the integer argument to a type the same size as sizetype
5351 so the multiply won't overflow spuriously. */
5352 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5353 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5354 index);
5355
5356 if (TREE_CODE (size) != INTEGER_CST
5357 && contains_placeholder_p (size))
5358 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5359
5360 /* Don't think the address has side effects
5361 just because the array does.
5362 (In some cases the address might have side effects,
5363 and we fail to record that fact here. However, it should not
5364 matter, since expand_expr should not care.) */
5365 TREE_SIDE_EFFECTS (array_adr) = 0;
5366
5367 elt
5368 = build1
5369 (INDIRECT_REF, type,
5370 fold (build (PLUS_EXPR,
5371 TYPE_POINTER_TO (variant_type),
5372 array_adr,
5373 fold
5374 (build1
5375 (NOP_EXPR,
5376 TYPE_POINTER_TO (variant_type),
5377 fold (build (MULT_EXPR, TREE_TYPE (index),
5378 index,
5379 convert (TREE_TYPE (index),
5380 size))))))));
5381
5382 /* Volatility, etc., of new expression is same as old
5383 expression. */
5384 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5385 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5386 TREE_READONLY (elt) = TREE_READONLY (exp);
5387
5388 return expand_expr (elt, target, tmode, modifier);
5389 }
5390
5391 /* Fold an expression like: "foo"[2].
5392 This is not done in fold so it won't happen inside &.
5393 Don't fold if this is for wide characters since it's too
5394 difficult to do correctly and this is a very rare case. */
5395
5396 if (TREE_CODE (array) == STRING_CST
5397 && TREE_CODE (index) == INTEGER_CST
5398 && !TREE_INT_CST_HIGH (index)
5399 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5400 && GET_MODE_CLASS (mode) == MODE_INT
5401 && GET_MODE_SIZE (mode) == 1)
5402 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5403
5404 /* If this is a constant index into a constant array,
5405 just get the value from the array. Handle both the cases when
5406 we have an explicit constructor and when our operand is a variable
5407 that was declared const. */
5408
5409 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5410 {
5411 if (TREE_CODE (index) == INTEGER_CST
5412 && TREE_INT_CST_HIGH (index) == 0)
5413 {
5414 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5415
5416 i = TREE_INT_CST_LOW (index);
5417 while (elem && i--)
5418 elem = TREE_CHAIN (elem);
5419 if (elem)
5420 return expand_expr (fold (TREE_VALUE (elem)), target,
5421 tmode, modifier);
5422 }
5423 }
5424
5425 else if (optimize >= 1
5426 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5427 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5428 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5429 {
5430 if (TREE_CODE (index) == INTEGER_CST
5431 && TREE_INT_CST_HIGH (index) == 0)
5432 {
5433 tree init = DECL_INITIAL (array);
5434
5435 i = TREE_INT_CST_LOW (index);
5436 if (TREE_CODE (init) == CONSTRUCTOR)
5437 {
5438 tree elem = CONSTRUCTOR_ELTS (init);
5439
5440 while (elem
5441 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5442 elem = TREE_CHAIN (elem);
5443 if (elem)
5444 return expand_expr (fold (TREE_VALUE (elem)), target,
5445 tmode, modifier);
5446 }
5447 else if (TREE_CODE (init) == STRING_CST
5448 && i < TREE_STRING_LENGTH (init))
5449 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5450 }
5451 }
5452 }
5453
5454 /* Treat array-ref with constant index as a component-ref. */
5455
5456 case COMPONENT_REF:
5457 case BIT_FIELD_REF:
5458 /* If the operand is a CONSTRUCTOR, we can just extract the
5459 appropriate field if it is present. Don't do this if we have
5460 already written the data since we want to refer to that copy
5461 and varasm.c assumes that's what we'll do. */
5462 if (code != ARRAY_REF
5463 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5464 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5465 {
5466 tree elt;
5467
5468 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5469 elt = TREE_CHAIN (elt))
5470 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5471 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5472 }
5473
5474 {
5475 enum machine_mode mode1;
5476 int bitsize;
5477 int bitpos;
5478 tree offset;
5479 int volatilep = 0;
5480 int alignment;
5481 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5482 &mode1, &unsignedp, &volatilep,
5483 &alignment);
5484
5485 /* If we got back the original object, something is wrong. Perhaps
5486 we are evaluating an expression too early. In any event, don't
5487 infinitely recurse. */
5488 if (tem == exp)
5489 abort ();
5490
5491 /* If TEM's type is a union of variable size, pass TARGET to the inner
5492 computation, since it will need a temporary and TARGET is known
5493 to suffice. This occurs in unchecked conversion in Ada. */
5494
5495 op0 = expand_expr (tem,
5496 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5497 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5498 != INTEGER_CST)
5499 ? target : NULL_RTX),
5500 VOIDmode,
5501 modifier == EXPAND_INITIALIZER ? modifier : 0);
5502
5503 /* If this is a constant, put it into a register if it is a
5504 legitimate constant and memory if it isn't. */
5505 if (CONSTANT_P (op0))
5506 {
5507 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5508 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5509 op0 = force_reg (mode, op0);
5510 else
5511 op0 = validize_mem (force_const_mem (mode, op0));
5512 }
5513
5514 if (offset != 0)
5515 {
5516 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5517
5518 if (GET_CODE (op0) != MEM)
5519 abort ();
5520 op0 = change_address (op0, VOIDmode,
5521 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5522 force_reg (ptr_mode, offset_rtx)));
5523 }
5524
5525 /* Don't forget about volatility even if this is a bitfield. */
5526 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5527 {
5528 op0 = copy_rtx (op0);
5529 MEM_VOLATILE_P (op0) = 1;
5530 }
5531
5532 /* In cases where an aligned union has an unaligned object
5533 as a field, we might be extracting a BLKmode value from
5534 an integer-mode (e.g., SImode) object. Handle this case
5535 by doing the extract into an object as wide as the field
5536 (which we know to be the width of a basic mode), then
5537 storing into memory, and changing the mode to BLKmode.
5538 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5539 EXPAND_INITIALIZER), then we must not copy to a temporary. */
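/* For instance (a hypothetical layout): a 2-byte field of a packed
   record held in an SImode register is pulled out below with
   extract_bit_field in HImode; for a 3-byte field, which fits no
   integer mode, the bytes are instead block-copied into a
   temporary and the result left in BLKmode.  */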
5540 if (mode1 == VOIDmode
5541 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5542 || (modifier != EXPAND_CONST_ADDRESS
5543 && modifier != EXPAND_INITIALIZER
5544 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5545 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5546 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5547 /* If the field isn't aligned enough to fetch as a memref,
5548 fetch it as a bit field. */
5549 || (SLOW_UNALIGNED_ACCESS
5550 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5551 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5552 {
5553 enum machine_mode ext_mode = mode;
5554
5555 if (ext_mode == BLKmode)
5556 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5557
5558 if (ext_mode == BLKmode)
5559 {
5560 /* In this case, BITPOS must start at a byte boundary and
5561 TARGET, if specified, must be a MEM. */
5562 if (GET_CODE (op0) != MEM
5563 || (target != 0 && GET_CODE (target) != MEM)
5564 || bitpos % BITS_PER_UNIT != 0)
5565 abort ();
5566
5567 op0 = change_address (op0, VOIDmode,
5568 plus_constant (XEXP (op0, 0),
5569 bitpos / BITS_PER_UNIT));
5570 if (target == 0)
5571 target = assign_temp (type, 0, 1, 1);
5572
5573 emit_block_move (target, op0,
5574 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5575 / BITS_PER_UNIT),
5576 1);
5577
5578 return target;
5579 }
5580
5581 op0 = validize_mem (op0);
5582
5583 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5584 mark_reg_pointer (XEXP (op0, 0), alignment);
5585
5586 op0 = extract_bit_field (op0, bitsize, bitpos,
5587 unsignedp, target, ext_mode, ext_mode,
5588 alignment,
5589 int_size_in_bytes (TREE_TYPE (tem)));
5590
5591 /* If the result is a record type and BITSIZE is narrower than
5592 the mode of OP0, an integral mode, and this is a big endian
5593 machine, we must put the field into the high-order bits. */
5594 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5595 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5596 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5597 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5598 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5599 - bitsize),
5600 op0, 1);
5601
5602 if (mode == BLKmode)
5603 {
5604 rtx new = assign_stack_temp (ext_mode,
5605 bitsize / BITS_PER_UNIT, 0);
5606
5607 emit_move_insn (new, op0);
5608 op0 = copy_rtx (new);
5609 PUT_MODE (op0, BLKmode);
5610 MEM_IN_STRUCT_P (op0) = 1;
5611 }
5612
5613 return op0;
5614 }
5615
5616 /* If the result is BLKmode, use that to access the object
5617 now as well. */
5618 if (mode == BLKmode)
5619 mode1 = BLKmode;
5620
5621 /* Get a reference to just this component. */
5622 if (modifier == EXPAND_CONST_ADDRESS
5623 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5624 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5625 (bitpos / BITS_PER_UNIT)));
5626 else
5627 op0 = change_address (op0, mode1,
5628 plus_constant (XEXP (op0, 0),
5629 (bitpos / BITS_PER_UNIT)));
5630 if (GET_CODE (XEXP (op0, 0)) == REG)
5631 mark_reg_pointer (XEXP (op0, 0), alignment);
5632
5633 MEM_IN_STRUCT_P (op0) = 1;
5634 MEM_VOLATILE_P (op0) |= volatilep;
5635 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5636 return op0;
5637 if (target == 0)
5638 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5639 convert_move (target, op0, unsignedp);
5640 return target;
5641 }
5642
5643 /* Intended for a reference to a buffer of a file-object in Pascal.
5644 But it's not certain that a special tree code will really be
5645 necessary for these. INDIRECT_REF might work for them. */
5646 case BUFFER_REF:
5647 abort ();
5648
5649 case IN_EXPR:
5650 {
5651 /* Pascal set IN expression. The set is stored as an array of
5652 bytes, so the "word" of the algorithm below is really a byte.
5653 Algorithm:
5654 rlo = set_low - (set_low % bits_per_unit);
5655 the_byte = set [ (index - rlo)/bits_per_unit ];
5656 bit_index = index % bits_per_unit;
5657 bitmask = 1 << bit_index;
5658 return !!(the_byte & bitmask); */
5659
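/* An illustrative walk-through with hypothetical values (not from
   the source): for set_low == 3, index == 11, BITS_PER_UNIT == 8,
   we get rlo = 3 - 3%8 = 0, the_byte = set[(11-0)/8] = set[1],
   bit_index = 11%8 = 3 and bitmask = 1<<3.  */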
5660 tree set = TREE_OPERAND (exp, 0);
5661 tree index = TREE_OPERAND (exp, 1);
5662 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5663 tree set_type = TREE_TYPE (set);
5664 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5665 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5666 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5667 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5668 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5669 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5670 rtx setaddr = XEXP (setval, 0);
5671 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5672 rtx rlow;
5673 rtx diff, quo, rem, addr, bit, result;
5674
5675 preexpand_calls (exp);
5676
5677 /* If domain is empty, answer is no. Likewise if index is constant
5678 and out of bounds. */
5679 if (((TREE_CODE (set_high_bound) == INTEGER_CST
5680 && TREE_CODE (set_low_bound) == INTEGER_CST
5681 && tree_int_cst_lt (set_high_bound, set_low_bound))
5682 || (TREE_CODE (index) == INTEGER_CST
5683 && TREE_CODE (set_low_bound) == INTEGER_CST
5684 && tree_int_cst_lt (index, set_low_bound))
5685 || (TREE_CODE (set_high_bound) == INTEGER_CST
5686 && TREE_CODE (index) == INTEGER_CST
5687 && tree_int_cst_lt (set_high_bound, index))))
5688 return const0_rtx;
5689
5690 if (target == 0)
5691 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5692
5693 /* If we get here, we have to generate the code for both cases
5694 (in range and out of range). */
5695
5696 op0 = gen_label_rtx ();
5697 op1 = gen_label_rtx ();
5698
5699 if (! (GET_CODE (index_val) == CONST_INT
5700 && GET_CODE (lo_r) == CONST_INT))
5701 {
5702 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5703 GET_MODE (index_val), iunsignedp, 0);
5704 emit_jump_insn (gen_blt (op1));
5705 }
5706
5707 if (! (GET_CODE (index_val) == CONST_INT
5708 && GET_CODE (hi_r) == CONST_INT))
5709 {
5710 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5711 GET_MODE (index_val), iunsignedp, 0);
5712 emit_jump_insn (gen_bgt (op1));
5713 }
5714
5715 /* Calculate the element number of bit zero in the first byte
5716 of the set: LO_R rounded down to a multiple of BITS_PER_UNIT. */
5717 if (GET_CODE (lo_r) == CONST_INT)
5718 rlow = GEN_INT (INTVAL (lo_r)
5719 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
5720 else
5721 rlow = expand_binop (index_mode, and_optab, lo_r,
5722 GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
5723 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5724
5725 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5726 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5727
5728 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5729 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5730 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5731 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5732
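/* The byte containing the bit is at SETADDR plus the byte offset
   QUO computed above.  */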
5733 addr = memory_address (byte_mode,
5734 expand_binop (index_mode, add_optab, quo,
5735 setaddr, NULL_RTX, iunsignedp,
5736 OPTAB_LIB_WIDEN));
5737
5738 /* Extract the bit we want to examine. */
5739 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5740 gen_rtx (MEM, byte_mode, addr),
5741 make_tree (TREE_TYPE (index), rem),
5742 NULL_RTX, 1);
5743 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5744 GET_MODE (target) == byte_mode ? target : 0,
5745 1, OPTAB_LIB_WIDEN);
5746
5747 if (result != target)
5748 convert_move (target, result, 1);
5749
5750 /* Output the code to handle the out-of-range case. */
5751 emit_jump (op0);
5752 emit_label (op1);
5753 emit_move_insn (target, const0_rtx);
5754 emit_label (op0);
5755 return target;
5756 }
5757
5758 case WITH_CLEANUP_EXPR:
5759 if (RTL_EXPR_RTL (exp) == 0)
5760 {
5761 RTL_EXPR_RTL (exp)
5762 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5763 cleanups_this_call
5764 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5765 /* That's it for this cleanup. */
5766 TREE_OPERAND (exp, 2) = 0;
5767 expand_eh_region_start ();
5768 }
5769 return RTL_EXPR_RTL (exp);
5770
5771 case CLEANUP_POINT_EXPR:
5772 {
5773 extern int temp_slot_level;
5774 tree old_cleanups = cleanups_this_call;
5775 int old_temp_level = target_temp_slot_level;
5776 push_temp_slots ();
5777 target_temp_slot_level = temp_slot_level;
5778 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5779 /* If we're going to use this value, load it up now. */
5780 if (! ignore)
5781 op0 = force_not_mem (op0);
5782 expand_cleanups_to (old_cleanups);
5783 preserve_temp_slots (op0);
5784 free_temp_slots ();
5785 pop_temp_slots ();
5786 target_temp_slot_level = old_temp_level;
5787 }
5788 return op0;
5789
5790 case CALL_EXPR:
5791 /* Check for a built-in function. */
5792 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5793 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5794 == FUNCTION_DECL)
5795 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5796 return expand_builtin (exp, target, subtarget, tmode, ignore);
5797
5798 /* If this call was expanded already by preexpand_calls,
5799 just return the result we got. */
5800 if (CALL_EXPR_RTL (exp) != 0)
5801 return CALL_EXPR_RTL (exp);
5802
5803 return expand_call (exp, target, ignore);
5804
5805 case NON_LVALUE_EXPR:
5806 case NOP_EXPR:
5807 case CONVERT_EXPR:
5808 case REFERENCE_EXPR:
5809 if (TREE_CODE (type) == UNION_TYPE)
5810 {
5811 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5812 if (target == 0)
5813 {
5814 if (mode != BLKmode)
5815 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5816 else
5817 target = assign_temp (type, 0, 1, 1);
5818 }
5819
5820 if (GET_CODE (target) == MEM)
5821 /* Store data into beginning of memory target. */
5822 store_expr (TREE_OPERAND (exp, 0),
5823 change_address (target, TYPE_MODE (valtype), 0), 0);
5824
5825 else if (GET_CODE (target) == REG)
5826 /* Store this field into a union of the proper type. */
5827 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5828 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5829 VOIDmode, 0, 1,
5830 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5831 else
5832 abort ();
5833
5834 /* Return the entire union. */
5835 return target;
5836 }
5837
5838 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5839 {
5840 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5841 modifier);
5842
5843 /* If the signedness of the conversion differs and OP0 is
5844 a promoted SUBREG, clear that indication since we now
5845 have to do the proper extension. */
5846 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5847 && GET_CODE (op0) == SUBREG)
5848 SUBREG_PROMOTED_VAR_P (op0) = 0;
5849
5850 return op0;
5851 }
5852
5853 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5854 if (GET_MODE (op0) == mode)
5855 return op0;
5856
5857 /* If OP0 is a constant, just convert it into the proper mode. */
5858 if (CONSTANT_P (op0))
5859 return
5860 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5861 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5862
5863 if (modifier == EXPAND_INITIALIZER)
5864 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5865
5866 if (target == 0)
5867 return
5868 convert_to_mode (mode, op0,
5869 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5870 else
5871 convert_move (target, op0,
5872 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5873 return target;
5874
5875 case PLUS_EXPR:
5876 /* We come here from MINUS_EXPR when the second operand is a
5877 constant. */
5878 plus_expr:
5879 this_optab = add_optab;
5880
5881 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5882 something else, make sure we add the register to the constant and
5883 then to the other thing. This case can occur during strength
5884 reduction and doing it this way will produce better code if the
5885 frame pointer or argument pointer is eliminated.
5886
5887 fold-const.c will ensure that the constant is always in the inner
5888 PLUS_EXPR, so the only case we need to do anything about is if
5889 sp, ap, or fp is our second argument, in which case we must swap
5890 the innermost first argument and our second argument. */
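/* For example (illustrative): the tree ((X + C) + FP) is rewritten
   by the swap below into ((FP + C) + X), so that FP + C can later
   fold into a single address displacement.  */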
5891
5892 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5893 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5894 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5895 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5896 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5897 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5898 {
5899 tree t = TREE_OPERAND (exp, 1);
5900
5901 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5902 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5903 }
5904
5905 /* If the result is to be ptr_mode and we are adding an integer to
5906 something, we might be forming a constant. So try to use
5907 plus_constant. If it produces a sum and we can't accept it,
5908 use force_operand. This allows P = &ARR[const] to generate
5909 efficient code on machines where a SYMBOL_REF is not a valid
5910 address.
5911
5912 If this is an EXPAND_SUM call, always return the sum. */
5913 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5914 || mode == ptr_mode)
5915 {
5916 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5917 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5918 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5919 {
5920 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5921 EXPAND_SUM);
5922 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5923 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5924 op1 = force_operand (op1, target);
5925 return op1;
5926 }
5927
5928 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5929 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5930 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5931 {
5932 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5933 EXPAND_SUM);
5934 if (! CONSTANT_P (op0))
5935 {
5936 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5937 VOIDmode, modifier);
5938 /* Don't go to both_summands if modifier
5939 says it's not right to return a PLUS. */
5940 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5941 goto binop2;
5942 goto both_summands;
5943 }
5944 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5945 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5946 op0 = force_operand (op0, target);
5947 return op0;
5948 }
5949 }
5950
5951 /* No sense saving up arithmetic to be done
5952 if it's all in the wrong mode to form part of an address.
5953 And force_operand won't know whether to sign-extend or
5954 zero-extend. */
5955 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5956 || mode != ptr_mode)
5957 goto binop;
5958
5959 preexpand_calls (exp);
5960 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5961 subtarget = 0;
5962
5963 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5964 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5965
5966 both_summands:
5967 /* Make sure any term that's a sum with a constant comes last. */
5968 if (GET_CODE (op0) == PLUS
5969 && CONSTANT_P (XEXP (op0, 1)))
5970 {
5971 temp = op0;
5972 op0 = op1;
5973 op1 = temp;
5974 }
5975 /* If adding to a sum including a constant,
5976 associate it to put the constant outside. */
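/* E.g., with OP1 == (X + C) this turns OP0 + (X + C) into
   (OP0 + X) + C, leaving the constant outermost.  */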
5977 if (GET_CODE (op1) == PLUS
5978 && CONSTANT_P (XEXP (op1, 1)))
5979 {
5980 rtx constant_term = const0_rtx;
5981
5982 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5983 if (temp != 0)
5984 op0 = temp;
5985 /* Ensure that MULT comes first if there is one. */
5986 else if (GET_CODE (op0) == MULT)
5987 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5988 else
5989 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5990
5991 /* Let's also eliminate constants from op0 if possible. */
5992 op0 = eliminate_constant_term (op0, &constant_term);
5993
5994 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5995 their sum should be a constant. Form it into OP1, since the
5996 result we want will then be OP0 + OP1. */
5997
5998 temp = simplify_binary_operation (PLUS, mode, constant_term,
5999 XEXP (op1, 1));
6000 if (temp != 0)
6001 op1 = temp;
6002 else
6003 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6004 }
6005
6006 /* Put a constant term last and put a multiplication first. */
6007 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6008 temp = op1, op1 = op0, op0 = temp;
6009
6010 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6011 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
6012
6013 case MINUS_EXPR:
6014 /* For initializers, we are allowed to return a MINUS of two
6015 symbolic constants; handle here all the cases in which both
6016 operands are constant. */
6019 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6020 && really_constant_p (TREE_OPERAND (exp, 0))
6021 && really_constant_p (TREE_OPERAND (exp, 1)))
6022 {
6023 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6024 VOIDmode, modifier);
6025 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6026 VOIDmode, modifier);
6027
6028 /* If the last operand is a CONST_INT, use plus_constant of
6029 the negated constant. Else make the MINUS. */
6030 if (GET_CODE (op1) == CONST_INT)
6031 return plus_constant (op0, - INTVAL (op1));
6032 else
6033 return gen_rtx (MINUS, mode, op0, op1);
6034 }
6035 /* Convert A - const to A + (-const). */
6036 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6037 {
6038 tree negated = fold (build1 (NEGATE_EXPR, type,
6039 TREE_OPERAND (exp, 1)));
6040
6041 /* Deal with the case where we can't negate the constant
6042 in TYPE. */
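/* (That happens when TYPE is unsigned, or when the constant is
   the most negative value of TYPE, whose negation overflows.)  */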
6043 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6044 {
6045 tree newtype = signed_type (type);
6046 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6047 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6048 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6049
6050 if (! TREE_OVERFLOW (newneg))
6051 return expand_expr (convert (type,
6052 build (PLUS_EXPR, newtype,
6053 newop0, newneg)),
6054 target, tmode, modifier);
6055 }
6056 else
6057 {
6058 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6059 goto plus_expr;
6060 }
6061 }
6062 this_optab = sub_optab;
6063 goto binop;
6064
6065 case MULT_EXPR:
6066 preexpand_calls (exp);
6067 /* If first operand is constant, swap them.
6068 Thus the following special case checks need only
6069 check the second operand. */
6070 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6071 {
6072 register tree t1 = TREE_OPERAND (exp, 0);
6073 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6074 TREE_OPERAND (exp, 1) = t1;
6075 }
6076
6077 /* Attempt to return something suitable for generating an
6078 indexed address, for machines that support that. */
6079
6080 if (modifier == EXPAND_SUM && mode == ptr_mode
6081 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6082 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6083 {
6084 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6085
6086 /* Apply distributive law if OP0 is x+c. */
6087 if (GET_CODE (op0) == PLUS
6088 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6089 return gen_rtx (PLUS, mode,
6090 gen_rtx (MULT, mode, XEXP (op0, 0),
6091 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6092 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6093 * INTVAL (XEXP (op0, 1))));
6094
6095 if (GET_CODE (op0) != REG)
6096 op0 = force_operand (op0, NULL_RTX);
6097 if (GET_CODE (op0) != REG)
6098 op0 = copy_to_mode_reg (mode, op0);
6099
6100 return gen_rtx (MULT, mode, op0,
6101 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6102 }
6103
6104 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6105 subtarget = 0;
6106
6107 /* Check for multiplying things that have been extended
6108 from a narrower type. If this machine supports multiplying
6109 in that narrower type with a result in the desired type,
6110 do it that way, and avoid the explicit type-conversion. */
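/* An illustrative case (assuming 16-bit HImode `short' and 32-bit
   SImode `int'): (int) s1 * (int) s2 with s1, s2 shorts can use a
   16x16->32 widening multiply and skip both extensions.  */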
6111 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6112 && TREE_CODE (type) == INTEGER_TYPE
6113 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6114 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6115 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6116 && int_fits_type_p (TREE_OPERAND (exp, 1),
6117 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6118 /* Don't use a widening multiply if a shift will do. */
6119 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6120 > HOST_BITS_PER_WIDE_INT)
6121 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6122 ||
6123 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6124 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6125 ==
6126 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6127 /* If both operands are extended, they must either both
6128 be zero-extended or both be sign-extended. */
6129 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6130 ==
6131 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6132 {
6133 enum machine_mode innermode
6134 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6135 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6136 ? smul_widen_optab : umul_widen_optab);
6137 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6138 ? umul_widen_optab : smul_widen_optab);
6139 if (mode == GET_MODE_WIDER_MODE (innermode))
6140 {
6141 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6142 {
6143 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6144 NULL_RTX, VOIDmode, 0);
6145 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6146 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6147 VOIDmode, 0);
6148 else
6149 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6150 NULL_RTX, VOIDmode, 0);
6151 goto binop2;
6152 }
6153 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6154 && innermode == word_mode)
6155 {
6156 rtx htem;
6157 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6158 NULL_RTX, VOIDmode, 0);
6159 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6160 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6161 VOIDmode, 0);
6162 else
6163 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6164 NULL_RTX, VOIDmode, 0);
6165 temp = expand_binop (mode, other_optab, op0, op1, target,
6166 unsignedp, OPTAB_LIB_WIDEN);
6167 htem = expand_mult_highpart_adjust (innermode,
6168 gen_highpart (innermode, temp),
6169 op0, op1,
6170 gen_highpart (innermode, temp),
6171 unsignedp);
6172 emit_move_insn (gen_highpart (innermode, temp), htem);
6173 return temp;
6174 }
6175 }
6176 }
6177 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6178 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6179 return expand_mult (mode, op0, op1, target, unsignedp);
6180
6181 case TRUNC_DIV_EXPR:
6182 case FLOOR_DIV_EXPR:
6183 case CEIL_DIV_EXPR:
6184 case ROUND_DIV_EXPR:
6185 case EXACT_DIV_EXPR:
6186 preexpand_calls (exp);
6187 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6188 subtarget = 0;
6189 /* Possible optimization: compute the dividend with EXPAND_SUM
6190 then, if the divisor is constant, optimize the case where
6191 some terms of the dividend have coefficients divisible by it. */
6192 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6193 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6194 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6195
6196 case RDIV_EXPR:
6197 this_optab = flodiv_optab;
6198 goto binop;
6199
6200 case TRUNC_MOD_EXPR:
6201 case FLOOR_MOD_EXPR:
6202 case CEIL_MOD_EXPR:
6203 case ROUND_MOD_EXPR:
6204 preexpand_calls (exp);
6205 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6206 subtarget = 0;
6207 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6208 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6209 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6210
6211 case FIX_ROUND_EXPR:
6212 case FIX_FLOOR_EXPR:
6213 case FIX_CEIL_EXPR:
6214 abort (); /* Not used for C. */
6215
6216 case FIX_TRUNC_EXPR:
6217 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6218 if (target == 0)
6219 target = gen_reg_rtx (mode);
6220 expand_fix (target, op0, unsignedp);
6221 return target;
6222
6223 case FLOAT_EXPR:
6224 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6225 if (target == 0)
6226 target = gen_reg_rtx (mode);
6227 /* expand_float can't figure out what to do if FROM has VOIDmode.
6228 So give it the correct mode. With -O, cse will optimize this. */
6229 if (GET_MODE (op0) == VOIDmode)
6230 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6231 op0);
6232 expand_float (target, op0,
6233 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6234 return target;
6235
6236 case NEGATE_EXPR:
6237 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6238 temp = expand_unop (mode, neg_optab, op0, target, 0);
6239 if (temp == 0)
6240 abort ();
6241 return temp;
6242
6243 case ABS_EXPR:
6244 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6245
6246 /* Handle complex values specially. */
6247 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6248 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6249 return expand_complex_abs (mode, op0, target, unsignedp);
6250
6251 /* Unsigned abs is simply the operand. Testing here means we don't
6252 risk generating incorrect code below. */
6253 if (TREE_UNSIGNED (type))
6254 return op0;
6255
6256 return expand_abs (mode, op0, target, unsignedp,
6257 safe_from_p (target, TREE_OPERAND (exp, 0)));
6258
6259 case MAX_EXPR:
6260 case MIN_EXPR:
6261 target = original_target;
6262 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6263 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6264 || GET_MODE (target) != mode
6265 || (GET_CODE (target) == REG
6266 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6267 target = gen_reg_rtx (mode);
6268 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6269 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6270
6271 /* First try to do it with a special MIN or MAX instruction.
6272 If that does not win, use a conditional jump to select the proper
6273 value. */
6274 this_optab = (TREE_UNSIGNED (type)
6275 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6276 : (code == MIN_EXPR ? smin_optab : smax_optab));
6277
6278 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6279 OPTAB_WIDEN);
6280 if (temp != 0)
6281 return temp;
6282
6283 /* At this point, a MEM target is no longer useful; we will get better
6284 code without it. */
6285
6286 if (GET_CODE (target) == MEM)
6287 target = gen_reg_rtx (mode);
6288
6289 if (target != op0)
6290 emit_move_insn (target, op0);
6291
6292 op0 = gen_label_rtx ();
6293
6294 /* If this mode is an integer too wide to compare properly,
6295 compare word by word. Rely on cse to optimize constant cases. */
6296 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6297 {
6298 if (code == MAX_EXPR)
6299 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6300 target, op1, NULL_RTX, op0);
6301 else
6302 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6303 op1, target, NULL_RTX, op0);
6304 emit_move_insn (target, op1);
6305 }
6306 else
6307 {
6308 if (code == MAX_EXPR)
6309 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6310 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6311 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6312 else
6313 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6314 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6315 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6316 if (temp == const0_rtx)
6317 emit_move_insn (target, op1);
6318 else if (temp != const_true_rtx)
6319 {
6320 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6321 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6322 else
6323 abort ();
6324 emit_move_insn (target, op1);
6325 }
6326 }
6327 emit_label (op0);
6328 return target;
6329
6330 case BIT_NOT_EXPR:
6331 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6332 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6333 if (temp == 0)
6334 abort ();
6335 return temp;
6336
6337 case FFS_EXPR:
6338 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6339 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6340 if (temp == 0)
6341 abort ();
6342 return temp;
6343
6344 /* ??? Can optimize bitwise operations with one arg constant.
6345 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6346 and (a bitwise1 b) bitwise2 b (etc)
6347 but that is probably not worth while. */
6348
6349 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6350 boolean values when we want in all cases to compute both of them. In
6351 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6352 as actual zero-or-1 values and then bitwise anding. In cases where
6353 there cannot be any side effects, better code would be made by
6354 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6355 how to recognize those cases. */
6356
6357 case TRUTH_AND_EXPR:
6358 case BIT_AND_EXPR:
6359 this_optab = and_optab;
6360 goto binop;
6361
6362 case TRUTH_OR_EXPR:
6363 case BIT_IOR_EXPR:
6364 this_optab = ior_optab;
6365 goto binop;
6366
6367 case TRUTH_XOR_EXPR:
6368 case BIT_XOR_EXPR:
6369 this_optab = xor_optab;
6370 goto binop;
6371
6372 case LSHIFT_EXPR:
6373 case RSHIFT_EXPR:
6374 case LROTATE_EXPR:
6375 case RROTATE_EXPR:
6376 preexpand_calls (exp);
6377 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6378 subtarget = 0;
6379 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6380 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6381 unsignedp);
6382
6383 /* Could determine the answer when only additive constants differ. Also,
6384 the addition of one can be handled by changing the condition. */
6385 case LT_EXPR:
6386 case LE_EXPR:
6387 case GT_EXPR:
6388 case GE_EXPR:
6389 case EQ_EXPR:
6390 case NE_EXPR:
6391 preexpand_calls (exp);
6392 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6393 if (temp != 0)
6394 return temp;
6395
6396 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6397 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6398 && original_target
6399 && GET_CODE (original_target) == REG
6400 && (GET_MODE (original_target)
6401 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6402 {
6403 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6404 VOIDmode, 0);
6405
6406 if (temp != original_target)
6407 temp = copy_to_reg (temp);
6408
6409 op1 = gen_label_rtx ();
6410 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6411 GET_MODE (temp), unsignedp, 0);
6412 emit_jump_insn (gen_beq (op1));
6413 emit_move_insn (temp, const1_rtx);
6414 emit_label (op1);
6415 return temp;
6416 }
6417
6418 /* If no set-flag instruction, must generate a conditional
6419 store into a temporary variable. Drop through
6420 and handle this like && and ||. */
6421
6422 case TRUTH_ANDIF_EXPR:
6423 case TRUTH_ORIF_EXPR:
6424 if (! ignore
6425 && (target == 0 || ! safe_from_p (target, exp)
6426 /* Make sure we don't have a hard reg (such as function's return
6427 value) live across basic blocks, if not optimizing. */
6428 || (!optimize && GET_CODE (target) == REG
6429 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6430 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6431
6432 if (target)
6433 emit_clr_insn (target);
6434
6435 op1 = gen_label_rtx ();
6436 jumpifnot (exp, op1);
6437
6438 if (target)
6439 emit_0_to_1_insn (target);
6440
6441 emit_label (op1);
6442 return ignore ? const0_rtx : target;
6443
6444 case TRUTH_NOT_EXPR:
6445 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6446 /* The parser is careful to generate TRUTH_NOT_EXPR
6447 only with operands that are always zero or one. */
6448 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6449 target, 1, OPTAB_LIB_WIDEN);
6450 if (temp == 0)
6451 abort ();
6452 return temp;
6453
6454 case COMPOUND_EXPR:
6455 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6456 emit_queue ();
6457 return expand_expr (TREE_OPERAND (exp, 1),
6458 (ignore ? const0_rtx : target),
6459 VOIDmode, 0);
6460
6461 case COND_EXPR:
6462 /* If we would have a "singleton" (see below) were it not for a
6463 conversion in each arm, bring that conversion back out. */
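/* E.g. (illustrative): X ? (int) (A + B) : (int) A is treated
   here as (int) (X ? A + B : A).  */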
6464 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6465 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6466 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6467 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6468 {
6469 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6470 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6471
6472 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6473 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6474 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6475 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6476 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6477 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6478 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6479 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6480 return expand_expr (build1 (NOP_EXPR, type,
6481 build (COND_EXPR, TREE_TYPE (true),
6482 TREE_OPERAND (exp, 0),
6483 true, false)),
6484 target, tmode, modifier);
6485 }
6486
6487 {
6488 rtx flag = NULL_RTX;
6489 tree left_cleanups = NULL_TREE;
6490 tree right_cleanups = NULL_TREE;
6491
6492 /* Used to save a pointer to the place to put the setting of
6493 the flag that indicates if this side of the conditional was
6494 taken. We backpatch the code if we find out later that we
6495 have any conditional cleanups that need to be performed. */
6496 rtx dest_right_flag = NULL_RTX;
6497 rtx dest_left_flag = NULL_RTX;
6498
6499 /* Note that COND_EXPRs whose type is a structure or union
6500 are required to be constructed to contain assignments of
6501 a temporary variable, so that we can evaluate them here
6502 for side effect only. If type is void, we must do likewise. */
6503
6504 /* If an arm of the branch requires a cleanup,
6505 only that cleanup is performed. */
6506
6507 tree singleton = 0;
6508 tree binary_op = 0, unary_op = 0;
6509 tree old_cleanups = cleanups_this_call;
6510
6511 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6512 convert it to our mode, if necessary. */
6513 if (integer_onep (TREE_OPERAND (exp, 1))
6514 && integer_zerop (TREE_OPERAND (exp, 2))
6515 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6516 {
6517 if (ignore)
6518 {
6519 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6520 modifier);
6521 return const0_rtx;
6522 }
6523
6524 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6525 if (GET_MODE (op0) == mode)
6526 return op0;
6527
6528 if (target == 0)
6529 target = gen_reg_rtx (mode);
6530 convert_move (target, op0, unsignedp);
6531 return target;
6532 }
6533
6534 /* Check for X ? A + B : A. If we have this, we can copy A to the
6535 output and conditionally add B. Similarly for unary operations.
6536 Don't do this if X has side-effects because those side effects
6537 might affect A or B and the "?" operation is a sequence point in
6538 ANSI. (operand_equal_p tests for side effects.) */
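/* E.g., for X ? A + B : A the checks below set SINGLETON to the
   plain arm A and BINARY_OP to the arm A + B.  */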
6539
6540 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6541 && operand_equal_p (TREE_OPERAND (exp, 2),
6542 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6543 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6544 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6545 && operand_equal_p (TREE_OPERAND (exp, 1),
6546 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6547 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6548 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6549 && operand_equal_p (TREE_OPERAND (exp, 2),
6550 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6551 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6552 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6553 && operand_equal_p (TREE_OPERAND (exp, 1),
6554 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6555 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6556
6557 /* If we are not to produce a result, we have no target. Otherwise,
6558 if a target was specified use it; it will not be used as an
6559 intermediate target unless it is safe. If no target, use a
6560 temporary. */
6561
6562 if (ignore)
6563 temp = 0;
6564 else if (original_target
6565 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6566 || (singleton && GET_CODE (original_target) == REG
6567 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6568 && original_target == var_rtx (singleton)))
6569 && GET_MODE (original_target) == mode
6570 && ! (GET_CODE (original_target) == MEM
6571 && MEM_VOLATILE_P (original_target)))
6572 temp = original_target;
6573 else if (TREE_ADDRESSABLE (type))
6574 abort ();
6575 else
6576 temp = assign_temp (type, 0, 0, 1);
6577
6578 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6579 do the test of X as a store-flag operation, do this as
6580 A + ((X != 0) << log C). Similarly for other simple binary
6581 operators. Only do for C == 1 if BRANCH_COST is low. */
6582 if (temp && singleton && binary_op
6583 && (TREE_CODE (binary_op) == PLUS_EXPR
6584 || TREE_CODE (binary_op) == MINUS_EXPR
6585 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6586 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6587 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6588 : integer_onep (TREE_OPERAND (binary_op, 1)))
6589 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6590 {
6591 rtx result;
6592 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6593 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6594 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6595 : xor_optab);
6596
6597 /* If we had X ? A : A + 1, do this as A + (X == 0).
6598
6599 We have to invert the truth value here and then put it
6600 back later if do_store_flag fails. We cannot simply copy
6601 TREE_OPERAND (exp, 0) to another variable and modify that
6602 because invert_truthvalue can modify the tree pointed to
6603 by its argument. */
6604 if (singleton == TREE_OPERAND (exp, 1))
6605 TREE_OPERAND (exp, 0)
6606 = invert_truthvalue (TREE_OPERAND (exp, 0));
6607
6608 result = do_store_flag (TREE_OPERAND (exp, 0),
6609 (safe_from_p (temp, singleton)
6610 ? temp : NULL_RTX),
6611 mode, BRANCH_COST <= 1);
6612
6613 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6614 result = expand_shift (LSHIFT_EXPR, mode, result,
6615 build_int_2 (tree_log2
6616 (TREE_OPERAND
6617 (binary_op, 1)),
6618 0),
6619 (safe_from_p (temp, singleton)
6620 ? temp : NULL_RTX), 0);
6621
6622 if (result)
6623 {
6624 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6625 return expand_binop (mode, boptab, op1, result, temp,
6626 unsignedp, OPTAB_LIB_WIDEN);
6627 }
6628 else if (singleton == TREE_OPERAND (exp, 1))
6629 TREE_OPERAND (exp, 0)
6630 = invert_truthvalue (TREE_OPERAND (exp, 0));
6631 }
6632
6633 do_pending_stack_adjust ();
6634 NO_DEFER_POP;
6635 op0 = gen_label_rtx ();
6636
6637 flag = gen_reg_rtx (word_mode);
6638 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6639 {
6640 if (temp != 0)
6641 {
6642 /* If the target conflicts with the other operand of the
6643 binary op, we can't use it. Also, we can't use the target
6644 if it is a hard register, because evaluating the condition
6645 might clobber it. */
6646 if ((binary_op
6647 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6648 || (GET_CODE (temp) == REG
6649 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6650 temp = gen_reg_rtx (mode);
6651 store_expr (singleton, temp, 0);
6652 }
6653 else
6654 expand_expr (singleton,
6655 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6656 dest_left_flag = get_last_insn ();
6657 if (singleton == TREE_OPERAND (exp, 1))
6658 jumpif (TREE_OPERAND (exp, 0), op0);
6659 else
6660 jumpifnot (TREE_OPERAND (exp, 0), op0);
6661
6662 /* Allows cleanups up to here. */
6663 old_cleanups = cleanups_this_call;
6664 if (binary_op && temp == 0)
6665 /* Just touch the other operand. */
6666 expand_expr (TREE_OPERAND (binary_op, 1),
6667 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6668 else if (binary_op)
6669 store_expr (build (TREE_CODE (binary_op), type,
6670 make_tree (type, temp),
6671 TREE_OPERAND (binary_op, 1)),
6672 temp, 0);
6673 else
6674 store_expr (build1 (TREE_CODE (unary_op), type,
6675 make_tree (type, temp)),
6676 temp, 0);
6677 op1 = op0;
6678 dest_right_flag = get_last_insn ();
6679 }
6680 #if 0
6681 /* This is now done in jump.c and is better done there because it
6682 produces shorter register lifetimes. */
6683
6684 /* Check for both possibilities either constants or variables
6685 in registers (but not the same as the target!). If so, can
6686 save branches by assigning one, branching, and assigning the
6687 other. */
6688 else if (temp && GET_MODE (temp) != BLKmode
6689 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6690 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6691 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6692 && DECL_RTL (TREE_OPERAND (exp, 1))
6693 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6694 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6695 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6696 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6697 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6698 && DECL_RTL (TREE_OPERAND (exp, 2))
6699 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6700 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6701 {
6702 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6703 temp = gen_reg_rtx (mode);
6704 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6705 dest_left_flag = get_last_insn ();
6706 jumpifnot (TREE_OPERAND (exp, 0), op0);
6707
6708 /* Allows cleanups up to here. */
6709 old_cleanups = cleanups_this_call;
6710 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6711 op1 = op0;
6712 dest_right_flag = get_last_insn ();
6713 }
6714 #endif
6715 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6716 comparison operator. If we have one of these cases, set the
6717 output to A, branch on A (cse will merge these two references),
6718 then set the output to FOO. */
6719 else if (temp
6720 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6721 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6722 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6723 TREE_OPERAND (exp, 1), 0)
6724 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6725 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6726 {
6727 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6728 temp = gen_reg_rtx (mode);
6729 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6730 dest_left_flag = get_last_insn ();
6731 jumpif (TREE_OPERAND (exp, 0), op0);
6732
6733 /* Allows cleanups up to here. */
6734 old_cleanups = cleanups_this_call;
6735 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6736 op1 = op0;
6737 dest_right_flag = get_last_insn ();
6738 }
6739 else if (temp
6740 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6741 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6742 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6743 TREE_OPERAND (exp, 2), 0)
6744 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6745 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6746 {
6747 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6748 temp = gen_reg_rtx (mode);
6749 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6750 dest_left_flag = get_last_insn ();
6751 jumpifnot (TREE_OPERAND (exp, 0), op0);
6752
6753 /* Allows cleanups up to here. */
6754 old_cleanups = cleanups_this_call;
6755 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6756 op1 = op0;
6757 dest_right_flag = get_last_insn ();
6758 }
6759 else
6760 {
6761 op1 = gen_label_rtx ();
6762 jumpifnot (TREE_OPERAND (exp, 0), op0);
6763
6764 /* Allows cleanups up to here. */
6765 old_cleanups = cleanups_this_call;
6766 if (temp != 0)
6767 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6768 else
6769 expand_expr (TREE_OPERAND (exp, 1),
6770 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6771 dest_left_flag = get_last_insn ();
6772
6773 /* Handle conditional cleanups, if any. */
6774 left_cleanups = defer_cleanups_to (old_cleanups);
6775
6776 emit_queue ();
6777 emit_jump_insn (gen_jump (op1));
6778 emit_barrier ();
6779 emit_label (op0);
6780 if (temp != 0)
6781 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6782 else
6783 expand_expr (TREE_OPERAND (exp, 2),
6784 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6785 dest_right_flag = get_last_insn ();
6786 }
6787
6788 /* Handle conditional cleanups, if any. */
6789 right_cleanups = defer_cleanups_to (old_cleanups);
6790
6791 emit_queue ();
6792 emit_label (op1);
6793 OK_DEFER_POP;
6794
6795 /* Add back in, any conditional cleanups. */
6796 if (left_cleanups || right_cleanups)
6797 {
6798 tree new_cleanups;
6799 tree cond;
6800 rtx last;
6801
6802 /* Now that we know that a flag is needed, go back and add in the
6803 setting of the flag. */
6804
6805 /* Do the left side flag. */
6806 last = get_last_insn ();
6807 /* Flag left cleanups as needed. */
6808 emit_move_insn (flag, const1_rtx);
6809 /* ??? deprecated, use sequences instead. */
6810 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6811
6812 /* Do the right side flag. */
6813 last = get_last_insn ();
6814 /* Flag right cleanups as needed. */
6815 emit_move_insn (flag, const0_rtx);
6816 /* ??? deprecated, use sequences instead. */
6817 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6818
6819 /* All cleanups must be on the function_obstack. */
6820 push_obstacks_nochange ();
6821 resume_temporary_allocation ();
6822
6823 /* Convert FLAG, which is an rtx, into a tree. */
6824 cond = make_node (RTL_EXPR);
6825 TREE_TYPE (cond) = integer_type_node;
6826 RTL_EXPR_RTL (cond) = flag;
6827 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6828 cond = save_expr (cond);
6829
6830 if (! left_cleanups)
6831 left_cleanups = integer_zero_node;
6832 if (! right_cleanups)
6833 right_cleanups = integer_zero_node;
6834 new_cleanups = build (COND_EXPR, void_type_node,
6835 truthvalue_conversion (cond),
6836 left_cleanups, right_cleanups);
6837 new_cleanups = fold (new_cleanups);
6838
6839 pop_obstacks ();
6840
6841 /* Now add in the conditionalized cleanups. */
6842 cleanups_this_call
6843 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6844 expand_eh_region_start ();
6845 }
6846 return temp;
6847 }
6848
6849 case TARGET_EXPR:
6850 {
6851 /* Something needs to be initialized, but we didn't know
6852 where that thing was when building the tree. For example,
6853 it could be the return value of a function, or a parameter
6854 to a function which is passed on the stack, or a temporary
6855 variable which must be passed by reference.
6856
6857 We guarantee that the expression will either be constructed
6858 or copied into our original target. */
6859
6860 tree slot = TREE_OPERAND (exp, 0);
6861 tree cleanups = NULL_TREE;
6862 tree exp1;
6863 rtx temp;
6864
6865 if (TREE_CODE (slot) != VAR_DECL)
6866 abort ();
6867
6868 if (! ignore)
6869 target = original_target;
6870
6871 if (target == 0)
6872 {
6873 if (DECL_RTL (slot) != 0)
6874 {
6875 target = DECL_RTL (slot);
6876 /* We have already expanded the slot, so don't do
6877 it again. (mrs) */
6878 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6879 return target;
6880 }
6881 else
6882 {
6883 target = assign_temp (type, 2, 1, 1);
6884 /* All temp slots at this level must not conflict. */
6885 preserve_temp_slots (target);
6886 DECL_RTL (slot) = target;
6887
6888 /* Since SLOT is not known to the called function
6889 to belong to its stack frame, we must build an explicit
6890 cleanup. This case occurs when we must build up a reference
6891 to pass the reference as an argument. In this case,
6892 it is very likely that such a reference need not be
6893 built here. */
6894
6895 if (TREE_OPERAND (exp, 2) == 0)
6896 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6897 cleanups = TREE_OPERAND (exp, 2);
6898 }
6899 }
6900 else
6901 {
6902 /* This case does occur when expanding a parameter which
6903 needs to be constructed on the stack. The target
6904 is the actual stack address that we want to initialize.
6905 The function we call will perform the cleanup in this case. */
6906
6907 /* If we have already assigned it space, use that space,
6908 not the target that we were passed in, as our target
6909 parameter is only a hint. */
6910 if (DECL_RTL (slot) != 0)
6911 {
6912 target = DECL_RTL (slot);
6913 /* We have already expanded the slot, so don't do
6914 it again. (mrs) */
6915 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6916 return target;
6917 }
6918
6919 DECL_RTL (slot) = target;
6920 }
6921
6922 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6923 /* Mark it as expanded. */
6924 TREE_OPERAND (exp, 1) = NULL_TREE;
6925
6926 store_expr (exp1, target, 0);
6927
6928 if (cleanups)
6929 {
6930 cleanups_this_call = tree_cons (NULL_TREE,
6931 cleanups,
6932 cleanups_this_call);
6933 expand_eh_region_start ();
6934 }
6935
6936 return target;
6937 }
6938
6939 case INIT_EXPR:
6940 {
6941 tree lhs = TREE_OPERAND (exp, 0);
6942 tree rhs = TREE_OPERAND (exp, 1);
6943 tree noncopied_parts = 0;
6944 tree lhs_type = TREE_TYPE (lhs);
6945
6946 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6947 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6948 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6949 TYPE_NONCOPIED_PARTS (lhs_type));
6950 while (noncopied_parts != 0)
6951 {
6952 expand_assignment (TREE_VALUE (noncopied_parts),
6953 TREE_PURPOSE (noncopied_parts), 0, 0);
6954 noncopied_parts = TREE_CHAIN (noncopied_parts);
6955 }
6956 return temp;
6957 }
6958
6959 case MODIFY_EXPR:
6960 {
6961 /* If lhs is complex, expand calls in rhs before computing it.
6962 That's so we don't compute a pointer and save it over a call.
6963 If lhs is simple, compute it first so we can give it as a
6964 target if the rhs is just a call. This avoids an extra temp and copy
6965 and prevents a partial subsumption, which makes bad code.
6966 Actually we could treat component_ref's of vars like vars. */
6967
6968 tree lhs = TREE_OPERAND (exp, 0);
6969 tree rhs = TREE_OPERAND (exp, 1);
6970 tree noncopied_parts = 0;
6971 tree lhs_type = TREE_TYPE (lhs);
6972
6973 temp = 0;
6974
6975 if (TREE_CODE (lhs) != VAR_DECL
6976 && TREE_CODE (lhs) != RESULT_DECL
6977 && TREE_CODE (lhs) != PARM_DECL)
6978 preexpand_calls (exp);
6979
6980 /* Check for |= or &= of a bitfield of size 1 into another bitfield
6981 of size 1. In this case, (unless we need the result of the
6982 assignment) we can do this more efficiently with a
6983 test followed by an assignment, if necessary.
6984
6985 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6986 things change so we do, this code should be enhanced to
6987 support it. */
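/* Editorial illustration (assumed example, not from the original
   source): given

       struct { unsigned a : 1, b : 1; } x;

   the statement `x.a |= x.b;' (result unused) is emitted below as
   the equivalent of

       if (x.b) x.a = 1;

   and `x.a &= x.b;' as `if (! x.b) x.a = 0;' -- a test and a
   conditional store instead of a read-modify-write of the field.  */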
6988 if (ignore
6989 && TREE_CODE (lhs) == COMPONENT_REF
6990 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6991 || TREE_CODE (rhs) == BIT_AND_EXPR)
6992 && TREE_OPERAND (rhs, 0) == lhs
6993 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6994 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6995 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6996 {
6997 rtx label = gen_label_rtx ();
6998
6999 do_jump (TREE_OPERAND (rhs, 1),
7000 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7001 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7002 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7003 (TREE_CODE (rhs) == BIT_IOR_EXPR
7004 ? integer_one_node
7005 : integer_zero_node)),
7006 0, 0);
7007 do_pending_stack_adjust ();
7008 emit_label (label);
7009 return const0_rtx;
7010 }
7011
7012 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7013 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7014 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7015 TYPE_NONCOPIED_PARTS (lhs_type));
7016
7017 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7018 while (noncopied_parts != 0)
7019 {
7020 expand_assignment (TREE_PURPOSE (noncopied_parts),
7021 TREE_VALUE (noncopied_parts), 0, 0);
7022 noncopied_parts = TREE_CHAIN (noncopied_parts);
7023 }
7024 return temp;
7025 }
7026
7027 case PREINCREMENT_EXPR:
7028 case PREDECREMENT_EXPR:
7029 return expand_increment (exp, 0, ignore);
7030
7031 case POSTINCREMENT_EXPR:
7032 case POSTDECREMENT_EXPR:
7033 /* Faster to treat as pre-increment if result is not used. */
7034 return expand_increment (exp, ! ignore, ignore);
7035
7036 case ADDR_EXPR:
7037 /* If nonzero, TEMP will be set to the address of something that might
7038 be a MEM corresponding to a stack slot. */
7039 temp = 0;
7040
7041 /* Are we taking the address of a nested function? */
7042 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7043 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7044 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7045 {
7046 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7047 op0 = force_operand (op0, target);
7048 }
7049 /* If we are taking the address of something erroneous, just
7050 return a zero. */
7051 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7052 return const0_rtx;
7053 else
7054 {
7055 /* We make sure to pass const0_rtx down if we came in with
7056 ignore set, to avoid running the cleanups twice. */
7057 op0 = expand_expr (TREE_OPERAND (exp, 0),
7058 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7059 (modifier == EXPAND_INITIALIZER
7060 ? modifier : EXPAND_CONST_ADDRESS));
7061
7062 /* If we are going to ignore the result, OP0 will have been set
7063 to const0_rtx, so just return it. Don't get confused and
7064 think we are taking the address of the constant. */
7065 if (ignore)
7066 return op0;
7067
7068 op0 = protect_from_queue (op0, 0);
7069
7070 /* We would like the object in memory. If it is a constant,
7071 we can have it statically allocated in memory. For
7072 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7073 memory and store the value into it. */
7074
7075 if (CONSTANT_P (op0))
7076 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7077 op0);
7078 else if (GET_CODE (op0) == MEM)
7079 {
7080 mark_temp_addr_taken (op0);
7081 temp = XEXP (op0, 0);
7082 }
7083
7084 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7085 || GET_CODE (op0) == CONCAT)
7086 {
7087 /* If this object is in a register, it must not
7088 be BLKmode. */
7089 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7090 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7091
7092 mark_temp_addr_taken (memloc);
7093 emit_move_insn (memloc, op0);
7094 op0 = memloc;
7095 }
7096
7097 if (GET_CODE (op0) != MEM)
7098 abort ();
7099
7100 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7101 {
7102 temp = XEXP (op0, 0);
7103 #ifdef POINTERS_EXTEND_UNSIGNED
7104 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7105 && mode == ptr_mode)
7106 temp = convert_memory_address (ptr_mode, temp);
7107 #endif
7108 return temp;
7109 }
7110
7111 op0 = force_operand (XEXP (op0, 0), target);
7112 }
7113
7114 if (flag_force_addr && GET_CODE (op0) != REG)
7115 op0 = force_reg (Pmode, op0);
7116
7117 if (GET_CODE (op0) == REG
7118 && ! REG_USERVAR_P (op0))
7119 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7120
7121 /* If we might have had a temp slot, add an equivalent address
7122 for it. */
7123 if (temp != 0)
7124 update_temp_slot_address (temp, op0);
7125
7126 #ifdef POINTERS_EXTEND_UNSIGNED
7127 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7128 && mode == ptr_mode)
7129 op0 = convert_memory_address (ptr_mode, op0);
7130 #endif
7131
7132 return op0;
7133
7134 case ENTRY_VALUE_EXPR:
7135 abort ();
7136
7137 /* COMPLEX type for Extended Pascal & Fortran */
7138 case COMPLEX_EXPR:
7139 {
7140 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7141 rtx insns;
7142
7143 /* Expand the operands into rtx. */
7144 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7145 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7146
7147 if (! target)
7148 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7149
7150 start_sequence ();
7151
7152 /* Move the real (op0) and imaginary (op1) parts to their location. */
7153 emit_move_insn (gen_realpart (mode, target), op0);
7154 emit_move_insn (gen_imagpart (mode, target), op1);
7155
7156 insns = get_insns ();
7157 end_sequence ();
7158
7159 /* Complex construction should appear as a single unit.
7160 If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7161 each with a separate pseudo as destination.
7162 It's not correct for flow to treat them as a unit. */
7163 if (GET_CODE (target) != CONCAT)
7164 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7165 else
7166 emit_insns (insns);
7167
7168 return target;
7169 }
7170
7171 case REALPART_EXPR:
7172 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7173 return gen_realpart (mode, op0);
7174
7175 case IMAGPART_EXPR:
7176 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7177 return gen_imagpart (mode, op0);
7178
7179 case CONJ_EXPR:
7180 {
7181 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7182 rtx imag_t;
7183 rtx insns;
7184
7185 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7186
7187 if (! target)
7188 target = gen_reg_rtx (mode);
7189
7190 start_sequence ();
7191
7192 /* Store the realpart and the negated imagpart to target. */
7193 emit_move_insn (gen_realpart (partmode, target),
7194 gen_realpart (partmode, op0));
7195
7196 imag_t = gen_imagpart (partmode, target);
7197 temp = expand_unop (partmode, neg_optab,
7198 gen_imagpart (partmode, op0), imag_t, 0);
7199 if (temp != imag_t)
7200 emit_move_insn (imag_t, temp);
7201
7202 insns = get_insns ();
7203 end_sequence ();
7204
7205 /* Conjugate should appear as a single unit.
7206 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7207 each with a separate pseudo as destination.
7208 It's not correct for flow to treat them as a unit. */
7209 if (GET_CODE (target) != CONCAT)
7210 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7211 else
7212 emit_insns (insns);
7213
7214 return target;
7215 }
7216
7217 case ERROR_MARK:
7218 op0 = CONST0_RTX (tmode);
7219 if (op0 != 0)
7220 return op0;
7221 return const0_rtx;
7222
7223 default:
7224 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7225 }
7226
7227 /* Here to do an ordinary binary operator, generating an instruction
7228 from the optab already placed in `this_optab'. */
7229 binop:
7230 preexpand_calls (exp);
7231 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7232 subtarget = 0;
7233 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7234 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7235 binop2:
7236 temp = expand_binop (mode, this_optab, op0, op1, target,
7237 unsignedp, OPTAB_LIB_WIDEN);
7238 if (temp == 0)
7239 abort ();
7240 return temp;
7241 }
7242
7243
7244 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7245
7246 void
7247 bc_expand_expr (exp)
7248 tree exp;
7249 {
7250 enum tree_code code;
7251 tree type, arg0;
7252 rtx r;
7253 struct binary_operator *binoptab;
7254 struct unary_operator *unoptab;
7255 struct increment_operator *incroptab;
7256 struct bc_label *lab, *lab1;
7257 enum bytecode_opcode opcode;
7258
7259
7260 code = TREE_CODE (exp);
7261
7262 switch (code)
7263 {
7264 case PARM_DECL:
7265
7266 if (DECL_RTL (exp) == 0)
7267 {
7268 error_with_decl (exp, "prior parameter's size depends on `%s'");
7269 return;
7270 }
7271
7272 bc_load_parmaddr (DECL_RTL (exp));
7273 bc_load_memory (TREE_TYPE (exp), exp);
7274
7275 return;
7276
7277 case VAR_DECL:
7278
7279 if (DECL_RTL (exp) == 0)
7280 abort ();
7281
7282 #if 0
7283 if (BYTECODE_LABEL (DECL_RTL (exp)))
7284 bc_load_externaddr (DECL_RTL (exp));
7285 else
7286 bc_load_localaddr (DECL_RTL (exp));
7287 #endif
7288 if (TREE_PUBLIC (exp))
7289 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7290 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7291 else
7292 bc_load_localaddr (DECL_RTL (exp));
7293
7294 bc_load_memory (TREE_TYPE (exp), exp);
7295 return;
7296
7297 case INTEGER_CST:
7298
7299 #ifdef DEBUG_PRINT_CODE
7300 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7301 #endif
7302 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7303 ? SImode
7304 : TYPE_MODE (TREE_TYPE (exp)))],
7305 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7306 return;
7307
7308 case REAL_CST:
7309
7310 #if 0
7311 #ifdef DEBUG_PRINT_CODE
7312 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7313 #endif
7314 /* FIX THIS: find a better way to pass real_cst's. -bson */
7315 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7316 (double) TREE_REAL_CST (exp));
7317 #else
7318 abort ();
7319 #endif
7320
7321 return;
7322
7323 case CALL_EXPR:
7324
7325 /* We build a call description vector describing the type of
7326 the return value and of the arguments; this call vector,
7327 together with a pointer to a location for the return value
7328 and the base of the argument list, is passed to the low
7329 level machine dependent call subroutine, which is responsible
7330 for putting the arguments wherever real functions expect
7331 them, as well as getting the return value back. */
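/* Editorial illustration (assumed example, not from the original
   source): for a call `f (a, b)' the calldesc vector built below
   holds, front to back,

       nargs,
       type code and size of the return value,
       type code and size of a,
       type code and size of b.  */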
7332 {
7333 tree calldesc = 0, arg;
7334 int nargs = 0, i;
7335 rtx retval;
7336
7337 /* Push the evaluated args on the evaluation stack in reverse
7338 order. Also make an entry for each arg in the calldesc
7339 vector while we're at it. */
7340
7341 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7342
7343 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7344 {
7345 ++nargs;
7346 bc_expand_expr (TREE_VALUE (arg));
7347
7348 calldesc = tree_cons ((tree) 0,
7349 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7350 calldesc);
7351 calldesc = tree_cons ((tree) 0,
7352 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7353 calldesc);
7354 }
7355
7356 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7357
7358 /* Allocate a location for the return value and push its
7359 address on the evaluation stack. Also make an entry
7360 at the front of the calldesc for the return value type. */
7361
7362 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7363 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7364 bc_load_localaddr (retval);
7365
7366 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7367 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7368
7369 /* Prepend the argument count. */
7370 calldesc = tree_cons ((tree) 0,
7371 build_int_2 (nargs, 0),
7372 calldesc);
7373
7374 /* Push the address of the call description vector on the stack. */
7375 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7376 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7377 build_index_type (build_int_2 (nargs * 2, 0)));
7378 r = output_constant_def (calldesc);
7379 bc_load_externaddr (r);
7380
7381 /* Push the address of the function to be called. */
7382 bc_expand_expr (TREE_OPERAND (exp, 0));
7383
7384 /* Call the function, popping its address and the calldesc vector
7385 address off the evaluation stack in the process. */
7386 bc_emit_instruction (call);
7387
7388 /* Pop the arguments off the stack. */
7389 bc_adjust_stack (nargs);
7390
7391 /* Load the return value onto the stack. */
7392 bc_load_localaddr (retval);
7393 bc_load_memory (type, TREE_OPERAND (exp, 0));
7394 }
7395 return;
7396
7397 case SAVE_EXPR:
7398
7399 if (!SAVE_EXPR_RTL (exp))
7400 {
7401 /* First time around: copy to local variable */
7402 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7403 TYPE_ALIGN (TREE_TYPE(exp)));
7404 bc_expand_expr (TREE_OPERAND (exp, 0));
7405 bc_emit_instruction (duplicate);
7406
7407 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7408 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7409 }
7410 else
7411 {
7412 /* Consecutive reference: use saved copy */
7413 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7414 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7415 }
7416 return;
7417
7418 #if 0
7419 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7420 how are they handled instead? */
7421 case LET_STMT:
7422
7423 TREE_USED (exp) = 1;
7424 bc_expand_expr (STMT_BODY (exp));
7425 return;
7426 #endif
7427
7428 case NOP_EXPR:
7429 case CONVERT_EXPR:
7430
7431 bc_expand_expr (TREE_OPERAND (exp, 0));
7432 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7433 return;
7434
7435 case MODIFY_EXPR:
7436
7437 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7438 return;
7439
7440 case ADDR_EXPR:
7441
7442 bc_expand_address (TREE_OPERAND (exp, 0));
7443 return;
7444
7445 case INDIRECT_REF:
7446
7447 bc_expand_expr (TREE_OPERAND (exp, 0));
7448 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7449 return;
7450
7451 case ARRAY_REF:
7452
7453 bc_expand_expr (bc_canonicalize_array_ref (exp));
7454 return;
7455
7456 case COMPONENT_REF:
7457
7458 bc_expand_component_address (exp);
7459
7460 /* If we have a bitfield, generate a proper load */
7461 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7462 return;
7463
7464 case COMPOUND_EXPR:
7465
7466 bc_expand_expr (TREE_OPERAND (exp, 0));
7467 bc_emit_instruction (drop);
7468 bc_expand_expr (TREE_OPERAND (exp, 1));
7469 return;
7470
7471 case COND_EXPR:
7472
7473 bc_expand_expr (TREE_OPERAND (exp, 0));
7474 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7475 lab = bc_get_bytecode_label ();
7476 bc_emit_bytecode (xjumpifnot);
7477 bc_emit_bytecode_labelref (lab);
7478
7479 #ifdef DEBUG_PRINT_CODE
7480 fputc ('\n', stderr);
7481 #endif
7482 bc_expand_expr (TREE_OPERAND (exp, 1));
7483 lab1 = bc_get_bytecode_label ();
7484 bc_emit_bytecode (jump);
7485 bc_emit_bytecode_labelref (lab1);
7486
7487 #ifdef DEBUG_PRINT_CODE
7488 fputc ('\n', stderr);
7489 #endif
7490
7491 bc_emit_bytecode_labeldef (lab);
7492 bc_expand_expr (TREE_OPERAND (exp, 2));
7493 bc_emit_bytecode_labeldef (lab1);
7494 return;
7495
7496 case TRUTH_ANDIF_EXPR:
7497
7498 opcode = xjumpifnot;
7499 goto andorif;
7500
7501 case TRUTH_ORIF_EXPR:
7502
7503 opcode = xjumpif;
7504 goto andorif;
7505
7506 case PLUS_EXPR:
7507
7508 binoptab = optab_plus_expr;
7509 goto binop;
7510
7511 case MINUS_EXPR:
7512
7513 binoptab = optab_minus_expr;
7514 goto binop;
7515
7516 case MULT_EXPR:
7517
7518 binoptab = optab_mult_expr;
7519 goto binop;
7520
7521 case TRUNC_DIV_EXPR:
7522 case FLOOR_DIV_EXPR:
7523 case CEIL_DIV_EXPR:
7524 case ROUND_DIV_EXPR:
7525 case EXACT_DIV_EXPR:
7526
7527 binoptab = optab_trunc_div_expr;
7528 goto binop;
7529
7530 case TRUNC_MOD_EXPR:
7531 case FLOOR_MOD_EXPR:
7532 case CEIL_MOD_EXPR:
7533 case ROUND_MOD_EXPR:
7534
7535 binoptab = optab_trunc_mod_expr;
7536 goto binop;
7537
7538 case FIX_ROUND_EXPR:
7539 case FIX_FLOOR_EXPR:
7540 case FIX_CEIL_EXPR:
7541 abort (); /* Not used for C. */
7542
7543 case FIX_TRUNC_EXPR:
7544 case FLOAT_EXPR:
7545 case MAX_EXPR:
7546 case MIN_EXPR:
7547 case FFS_EXPR:
7548 case LROTATE_EXPR:
7549 case RROTATE_EXPR:
7550 abort (); /* FIXME */
7551
7552 case RDIV_EXPR:
7553
7554 binoptab = optab_rdiv_expr;
7555 goto binop;
7556
7557 case BIT_AND_EXPR:
7558
7559 binoptab = optab_bit_and_expr;
7560 goto binop;
7561
7562 case BIT_IOR_EXPR:
7563
7564 binoptab = optab_bit_ior_expr;
7565 goto binop;
7566
7567 case BIT_XOR_EXPR:
7568
7569 binoptab = optab_bit_xor_expr;
7570 goto binop;
7571
7572 case LSHIFT_EXPR:
7573
7574 binoptab = optab_lshift_expr;
7575 goto binop;
7576
7577 case RSHIFT_EXPR:
7578
7579 binoptab = optab_rshift_expr;
7580 goto binop;
7581
7582 case TRUTH_AND_EXPR:
7583
7584 binoptab = optab_truth_and_expr;
7585 goto binop;
7586
7587 case TRUTH_OR_EXPR:
7588
7589 binoptab = optab_truth_or_expr;
7590 goto binop;
7591
7592 case LT_EXPR:
7593
7594 binoptab = optab_lt_expr;
7595 goto binop;
7596
7597 case LE_EXPR:
7598
7599 binoptab = optab_le_expr;
7600 goto binop;
7601
7602 case GE_EXPR:
7603
7604 binoptab = optab_ge_expr;
7605 goto binop;
7606
7607 case GT_EXPR:
7608
7609 binoptab = optab_gt_expr;
7610 goto binop;
7611
7612 case EQ_EXPR:
7613
7614 binoptab = optab_eq_expr;
7615 goto binop;
7616
7617 case NE_EXPR:
7618
7619 binoptab = optab_ne_expr;
7620 goto binop;
7621
7622 case NEGATE_EXPR:
7623
7624 unoptab = optab_negate_expr;
7625 goto unop;
7626
7627 case BIT_NOT_EXPR:
7628
7629 unoptab = optab_bit_not_expr;
7630 goto unop;
7631
7632 case TRUTH_NOT_EXPR:
7633
7634 unoptab = optab_truth_not_expr;
7635 goto unop;
7636
7637 case PREDECREMENT_EXPR:
7638
7639 incroptab = optab_predecrement_expr;
7640 goto increment;
7641
7642 case PREINCREMENT_EXPR:
7643
7644 incroptab = optab_preincrement_expr;
7645 goto increment;
7646
7647 case POSTDECREMENT_EXPR:
7648
7649 incroptab = optab_postdecrement_expr;
7650 goto increment;
7651
7652 case POSTINCREMENT_EXPR:
7653
7654 incroptab = optab_postincrement_expr;
7655 goto increment;
7656
7657 case CONSTRUCTOR:
7658
7659 bc_expand_constructor (exp);
7660 return;
7661
7662 case ERROR_MARK:
7663 case RTL_EXPR:
7664
7665 return;
7666
7667 case BIND_EXPR:
7668 {
7669 tree vars = TREE_OPERAND (exp, 0);
7670 int vars_need_expansion = 0;
7671
7672 /* Need to open a binding contour here because
7673 if there are any cleanups they must be contained here. */
7674 expand_start_bindings (0);
7675
7676 /* Mark the corresponding BLOCK for output. */
7677 if (TREE_OPERAND (exp, 2) != 0)
7678 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7679
7680 /* If VARS have not yet been expanded, expand them now. */
7681 while (vars)
7682 {
7683 if (DECL_RTL (vars) == 0)
7684 {
7685 vars_need_expansion = 1;
7686 expand_decl (vars);
7687 }
7688 expand_decl_init (vars);
7689 vars = TREE_CHAIN (vars);
7690 }
7691
7692 bc_expand_expr (TREE_OPERAND (exp, 1));
7693
7694 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7695
7696 return;
7697 }
7698 }
7699
7700 abort ();
7701
7702 binop:
7703
7704 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7705 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7706 return;
7707
7708
7709 unop:
7710
7711 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7712 return;
7713
7714
7715 andorif:
7716
7717 bc_expand_expr (TREE_OPERAND (exp, 0));
7718 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7719 lab = bc_get_bytecode_label ();
7720
7721 bc_emit_instruction (duplicate);
7722 bc_emit_bytecode (opcode);
7723 bc_emit_bytecode_labelref (lab);
7724
7725 #ifdef DEBUG_PRINT_CODE
7726 fputc ('\n', stderr);
7727 #endif
7728
7729 bc_emit_instruction (drop);
7730
7731 bc_expand_expr (TREE_OPERAND (exp, 1));
7732 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7733 bc_emit_bytecode_labeldef (lab);
7734 return;
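/* Editorial note (assumed trace, not from the original source): for
   `a || b' the sequence above is: push A, convert it to a truth
   value, duplicate it, and xjumpif to LAB -- the jump consumes the
   copy and, when A is true, leaves A on the stack as the result;
   otherwise A is dropped and the truth value of B is pushed.
   `a && b' is identical with xjumpifnot.  */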
7735
7736
7737 increment:
7738
7739 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7740
7741 /* Push the quantum. */
7742 bc_expand_expr (TREE_OPERAND (exp, 1));
7743
7744 /* Convert it to the lvalue's type. */
7745 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7746
7747 /* Push the address of the lvalue */
7748 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7749
7750 /* Perform actual increment */
7751 bc_expand_increment (incroptab, type);
7752 return;
7753 }
7754 \f
7755 /* Return the alignment in bits of EXP, a pointer valued expression.
7756 But don't return more than MAX_ALIGN no matter what.
7757 The alignment returned is, by default, the alignment of the thing that
7758 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7759
7760 Otherwise, look at the expression to see if we can do better, i.e., if the
7761 expression is actually pointing at an object whose alignment is tighter. */
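/* Editorial illustration (assumed example, not from the original
   source): in the PLUS_EXPR case below, a constant byte offset caps
   the alignment; e.g. for `p + 2' an incoming max_align of 64 bits
   is reduced to 16 bits, since 2 * BITS_PER_UNIT == 16 and 16 is
   the largest power of two dividing that.  */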
7762
7763 static int
7764 get_pointer_alignment (exp, max_align)
7765 tree exp;
7766 unsigned max_align;
7767 {
7768 unsigned align, inner;
7769
7770 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7771 return 0;
7772
7773 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7774 align = MIN (align, max_align);
7775
7776 while (1)
7777 {
7778 switch (TREE_CODE (exp))
7779 {
7780 case NOP_EXPR:
7781 case CONVERT_EXPR:
7782 case NON_LVALUE_EXPR:
7783 exp = TREE_OPERAND (exp, 0);
7784 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7785 return align;
7786 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7787 align = MIN (inner, max_align);
7788 break;
7789
7790 case PLUS_EXPR:
7791 /* If sum of pointer + int, restrict our maximum alignment to that
7792 imposed by the integer. If not, we can't do any better than
7793 ALIGN. */
7794 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7795 return align;
7796
7797 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7798 & (max_align - 1))
7799 != 0)
7800 max_align >>= 1;
7801
7802 exp = TREE_OPERAND (exp, 0);
7803 break;
7804
7805 case ADDR_EXPR:
7806 /* See what we are pointing at and look at its alignment. */
7807 exp = TREE_OPERAND (exp, 0);
7808 if (TREE_CODE (exp) == FUNCTION_DECL)
7809 align = FUNCTION_BOUNDARY;
7810 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7811 align = DECL_ALIGN (exp);
7812 #ifdef CONSTANT_ALIGNMENT
7813 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7814 align = CONSTANT_ALIGNMENT (exp, align);
7815 #endif
7816 return MIN (align, max_align);
7817
7818 default:
7819 return align;
7820 }
7821 }
7822 }
7823 \f
7824 /* Return the tree node and offset if a given argument corresponds to
7825 a string constant. */
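/* Editorial illustration (assumed example, not from the original
   source): for the argument `"hello" + 2', i.e. a PLUS_EXPR of an
   ADDR_EXPR of a STRING_CST and an offset, this returns the
   STRING_CST and sets *PTR_OFFSET to 2; for a plain `"hello"' the
   offset is integer_zero_node.  */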
7826
7827 static tree
7828 string_constant (arg, ptr_offset)
7829 tree arg;
7830 tree *ptr_offset;
7831 {
7832 STRIP_NOPS (arg);
7833
7834 if (TREE_CODE (arg) == ADDR_EXPR
7835 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7836 {
7837 *ptr_offset = integer_zero_node;
7838 return TREE_OPERAND (arg, 0);
7839 }
7840 else if (TREE_CODE (arg) == PLUS_EXPR)
7841 {
7842 tree arg0 = TREE_OPERAND (arg, 0);
7843 tree arg1 = TREE_OPERAND (arg, 1);
7844
7845 STRIP_NOPS (arg0);
7846 STRIP_NOPS (arg1);
7847
7848 if (TREE_CODE (arg0) == ADDR_EXPR
7849 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7850 {
7851 *ptr_offset = arg1;
7852 return TREE_OPERAND (arg0, 0);
7853 }
7854 else if (TREE_CODE (arg1) == ADDR_EXPR
7855 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7856 {
7857 *ptr_offset = arg0;
7858 return TREE_OPERAND (arg1, 0);
7859 }
7860 }
7861
7862 return 0;
7863 }
7864
7865 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7866 way, because it could contain a zero byte in the middle.
7867 TREE_STRING_LENGTH is the size of the character array, not the string.
7868
7869 Unfortunately, string_constant can't access the values of const char
7870 arrays with initializers, so neither can we do so here. */
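/* Editorial illustration (assumed example, not from the original
   source): for `"hello" + 1' with a constant offset, the code below
   reaches the final return and yields size_int (strlen ("hello" + 1))
   == 4; for "foo\0bar" with a non-constant offset it returns 0 and
   gives up, because of the internal zero byte.  */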
7871
7872 static tree
7873 c_strlen (src)
7874 tree src;
7875 {
7876 tree offset_node;
7877 int offset, max;
7878 char *ptr;
7879
7880 src = string_constant (src, &offset_node);
7881 if (src == 0)
7882 return 0;
7883 max = TREE_STRING_LENGTH (src);
7884 ptr = TREE_STRING_POINTER (src);
7885 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7886 {
7887 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7888 compute the offset to the following null if we don't know where to
7889 start searching for it. */
7890 int i;
7891 for (i = 0; i < max; i++)
7892 if (ptr[i] == 0)
7893 return 0;
7894 /* We don't know the starting offset, but we do know that the string
7895 has no internal zero bytes. We can assume that the offset falls
7896 within the bounds of the string; otherwise, the programmer deserves
7897 what he gets. Subtract the offset from the length of the string,
7898 and return that. */
7899 /* This would perhaps not be valid if we were dealing with named
7900 arrays in addition to literal string constants. */
7901 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7902 }
7903
7904 /* We have a known offset into the string. Start searching there for
7905 a null character. */
7906 if (offset_node == 0)
7907 offset = 0;
7908 else
7909 {
7910 /* Did we get a long long offset? If so, punt. */
7911 if (TREE_INT_CST_HIGH (offset_node) != 0)
7912 return 0;
7913 offset = TREE_INT_CST_LOW (offset_node);
7914 }
7915 /* If the offset is known to be out of bounds, warn, and call strlen at
7916 runtime. */
7917 if (offset < 0 || offset > max)
7918 {
7919 warning ("offset outside bounds of constant string");
7920 return 0;
7921 }
7922 /* Use strlen to search for the first zero byte. Since any strings
7923 constructed with build_string will have nulls appended, we win even
7924 if we get handed something like (char[4])"abcd".
7925
7926 Since OFFSET is our starting index into the string, no further
7927 calculation is needed. */
7928 return size_int (strlen (ptr + offset));
7929 }
7930
7931 rtx
7932 expand_builtin_return_addr (fndecl_code, count, tem)
7933 enum built_in_function fndecl_code;
7934 int count;
7935 rtx tem;
7936 {
7937 int i;
7938
7939 /* Some machines need special handling before we can access
7940 arbitrary frames. For example, on the sparc, we must first flush
7941 all register windows to the stack. */
7942 #ifdef SETUP_FRAME_ADDRESSES
7943 SETUP_FRAME_ADDRESSES ();
7944 #endif
7945
7946 /* On the sparc, the return address is not in the frame, it is in a
7947 register. There is no way to access it off of the current frame
7948 pointer, but it can be accessed off the previous frame pointer by
7949 reading the value from the register window save area. */
7950 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7951 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7952 count--;
7953 #endif
7954
7955 /* Scan back COUNT frames to the specified frame. */
7956 for (i = 0; i < count; i++)
7957 {
7958 /* Assume the dynamic chain pointer is in the word that the
7959 frame address points to, unless otherwise specified. */
7960 #ifdef DYNAMIC_CHAIN_ADDRESS
7961 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7962 #endif
7963 tem = memory_address (Pmode, tem);
7964 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7965 }
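/* Editorial note (assumed trace, not from the original source):
   with COUNT == 2 the loop above effectively performs `tem = *tem'
   twice (modulo DYNAMIC_CHAIN_ADDRESS), following the chain of
   saved frame pointers to the caller's caller.  */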
7966
7967 /* For __builtin_frame_address, return what we've got. */
7968 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7969 return tem;
7970
7971 /* For __builtin_return_address, get the return address from that
7972 frame. */
7973 #ifdef RETURN_ADDR_RTX
7974 tem = RETURN_ADDR_RTX (count, tem);
7975 #else
7976 tem = memory_address (Pmode,
7977 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7978 tem = gen_rtx (MEM, Pmode, tem);
7979 #endif
7980 return tem;
7981 }
7982 \f
7983 /* Expand an expression EXP that calls a built-in function,
7984 with result going to TARGET if that's convenient
7985 (and in mode MODE if that's convenient).
7986 SUBTARGET may be used as the target for computing one of EXP's operands.
7987 IGNORE is nonzero if the value is to be ignored. */
7988
7989 #define CALLED_AS_BUILT_IN(NODE) \
7990 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
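/* Editorial note (not from the original source): this is true for a
   call spelled `__builtin_strlen' but not for one spelled `strlen',
   so calls made explicitly through the __builtin_ name are expanded
   inline below even when not optimizing.  */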
7991
7992 static rtx
7993 expand_builtin (exp, target, subtarget, mode, ignore)
7994 tree exp;
7995 rtx target;
7996 rtx subtarget;
7997 enum machine_mode mode;
7998 int ignore;
7999 {
8000 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8001 tree arglist = TREE_OPERAND (exp, 1);
8002 rtx op0;
8003 rtx lab1, insns;
8004 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8005 optab builtin_optab;
8006
8007 switch (DECL_FUNCTION_CODE (fndecl))
8008 {
8009 case BUILT_IN_ABS:
8010 case BUILT_IN_LABS:
8011 case BUILT_IN_FABS:
8012 /* build_function_call changes these into ABS_EXPR. */
8013 abort ();
8014
8015 case BUILT_IN_SIN:
8016 case BUILT_IN_COS:
8017 /* Treat these like sqrt, but only if the user asks for them. */
8018 if (! flag_fast_math)
8019 break;
8020 case BUILT_IN_FSQRT:
8021 /* If not optimizing, call the library function. */
8022 if (! optimize)
8023 break;
8024
8025 if (arglist == 0
8026 /* Arg could be wrong type if user redeclared this fcn wrong. */
8027 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8028 break;
8029
8030 /* Stabilize and compute the argument. */
8031 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8032 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8033 {
8034 exp = copy_node (exp);
8035 arglist = copy_node (arglist);
8036 TREE_OPERAND (exp, 1) = arglist;
8037 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8038 }
8039 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8040
8041 /* Make a suitable register to place result in. */
8042 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8043
8044 emit_queue ();
8045 start_sequence ();
8046
8047 switch (DECL_FUNCTION_CODE (fndecl))
8048 {
8049 case BUILT_IN_SIN:
8050 builtin_optab = sin_optab; break;
8051 case BUILT_IN_COS:
8052 builtin_optab = cos_optab; break;
8053 case BUILT_IN_FSQRT:
8054 builtin_optab = sqrt_optab; break;
8055 default:
8056 abort ();
8057 }
8058
8059 /* Compute into TARGET.
8060 Set TARGET to wherever the result comes back. */
8061 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8062 builtin_optab, op0, target, 0);
8063
8064 /* If we were unable to expand via the builtin, stop the
8065 sequence (without outputting the insns) and break, causing
8066 a call to the library function. */
8067 if (target == 0)
8068 {
8069 end_sequence ();
8070 break;
8071 }
8072
8073 /* Check the results by default. But if flag_fast_math is turned on,
8074 then assume sqrt will always be called with valid arguments. */
8075
8076 if (! flag_fast_math)
8077 {
8078 /* Don't define the builtin FP instructions
8079 if your machine is not IEEE. */
8080 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8081 abort ();
8082
8083 lab1 = gen_label_rtx ();
8084
8085 /* Test the result; if it is NaN, set errno=EDOM because
8086 the argument was not in the domain. */
8087 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8088 emit_jump_insn (gen_beq (lab1));
8089
8090 #ifdef TARGET_EDOM
8091 {
8092 #ifdef GEN_ERRNO_RTX
8093 rtx errno_rtx = GEN_ERRNO_RTX;
8094 #else
8095 rtx errno_rtx
8096 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8097 #endif
8098
8099 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8100 }
8101 #else
8102 /* We can't set errno=EDOM directly; let the library call do it.
8103 Pop the arguments right away in case the call gets deleted. */
8104 NO_DEFER_POP;
8105 expand_call (exp, target, 0);
8106 OK_DEFER_POP;
8107 #endif
8108
8109 emit_label (lab1);
8110 }
8111
8112 /* Output the entire sequence. */
8113 insns = get_insns ();
8114 end_sequence ();
8115 emit_insns (insns);
8116
8117 return target;
8118
8119 /* __builtin_apply_args returns a block of memory allocated on
8120 the stack into which are stored the arg pointer, structure
8121 value address, static chain, and all the registers that might
8122 possibly be used in performing a function call. The code is
8123 moved to the start of the function so the incoming values are
8124 saved. */
8125 case BUILT_IN_APPLY_ARGS:
8126 /* Don't do __builtin_apply_args more than once in a function.
8127 Save the result of the first call and reuse it. */
8128 if (apply_args_value != 0)
8129 return apply_args_value;
8130 {
8131 /* When this function is called, it means that registers must be
8132 saved on entry to this function. So we migrate the
8133 call to the first insn of this function. */
8134 rtx temp;
8135 rtx seq;
8136
8137 start_sequence ();
8138 temp = expand_builtin_apply_args ();
8139 seq = get_insns ();
8140 end_sequence ();
8141
8142 apply_args_value = temp;
8143
8144 /* Put the sequence after the NOTE that starts the function.
8145 If this is inside a SEQUENCE, make the outer-level insn
8146 chain current, so the code is placed at the start of the
8147 function. */
8148 push_topmost_sequence ();
8149 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8150 pop_topmost_sequence ();
8151 return temp;
8152 }
8153
8154 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8155 FUNCTION with a copy of the parameters described by
8156 ARGUMENTS, and ARGSIZE. It returns a block of memory
8157 allocated on the stack into which are stored all the registers
8158 that might possibly be used for returning the result of a
8159 function. ARGUMENTS is the value returned by
8160 __builtin_apply_args. ARGSIZE is the number of bytes of
8161 arguments that must be copied. ??? How should this value be
8162 computed? We'll also need a safe worst case value for varargs
8163 functions. */
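/* Editorial illustration (assumed example, not from the original
   source): the three builtins combine as in

       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) f, args, 64);
       __builtin_return (ret);

   which forwards the current function's arguments to `f' and
   returns its result; the 64 is a caller-supplied bound on the
   argument block size.  */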
8164 case BUILT_IN_APPLY:
8165 if (arglist == 0
8166 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8167 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8168 || TREE_CHAIN (arglist) == 0
8169 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8170 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8171 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8172 return const0_rtx;
8173 else
8174 {
8175 int i;
8176 tree t;
8177 rtx ops[3];
8178
8179 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8180 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8181
8182 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8183 }
8184
8185 /* __builtin_return (RESULT) causes the function to return the
8186 value described by RESULT. RESULT is address of the block of
8187 memory returned by __builtin_apply. */
8188 case BUILT_IN_RETURN:
8189 if (arglist
8190 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8191 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8192 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8193 NULL_RTX, VOIDmode, 0));
8194 return const0_rtx;
8195
8196 case BUILT_IN_SAVEREGS:
8197 /* Don't do __builtin_saveregs more than once in a function.
8198 Save the result of the first call and reuse it. */
8199 if (saveregs_value != 0)
8200 return saveregs_value;
8201 {
8202 /* When this function is called, it means that registers must be
8203 saved on entry to this function. So we migrate the
8204 call to the first insn of this function. */
8205 rtx temp;
8206 rtx seq;
8207
8208 /* Now really call the function. `expand_call' does not call
8209 expand_builtin, so there is no danger of infinite recursion here. */
8210 start_sequence ();
8211
8212 #ifdef EXPAND_BUILTIN_SAVEREGS
8213 /* Do whatever the machine needs done in this case. */
8214 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8215 #else
8216 /* The register where the function returns its value
8217 is likely to have something else in it, such as an argument.
8218 So preserve that register around the call. */
8219
8220 if (value_mode != VOIDmode)
8221 {
8222 rtx valreg = hard_libcall_value (value_mode);
8223 rtx saved_valreg = gen_reg_rtx (value_mode);
8224
8225 emit_move_insn (saved_valreg, valreg);
8226 temp = expand_call (exp, target, ignore);
8227 emit_move_insn (valreg, saved_valreg);
8228 }
8229 else
8230 /* Generate the call, putting the value in a pseudo. */
8231 temp = expand_call (exp, target, ignore);
8232 #endif
8233
8234 seq = get_insns ();
8235 end_sequence ();
8236
8237 saveregs_value = temp;
8238
8239 /* Put the sequence after the NOTE that starts the function.
8240 If this is inside a SEQUENCE, make the outer-level insn
8241 chain current, so the code is placed at the start of the
8242 function. */
8243 push_topmost_sequence ();
8244 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8245 pop_topmost_sequence ();
8246 return temp;
8247 }
8248
8249 /* __builtin_args_info (N) returns word N of the arg space info
8250 for the current function. The number and meanings of words
8251 are controlled by the definition of CUMULATIVE_ARGS. */
8252 case BUILT_IN_ARGS_INFO:
8253 {
8254 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8255 int i;
8256 int *word_ptr = (int *) &current_function_args_info;
8257 tree type, elts, result;
8258
8259 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8260 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8261 __FILE__, __LINE__);
8262
8263 if (arglist != 0)
8264 {
8265 tree arg = TREE_VALUE (arglist);
8266 if (TREE_CODE (arg) != INTEGER_CST)
8267 error ("argument of `__builtin_args_info' must be constant");
8268 else
8269 {
8270 int wordnum = TREE_INT_CST_LOW (arg);
8271
8272 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8273 error ("argument of `__builtin_args_info' out of range");
8274 else
8275 return GEN_INT (word_ptr[wordnum]);
8276 }
8277 }
8278 else
8279 error ("missing argument in `__builtin_args_info'");
8280
8281 return const0_rtx;
8282
8283 #if 0
8284 for (i = 0; i < nwords; i++)
8285 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8286
8287 type = build_array_type (integer_type_node,
8288 build_index_type (build_int_2 (nwords, 0)));
8289 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8290 TREE_CONSTANT (result) = 1;
8291 TREE_STATIC (result) = 1;
8292 result = build (INDIRECT_REF, build_pointer_type (type), result);
8293 TREE_CONSTANT (result) = 1;
8294 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8295 #endif
8296 }
8297
8298 /* Return the address of the first anonymous stack arg. */
8299 case BUILT_IN_NEXT_ARG:
8300 {
8301 tree fntype = TREE_TYPE (current_function_decl);
8302
8303 if ((TYPE_ARG_TYPES (fntype) == 0
8304 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8305 == void_type_node))
8306 && ! current_function_varargs)
8307 {
8308 error ("`va_start' used in function with fixed args");
8309 return const0_rtx;
8310 }
8311
8312 if (arglist)
8313 {
8314 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8315 tree arg = TREE_VALUE (arglist);
8316
8317 /* Strip off all nops for the sake of the comparison. This
8318 is not quite the same as STRIP_NOPS. It does more.
8319 We must also strip off INDIRECT_REF for C++ reference
8320 parameters. */
8321 while (TREE_CODE (arg) == NOP_EXPR
8322 || TREE_CODE (arg) == CONVERT_EXPR
8323 || TREE_CODE (arg) == NON_LVALUE_EXPR
8324 || TREE_CODE (arg) == INDIRECT_REF)
8325 arg = TREE_OPERAND (arg, 0);
8326 if (arg != last_parm)
8327 warning ("second parameter of `va_start' not last named argument");
8328 }
8329 else if (! current_function_varargs)
8330 /* Evidently an out of date version of <stdarg.h>; can't validate
8331 va_start's second argument, but can still work as intended. */
8332 warning ("`__builtin_next_arg' called without an argument");
8333 }
8334
8335 return expand_binop (Pmode, add_optab,
8336 current_function_internal_arg_pointer,
8337 current_function_arg_offset_rtx,
8338 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8339
8340 case BUILT_IN_CLASSIFY_TYPE:
8341 if (arglist != 0)
8342 {
8343 tree type = TREE_TYPE (TREE_VALUE (arglist));
8344 enum tree_code code = TREE_CODE (type);
8345 if (code == VOID_TYPE)
8346 return GEN_INT (void_type_class);
8347 if (code == INTEGER_TYPE)
8348 return GEN_INT (integer_type_class);
8349 if (code == CHAR_TYPE)
8350 return GEN_INT (char_type_class);
8351 if (code == ENUMERAL_TYPE)
8352 return GEN_INT (enumeral_type_class);
8353 if (code == BOOLEAN_TYPE)
8354 return GEN_INT (boolean_type_class);
8355 if (code == POINTER_TYPE)
8356 return GEN_INT (pointer_type_class);
8357 if (code == REFERENCE_TYPE)
8358 return GEN_INT (reference_type_class);
8359 if (code == OFFSET_TYPE)
8360 return GEN_INT (offset_type_class);
8361 if (code == REAL_TYPE)
8362 return GEN_INT (real_type_class);
8363 if (code == COMPLEX_TYPE)
8364 return GEN_INT (complex_type_class);
8365 if (code == FUNCTION_TYPE)
8366 return GEN_INT (function_type_class);
8367 if (code == METHOD_TYPE)
8368 return GEN_INT (method_type_class);
8369 if (code == RECORD_TYPE)
8370 return GEN_INT (record_type_class);
8371 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8372 return GEN_INT (union_type_class);
8373 if (code == ARRAY_TYPE)
8374 {
8375 if (TYPE_STRING_FLAG (type))
8376 return GEN_INT (string_type_class);
8377 else
8378 return GEN_INT (array_type_class);
8379 }
8380 if (code == SET_TYPE)
8381 return GEN_INT (set_type_class);
8382 if (code == FILE_TYPE)
8383 return GEN_INT (file_type_class);
8384 if (code == LANG_TYPE)
8385 return GEN_INT (lang_type_class);
8386 }
8387 return GEN_INT (no_type_class);
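/* Editorial note (assumed examples, not from the original source):
   e.g. `__builtin_classify_type (1.5)' yields real_type_class above
   and `__builtin_classify_type (1)' integer_type_class; with no
   argument the fall-through result is no_type_class.  */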
8388
8389 case BUILT_IN_CONSTANT_P:
8390 if (arglist == 0)
8391 return const0_rtx;
8392 else
8393 {
8394 tree arg = TREE_VALUE (arglist);
8395
8396 STRIP_NOPS (arg);
8397 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8398 || (TREE_CODE (arg) == ADDR_EXPR
8399 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8400 ? const1_rtx : const0_rtx);
8401 }
8402
8403 case BUILT_IN_FRAME_ADDRESS:
8404 /* The argument must be a nonnegative integer constant.
8405 It counts the number of frames to scan up the stack.
8406 The value is the address of that frame. */
8407 case BUILT_IN_RETURN_ADDRESS:
8408 /* The argument must be a nonnegative integer constant.
8409 It counts the number of frames to scan up the stack.
8410 The value is the return address saved in that frame. */
8411 if (arglist == 0)
8412 /* Warning about missing arg was already issued. */
8413 return const0_rtx;
8414 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8415 {
8416 error ("invalid arg to `__builtin_return_address'");
8417 return const0_rtx;
8418 }
8419 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8420 {
8421 error ("invalid arg to `__builtin_return_address'");
8422 return const0_rtx;
8423 }
8424 else
8425 {
8426 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8427 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8428 hard_frame_pointer_rtx);
8429
8430 /* For __builtin_frame_address, return what we've got. */
8431 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8432 return tem;
8433
8434 if (GET_CODE (tem) != REG)
8435 tem = copy_to_reg (tem);
8436 return tem;
8437 }
8438
8439 case BUILT_IN_ALLOCA:
8440 if (arglist == 0
8441 /* Arg could be non-integer if user redeclared this fcn wrong. */
8442 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8443 break;
8444
8445 /* Compute the argument. */
8446 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8447
8448 /* Allocate the desired space. */
8449 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8450
8451 case BUILT_IN_FFS:
8452 /* If not optimizing, call the library function. */
8453 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8454 break;
8455
8456 if (arglist == 0
8457 /* Arg could be non-integer if user redeclared this fcn wrong. */
8458 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8459 break;
8460
8461 /* Compute the argument. */
8462 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8463 /* Compute ffs, into TARGET if possible.
8464 Set TARGET to wherever the result comes back. */
8465 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8466 ffs_optab, op0, target, 1);
8467 if (target == 0)
8468 abort ();
8469 return target;
8470
8471 case BUILT_IN_STRLEN:
8472 /* If not optimizing, call the library function. */
8473 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8474 break;
8475
8476 if (arglist == 0
8477 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8478 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8479 break;
8480 else
8481 {
8482 tree src = TREE_VALUE (arglist);
8483 tree len = c_strlen (src);
8484
8485 int align
8486 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8487
8488 rtx result, src_rtx, char_rtx;
8489 enum machine_mode insn_mode = value_mode, char_mode;
8490 enum insn_code icode;
8491
8492 /* If the length is known, just return it. */
8493 if (len != 0)
8494 return expand_expr (len, target, mode, 0);
8495
8496 /* If SRC is not a pointer type, don't do this operation inline. */
8497 if (align == 0)
8498 break;
8499
8500 /* Call a function if we can't compute strlen in the right mode. */
8501
8502 while (insn_mode != VOIDmode)
8503 {
8504 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8505 if (icode != CODE_FOR_nothing)
8506 break;
8507
8508 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8509 }
8510 if (insn_mode == VOIDmode)
8511 break;
8512
8513 /* Make a place to write the result of the instruction. */
8514 result = target;
8515 if (! (result != 0
8516 && GET_CODE (result) == REG
8517 && GET_MODE (result) == insn_mode
8518 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8519 result = gen_reg_rtx (insn_mode);
8520
8521 /* Make sure the operands are acceptable to the predicates. */
8522
8523 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8524 result = gen_reg_rtx (insn_mode);
8525
8526 src_rtx = memory_address (BLKmode,
8527 expand_expr (src, NULL_RTX, ptr_mode,
8528 EXPAND_NORMAL));
8529 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8530 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8531
8532 char_rtx = const0_rtx;
8533 char_mode = insn_operand_mode[(int)icode][2];
8534 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8535 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8536
8537 emit_insn (GEN_FCN (icode) (result,
8538 gen_rtx (MEM, BLKmode, src_rtx),
8539 char_rtx, GEN_INT (align)));
8540
8541 /* Return the value in the proper mode for this function. */
8542 if (GET_MODE (result) == value_mode)
8543 return result;
8544 else if (target != 0)
8545 {
8546 convert_move (target, result, 0);
8547 return target;
8548 }
8549 else
8550 return convert_to_mode (value_mode, result, 0);
8551 }
8552
8553 case BUILT_IN_STRCPY:
8554 /* If not optimizing, call the library function. */
8555 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8556 break;
8557
8558 if (arglist == 0
8559 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8560 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8561 || TREE_CHAIN (arglist) == 0
8562 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8563 break;
8564 else
8565 {
8566 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8567
8568 if (len == 0)
8569 break;
8570
8571 len = size_binop (PLUS_EXPR, len, integer_one_node);
8572
8573 chainon (arglist, build_tree_list (NULL_TREE, len));
8574 }
8575
8576 /* Falls through. */
8577 case BUILT_IN_MEMCPY:
8578 /* If not optimizing, call the library function. */
8579 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8580 break;
8581
8582 if (arglist == 0
8583 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8584 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8585 || TREE_CHAIN (arglist) == 0
8586 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8587 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8588 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8589 break;
8590 else
8591 {
8592 tree dest = TREE_VALUE (arglist);
8593 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8594 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8595 tree type;
8596
8597 int src_align
8598 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8599 int dest_align
8600 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8601 rtx dest_rtx, dest_mem, src_mem;
8602
8603 /* If either SRC or DEST is not a pointer type, don't do
8604 this operation in-line. */
8605 if (src_align == 0 || dest_align == 0)
8606 {
8607 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8608 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8609 break;
8610 }
8611
8612 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8613 dest_mem = gen_rtx (MEM, BLKmode,
8614 memory_address (BLKmode, dest_rtx));
8615 /* There could be a void* cast on top of the object. */
8616 while (TREE_CODE (dest) == NOP_EXPR)
8617 dest = TREE_OPERAND (dest, 0);
8618 type = TREE_TYPE (TREE_TYPE (dest));
8619 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8620 src_mem = gen_rtx (MEM, BLKmode,
8621 memory_address (BLKmode,
8622 expand_expr (src, NULL_RTX,
8623 ptr_mode,
8624 EXPAND_SUM)));
8625 /* There could be a void* cast on top of the object. */
8626 while (TREE_CODE (src) == NOP_EXPR)
8627 src = TREE_OPERAND (src, 0);
8628 type = TREE_TYPE (TREE_TYPE (src));
8629 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8630
8631 /* Copy word part most expediently. */
8632 emit_block_move (dest_mem, src_mem,
8633 expand_expr (len, NULL_RTX, VOIDmode, 0),
8634 MIN (src_align, dest_align));
8635 return force_operand (dest_rtx, NULL_RTX);
8636 }
8637
8638 case BUILT_IN_MEMSET:
8639 /* If not optimizing, call the library function. */
8640 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8641 break;
8642
8643 if (arglist == 0
8644 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8645 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8646 || TREE_CHAIN (arglist) == 0
8647 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8648 != INTEGER_TYPE)
8649 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8650 || (INTEGER_CST
8651 != (TREE_CODE (TREE_TYPE
8652 (TREE_VALUE
8653 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8654 break;
8655 else
8656 {
8657 tree dest = TREE_VALUE (arglist);
8658 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8659 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8660 tree type;
8661
8662 int dest_align
8663 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8664 rtx dest_rtx, dest_mem;
8665
8666 /* If DEST is not a pointer type, don't do this
8667 operation in-line. */
8668 if (dest_align == 0)
8669 break;
8670
8671 /* If VAL is not 0, don't do this operation in-line. */
8672 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8673 break;
8674
8675 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8676 dest_mem = gen_rtx (MEM, BLKmode,
8677 memory_address (BLKmode, dest_rtx));
8678 /* There could be a void* cast on top of the object. */
8679 while (TREE_CODE (dest) == NOP_EXPR)
8680 dest = TREE_OPERAND (dest, 0);
8681 type = TREE_TYPE (TREE_TYPE (dest));
8682 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8683
8684 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8685 dest_align);
8686
8687 return force_operand (dest_rtx, NULL_RTX);
8688 }
8689
8690 /* These comparison functions need an instruction that returns an actual
8691 index. An ordinary compare that just sets the condition codes
8692 is not enough. */
8693 #ifdef HAVE_cmpstrsi
8694 case BUILT_IN_STRCMP:
8695 /* If not optimizing, call the library function. */
8696 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8697 break;
8698
8699 if (arglist == 0
8700 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8701 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8702 || TREE_CHAIN (arglist) == 0
8703 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8704 break;
8705 else if (!HAVE_cmpstrsi)
8706 break;
8707 {
8708 tree arg1 = TREE_VALUE (arglist);
8709 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8710 tree offset;
8711 tree len, len2;
8712
8713 len = c_strlen (arg1);
8714 if (len)
8715 len = size_binop (PLUS_EXPR, integer_one_node, len);
8716 len2 = c_strlen (arg2);
8717 if (len2)
8718 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8719
8720 /* If we don't have a constant length for the first, use the length
8721 of the second, if we know it. We don't require a constant for
8722 this case; some cost analysis could be done if both are available
8723 but neither is constant. For now, assume they're equally cheap.
8724
8725 If both strings have constant lengths, use the smaller. This
8726 could arise if optimization results in strcpy being called with
8727 two fixed strings, or if the code was machine-generated. We should
8728 add some code to the `memcmp' handler below to deal with such
8729 situations, someday. */
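/* Editorial illustration (assumed example, not from the original
   source): for `strcmp (s, "ab")' only the second length is
   constant, so LEN becomes strlen ("ab") + 1 == 3; for
   `strcmp ("abcd", "ab")' both are constant and the smaller, 3,
   is used.  */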
8730 if (!len || TREE_CODE (len) != INTEGER_CST)
8731 {
8732 if (len2)
8733 len = len2;
8734 else if (len == 0)
8735 break;
8736 }
8737 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8738 {
8739 if (tree_int_cst_lt (len2, len))
8740 len = len2;
8741 }
8742
8743 chainon (arglist, build_tree_list (NULL_TREE, len));
8744 }
8745
8746 /* Falls through. */
8747 case BUILT_IN_MEMCMP:
8748 /* If not optimizing, call the library function. */
8749 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8750 break;
8751
8752 if (arglist == 0
8753 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8754 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8755 || TREE_CHAIN (arglist) == 0
8756 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8757 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8758 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8759 break;
8760 else if (!HAVE_cmpstrsi)
8761 break;
8762 {
8763 tree arg1 = TREE_VALUE (arglist);
8764 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8765 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8766 rtx result;
8767
8768 int arg1_align
8769 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8770 int arg2_align
8771 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8772 enum machine_mode insn_mode
8773 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8774
8775 /* If we don't have POINTER_TYPE, call the function. */
8776 if (arg1_align == 0 || arg2_align == 0)
8777 {
8778 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8779 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8780 break;
8781 }
8782
8783 /* Make a place to write the result of the instruction. */
8784 result = target;
8785 if (! (result != 0
8786 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8787 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8788 result = gen_reg_rtx (insn_mode);
8789
8790 emit_insn (gen_cmpstrsi (result,
8791 gen_rtx (MEM, BLKmode,
8792 expand_expr (arg1, NULL_RTX,
8793 ptr_mode,
8794 EXPAND_NORMAL)),
8795 gen_rtx (MEM, BLKmode,
8796 expand_expr (arg2, NULL_RTX,
8797 ptr_mode,
8798 EXPAND_NORMAL)),
8799 expand_expr (len, NULL_RTX, VOIDmode, 0),
8800 GEN_INT (MIN (arg1_align, arg2_align))));
8801
8802 /* Return the value in the proper mode for this function. */
8803 mode = TYPE_MODE (TREE_TYPE (exp));
8804 if (GET_MODE (result) == mode)
8805 return result;
8806 else if (target != 0)
8807 {
8808 convert_move (target, result, 0);
8809 return target;
8810 }
8811 else
8812 return convert_to_mode (mode, result, 0);
8813 }
8814 #else
8815 case BUILT_IN_STRCMP:
8816 case BUILT_IN_MEMCMP:
8817 break;
8818 #endif
8819
8820 /* __builtin_setjmp is passed a pointer to an array of five words
8821 (not all will be used on all machines). It operates similarly to
8822 the C library function of the same name, but is more efficient.
8823 Much of the code below (and for longjmp) is copied from the handling
8824 of non-local gotos.
8825
8826 NOTE: This is intended for use by GNAT and will only work when
8827 used the way GNAT uses it. This code will likely NOT survive to
8828 the GCC 2.8.0 release. */
8829 case BUILT_IN_SETJMP:
8830 if (arglist == 0
8831 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8832 break;
8833
8834 {
8835 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8836 VOIDmode, 0);
8837 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8838 enum machine_mode sa_mode = Pmode;
8839 rtx stack_save;
8840 int old_inhibit_defer_pop = inhibit_defer_pop;
8841 int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8842 get_identifier ("__dummy"), 0);
8843 rtx next_arg_reg;
8844 CUMULATIVE_ARGS args_so_far;
8845 int i;
8846
8847 #ifdef POINTERS_EXTEND_UNSIGNED
8848 buf_addr = convert_memory_address (Pmode, buf_addr);
8849 #endif
8850
8851 buf_addr = force_reg (Pmode, buf_addr);
8852
8853 if (target == 0 || GET_CODE (target) != REG
8854 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8855 target = gen_reg_rtx (value_mode);
8856
8857 emit_queue ();
8858
8859 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8860 current_function_calls_setjmp = 1;
8861
8862 /* We store the frame pointer and the address of lab1 in the buffer
8863 and use the rest of it for the stack save area, which is
8864 machine-dependent. */
8865 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8866 virtual_stack_vars_rtx);
8867 emit_move_insn
8868 (validize_mem (gen_rtx (MEM, Pmode,
8869 plus_constant (buf_addr,
8870 GET_MODE_SIZE (Pmode)))),
8871 gen_rtx (LABEL_REF, Pmode, lab1));
8872
8873 #ifdef HAVE_save_stack_nonlocal
8874 if (HAVE_save_stack_nonlocal)
8875 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8876 #endif
8877
8878 stack_save = gen_rtx (MEM, sa_mode,
8879 plus_constant (buf_addr,
8880 2 * GET_MODE_SIZE (Pmode)));
8881 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
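/* Editorial summary of the stores above (word sizes are target-dependent;
   offsets shown assume Pmode-sized words):
     buf[0]        saved frame pointer (virtual_stack_vars_rtx)
     buf[1]        resume address (LAB1)
     buf[2]...     stack save area, in SA_MODE
   The user-supplied buffer is the five-word array described in the
   comment before this case.  */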
8882
8883 #ifdef HAVE_setjmp
8884 if (HAVE_setjmp)
8885 emit_insn (gen_setjmp ());
8886 #endif
8887
8888 /* Set TARGET to zero and branch around the other case. */
8889 emit_move_insn (target, const0_rtx);
8890 emit_jump_insn (gen_jump (lab2));
8891 emit_barrier ();
8892 emit_label (lab1);
8893
8894 /* Note that setjmp clobbers FP when we get here, so we have to
8895 make sure it's marked as used by this function. */
8896 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8897
8898 /* Mark the static chain as clobbered here so life information
8899 doesn't get messed up for it. */
8900 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8901
8902 /* Now put in the code to restore the frame pointer, and argument
8903 pointer, if needed. The code below is from expand_end_bindings
8904 in stmt.c; see detailed documentation there. */
8905 #ifdef HAVE_nonlocal_goto
8906 if (! HAVE_nonlocal_goto)
8907 #endif
8908 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8909
8910 current_function_has_nonlocal_goto = 1;
8911
8912 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8913 if (fixed_regs[ARG_POINTER_REGNUM])
8914 {
8915 #ifdef ELIMINABLE_REGS
8916 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8917
8918 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8919 if (elim_regs[i].from == ARG_POINTER_REGNUM
8920 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8921 break;
8922
8923 if (i == sizeof elim_regs / sizeof elim_regs [0])
8924 #endif
8925 {
8926 /* Now restore our arg pointer from the address at which it
8927 was saved in our stack frame.
8928 If space hasn't been allocated for it yet, make
8929 some now. */
8930 if (arg_pointer_save_area == 0)
8931 arg_pointer_save_area
8932 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8933 emit_move_insn (virtual_incoming_args_rtx,
8934 copy_to_reg (arg_pointer_save_area));
8935 }
8936 }
8937 #endif
8938
8939 #ifdef HAVE_nonlocal_goto_receiver
8940 if (HAVE_nonlocal_goto_receiver)
8941 emit_insn (gen_nonlocal_goto_receiver ());
8942 #endif
8943 /* The static chain pointer contains the address of the dummy function.
8944 We need to call it here to handle some PIC cases of restoring
8945 a global pointer. Then return 1. */
8946 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8947
8948 /* We can't actually call emit_library_call here, so do everything
8949 it does, which isn't much for a libfunc with no args. */
8950 op0 = memory_address (FUNCTION_MODE, op0);
8951
8952 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8953 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8954 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8955
8956 #ifndef ACCUMULATE_OUTGOING_ARGS
8957 #ifdef HAVE_call_pop
8958 if (HAVE_call_pop)
8959 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8960 const0_rtx, next_arg_reg,
8961 GEN_INT (return_pops)));
8962 else
8963 #endif
8964 #endif
8965
8966 #ifdef HAVE_call
8967 if (HAVE_call)
8968 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8969 const0_rtx, next_arg_reg, const0_rtx));
8970 else
8971 #endif
8972 abort ();
8973
8974 emit_move_insn (target, const1_rtx);
8975 emit_label (lab2);
8976 return target;
8977 }
8978
8979 /* __builtin_longjmp is passed a pointer to an array of five words
8980 and a value, which is a dummy. It's similar to the C library longjmp
8981 function but works with __builtin_setjmp above. */
8982 case BUILT_IN_LONGJMP:
8983 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8984 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8985 break;
8986
8987 {
8988 tree dummy_id = get_identifier ("__dummy");
8989 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
8990 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
8991 #ifdef POINTERS_EXTEND_UNSIGNED
8992 rtx buf_addr
8993 = force_reg (Pmode,
8994 convert_memory_address
8995 (Pmode,
8996 expand_expr (TREE_VALUE (arglist),
8997 NULL_RTX, VOIDmode, 0)));
8998 #else
8999 rtx buf_addr
9000 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9001 NULL_RTX,
9002 VOIDmode, 0));
9003 #endif
9004 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
9005 rtx lab = gen_rtx (MEM, Pmode,
9006 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
9007 enum machine_mode sa_mode
9008 #ifdef HAVE_save_stack_nonlocal
9009 = (HAVE_save_stack_nonlocal
9010 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9011 : Pmode);
9012 #else
9013 = Pmode;
9014 #endif
9015 rtx stack = gen_rtx (MEM, sa_mode,
9016 plus_constant (buf_addr,
9017 2 * GET_MODE_SIZE (Pmode)));
9018
9019 DECL_EXTERNAL (dummy_decl) = 1;
9020 TREE_PUBLIC (dummy_decl) = 1;
9021 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9022
9023 /* Expand the second expression just for side-effects. */
9024 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9025 const0_rtx, VOIDmode, 0);
9026
9027 assemble_external (dummy_decl);
9028
9029 /* Pick up FP, label, and SP from the block and jump. This code is
9030 from expand_goto in stmt.c; see there for detailed comments. */
9031 #ifdef HAVE_nonlocal_goto
9032 if (HAVE_nonlocal_goto)
9033 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9034 XEXP (DECL_RTL (dummy_decl), 0)));
9035 else
9036 #endif
9037 {
9038 lab = copy_to_reg (lab);
9039 emit_move_insn (hard_frame_pointer_rtx, fp);
9040 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9041
9042 /* Put in the static chain register the address of the dummy
9043 function. */
9044 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9045 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9046 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9047 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9048 emit_indirect_jump (lab);
9049 }
9050
9051 return const0_rtx;
9052 }
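/* Illustrative user-level sketch of the protocol implemented by the two
   cases above (hypothetical user code, assuming the GNAT-style usage
   these builtins were written for; do_work and recover are placeholders):  */
#if 0
static void *env[5];

static void
example ()
{
  if (__builtin_setjmp (env) == 0)
    do_work ();			/* direct return: value is 0 */
  else
    recover ();			/* arrived via longjmp: value is 1 */
}

static void
unwind ()
{
  __builtin_longjmp (env, 1);	/* second argument is a dummy */
}
#endif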
9053
9054 default: /* just do library call, if unknown builtin */
9055 error ("built-in function `%s' not currently supported",
9056 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9057 }
9058
9059 /* The switch statement above can drop through to cause the function
9060 to be called normally. */
9061
9062 return expand_call (exp, target, ignore);
9063 }
9064 \f
9065 /* Built-in functions to perform an untyped call and return. */
9066
9067 /* For each register that may be used for calling a function, this
9068 gives a mode used to copy the register's value. VOIDmode indicates
9069 the register is not used for calling a function. If the machine
9070 has register windows, this gives only the outbound registers.
9071 INCOMING_REGNO gives the corresponding inbound register. */
9072 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9073
9074 /* For each register that may be used for returning values, this gives
9075 a mode used to copy the register's value. VOIDmode indicates the
9076 register is not used for returning values. If the machine has
9077 register windows, this gives only the outbound registers.
9078 INCOMING_REGNO gives the corresponding inbound register. */
9079 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9080
9081 /* For each register that may be used for calling a function, this
9082 gives the offset of that register into the block returned by
9083 __builtin_apply_args. 0 indicates that the register is not
9084 used for calling a function. */
9085 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9086
9087 /* Return the offset of register REGNO into the block returned by
9088 __builtin_apply_args. This is not declared static, since it is
9089 needed in objc-act.c. */
9090
9091 int
9092 apply_args_register_offset (regno)
9093 int regno;
9094 {
9095 apply_args_size ();
9096
9097 /* Arguments are always put in outgoing registers (in the argument
9098 block) when that makes sense. */
9099 #ifdef OUTGOING_REGNO
9100 regno = OUTGOING_REGNO(regno);
9101 #endif
9102 return apply_args_reg_offset[regno];
9103 }
9104
9105 /* Return the size required for the block returned by __builtin_apply_args,
9106 and initialize apply_args_mode. */
9107
9108 static int
9109 apply_args_size ()
9110 {
9111 static int size = -1;
9112 int align, regno;
9113 enum machine_mode mode;
9114
9115 /* The values computed by this function never change. */
9116 if (size < 0)
9117 {
9118 /* The first value is the incoming arg-pointer. */
9119 size = GET_MODE_SIZE (Pmode);
9120
9121 /* The second value is the structure value address unless this is
9122 passed as an "invisible" first argument. */
9123 if (struct_value_rtx)
9124 size += GET_MODE_SIZE (Pmode);
9125
9126 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9127 if (FUNCTION_ARG_REGNO_P (regno))
9128 {
9129 /* Search for the proper mode for copying this register's
9130 value. I'm not sure this is right, but it works so far. */
9131 enum machine_mode best_mode = VOIDmode;
9132
9133 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9134 mode != VOIDmode;
9135 mode = GET_MODE_WIDER_MODE (mode))
9136 if (HARD_REGNO_MODE_OK (regno, mode)
9137 && HARD_REGNO_NREGS (regno, mode) == 1)
9138 best_mode = mode;
9139
9140 if (best_mode == VOIDmode)
9141 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9142 mode != VOIDmode;
9143 mode = GET_MODE_WIDER_MODE (mode))
9144 if (HARD_REGNO_MODE_OK (regno, mode)
9145 && (mov_optab->handlers[(int) mode].insn_code
9146 != CODE_FOR_nothing))
9147 best_mode = mode;
9148
9149 mode = best_mode;
9150 if (mode == VOIDmode)
9151 abort ();
9152
9153 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9154 if (size % align != 0)
9155 size = CEIL (size, align) * align;
9156 apply_args_reg_offset[regno] = size;
9157 size += GET_MODE_SIZE (mode);
9158 apply_args_mode[regno] = mode;
9159 }
9160 else
9161 {
9162 apply_args_mode[regno] = VOIDmode;
9163 apply_args_reg_offset[regno] = 0;
9164 }
9165 }
9166 return size;
9167 }
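/* Editorial sketch of the round-then-advance layout arithmetic used
   above (and again in apply_result_size and the functions below); the
   helper name is hypothetical.  */
#if 0
static int
place_slot (size, align, slot_bytes)
     int size, align, slot_bytes;
{
  if (size % align != 0)
    size = CEIL (size, align) * align;	/* round offset up to ALIGN */
  return size + slot_bytes;		/* then advance past the slot */
}

/* place_slot (6, 4, 8) == 16: offset 6 rounds up to 8, and the
   8-byte slot ends at 16.  */
#endif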
9168
9169 /* Return the size required for the block returned by __builtin_apply,
9170 and initialize apply_result_mode. */
9171
9172 static int
9173 apply_result_size ()
9174 {
9175 static int size = -1;
9176 int align, regno;
9177 enum machine_mode mode;
9178
9179 /* The values computed by this function never change. */
9180 if (size < 0)
9181 {
9182 size = 0;
9183
9184 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9185 if (FUNCTION_VALUE_REGNO_P (regno))
9186 {
9187 /* Search for the proper mode for copying this register's
9188 value. I'm not sure this is right, but it works so far. */
9189 enum machine_mode best_mode = VOIDmode;
9190
9191 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9192 mode != TImode;
9193 mode = GET_MODE_WIDER_MODE (mode))
9194 if (HARD_REGNO_MODE_OK (regno, mode))
9195 best_mode = mode;
9196
9197 if (best_mode == VOIDmode)
9198 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9199 mode != VOIDmode;
9200 mode = GET_MODE_WIDER_MODE (mode))
9201 if (HARD_REGNO_MODE_OK (regno, mode)
9202 && (mov_optab->handlers[(int) mode].insn_code
9203 != CODE_FOR_nothing))
9204 best_mode = mode;
9205
9206 mode = best_mode;
9207 if (mode == VOIDmode)
9208 abort ();
9209
9210 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9211 if (size % align != 0)
9212 size = CEIL (size, align) * align;
9213 size += GET_MODE_SIZE (mode);
9214 apply_result_mode[regno] = mode;
9215 }
9216 else
9217 apply_result_mode[regno] = VOIDmode;
9218
9219 /* Allow targets that use untyped_call and untyped_return to override
9220 the size so that machine-specific information can be stored here. */
9221 #ifdef APPLY_RESULT_SIZE
9222 size = APPLY_RESULT_SIZE;
9223 #endif
9224 }
9225 return size;
9226 }
9227
9228 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9229 /* Create a vector describing the result block RESULT. If SAVEP is true,
9230 the result block is used to save the values; otherwise it is used to
9231 restore the values. */
9232
9233 static rtx
9234 result_vector (savep, result)
9235 int savep;
9236 rtx result;
9237 {
9238 int regno, size, align, nelts;
9239 enum machine_mode mode;
9240 rtx reg, mem;
9241 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9242
9243 size = nelts = 0;
9244 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9245 if ((mode = apply_result_mode[regno]) != VOIDmode)
9246 {
9247 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9248 if (size % align != 0)
9249 size = CEIL (size, align) * align;
9250 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9251 mem = change_address (result, mode,
9252 plus_constant (XEXP (result, 0), size));
9253 savevec[nelts++] = (savep
9254 ? gen_rtx (SET, VOIDmode, mem, reg)
9255 : gen_rtx (SET, VOIDmode, reg, mem));
9256 size += GET_MODE_SIZE (mode);
9257 }
9258 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9259 }
9260 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9261
9262 /* Save the state required to perform an untyped call with the same
9263 arguments as were passed to the current function. */
9264
9265 static rtx
9266 expand_builtin_apply_args ()
9267 {
9268 rtx registers;
9269 int size, align, regno;
9270 enum machine_mode mode;
9271
9272 /* Create a block where the arg-pointer, structure value address,
9273 and argument registers can be saved. */
9274 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9275
9276 /* Walk past the arg-pointer and structure value address. */
9277 size = GET_MODE_SIZE (Pmode);
9278 if (struct_value_rtx)
9279 size += GET_MODE_SIZE (Pmode);
9280
9281 /* Save each register used in calling a function to the block. */
9282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9283 if ((mode = apply_args_mode[regno]) != VOIDmode)
9284 {
9285 rtx tem;
9286
9287 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9288 if (size % align != 0)
9289 size = CEIL (size, align) * align;
9290
9291 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9292
9293 #ifdef STACK_REGS
9294 /* For reg-stack.c's stack register housekeeping.
9295 Compare with a similar piece of code in function.c. */
9296
9297 emit_insn (gen_rtx (USE, mode, tem));
9298 #endif
9299
9300 emit_move_insn (change_address (registers, mode,
9301 plus_constant (XEXP (registers, 0),
9302 size)),
9303 tem);
9304 size += GET_MODE_SIZE (mode);
9305 }
9306
9307 /* Save the arg pointer to the block. */
9308 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9309 copy_to_reg (virtual_incoming_args_rtx));
9310 size = GET_MODE_SIZE (Pmode);
9311
9312 /* Save the structure value address unless this is passed as an
9313 "invisible" first argument. */
9314 if (struct_value_incoming_rtx)
9315 {
9316 emit_move_insn (change_address (registers, Pmode,
9317 plus_constant (XEXP (registers, 0),
9318 size)),
9319 copy_to_reg (struct_value_incoming_rtx));
9320 size += GET_MODE_SIZE (Pmode);
9321 }
9322
9323 /* Return the address of the block. */
9324 return copy_addr_to_reg (XEXP (registers, 0));
9325 }
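/* Editorial summary of the block just built (offsets depend on Pmode and
   on which registers FUNCTION_ARG_REGNO_P accepts):
     offset 0		incoming arg pointer
     next Pmode slot	structure value address, if any
     then, for each argument register, a slot aligned and sized as
     computed in apply_args_size.
   The returned address is what __builtin_apply_args yields.  */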
9326
9327 /* Perform an untyped call and save the state required to perform an
9328 untyped return of whatever value was returned by the given function. */
9329
9330 static rtx
9331 expand_builtin_apply (function, arguments, argsize)
9332 rtx function, arguments, argsize;
9333 {
9334 int size, align, regno;
9335 enum machine_mode mode;
9336 rtx incoming_args, result, reg, dest, call_insn;
9337 rtx old_stack_level = 0;
9338 rtx call_fusage = 0;
9339
9340 /* Create a block where the return registers can be saved. */
9341 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9342
9343 /* ??? The argsize value should be adjusted here. */
9344
9345 /* Fetch the arg pointer from the ARGUMENTS block. */
9346 incoming_args = gen_reg_rtx (Pmode);
9347 emit_move_insn (incoming_args,
9348 gen_rtx (MEM, Pmode, arguments));
9349 #ifndef STACK_GROWS_DOWNWARD
9350 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9351 incoming_args, 0, OPTAB_LIB_WIDEN);
9352 #endif
9353
9354 /* Perform postincrements before actually calling the function. */
9355 emit_queue ();
9356
9357 /* Push a new argument block and copy the arguments. */
9358 do_pending_stack_adjust ();
9359 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9360
9361 /* Push a block of memory onto the stack to store the memory arguments.
9362 Save the address in a register, and copy the memory arguments. ??? I
9363 haven't figured out how the calling convention macros affect this,
9364 but it's likely that the source and/or destination addresses in
9365 the block copy will need updating in machine-specific ways. */
9366 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9367 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9368 gen_rtx (MEM, BLKmode, incoming_args),
9369 argsize,
9370 PARM_BOUNDARY / BITS_PER_UNIT);
9371
9372 /* Refer to the argument block. */
9373 apply_args_size ();
9374 arguments = gen_rtx (MEM, BLKmode, arguments);
9375
9376 /* Walk past the arg-pointer and structure value address. */
9377 size = GET_MODE_SIZE (Pmode);
9378 if (struct_value_rtx)
9379 size += GET_MODE_SIZE (Pmode);
9380
9381 /* Restore each of the registers previously saved. Make USE insns
9382 for each of these registers for use in making the call. */
9383 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9384 if ((mode = apply_args_mode[regno]) != VOIDmode)
9385 {
9386 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9387 if (size % align != 0)
9388 size = CEIL (size, align) * align;
9389 reg = gen_rtx (REG, mode, regno);
9390 emit_move_insn (reg,
9391 change_address (arguments, mode,
9392 plus_constant (XEXP (arguments, 0),
9393 size)));
9394
9395 use_reg (&call_fusage, reg);
9396 size += GET_MODE_SIZE (mode);
9397 }
9398
9399 /* Restore the structure value address unless this is passed as an
9400 "invisible" first argument. */
9401 size = GET_MODE_SIZE (Pmode);
9402 if (struct_value_rtx)
9403 {
9404 rtx value = gen_reg_rtx (Pmode);
9405 emit_move_insn (value,
9406 change_address (arguments, Pmode,
9407 plus_constant (XEXP (arguments, 0),
9408 size)));
9409 emit_move_insn (struct_value_rtx, value);
9410 if (GET_CODE (struct_value_rtx) == REG)
9411 use_reg (&call_fusage, struct_value_rtx);
9412 size += GET_MODE_SIZE (Pmode);
9413 }
9414
9415 /* All arguments and registers used for the call are set up by now! */
9416 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9417
9418 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
9419 needs doing, and we don't want to load it into a register as an
9420 optimization, because prepare_call_address already did that if needed. */
9421 if (GET_CODE (function) != SYMBOL_REF)
9422 function = memory_address (FUNCTION_MODE, function);
9423
9424 /* Generate the actual call instruction and save the return value. */
9425 #ifdef HAVE_untyped_call
9426 if (HAVE_untyped_call)
9427 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9428 result, result_vector (1, result)));
9429 else
9430 #endif
9431 #ifdef HAVE_call_value
9432 if (HAVE_call_value)
9433 {
9434 rtx valreg = 0;
9435
9436 /* Locate the unique return register. It is not possible to
9437 express a call that sets more than one return register using
9438 call_value; use untyped_call for that. In fact, untyped_call
9439 only needs to save the return registers in the given block. */
9440 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9441 if ((mode = apply_result_mode[regno]) != VOIDmode)
9442 {
9443 if (valreg)
9444 abort (); /* HAVE_untyped_call required. */
9445 valreg = gen_rtx (REG, mode, regno);
9446 }
9447
9448 emit_call_insn (gen_call_value (valreg,
9449 gen_rtx (MEM, FUNCTION_MODE, function),
9450 const0_rtx, NULL_RTX, const0_rtx));
9451
9452 emit_move_insn (change_address (result, GET_MODE (valreg),
9453 XEXP (result, 0)),
9454 valreg);
9455 }
9456 else
9457 #endif
9458 abort ();
9459
9460 /* Find the CALL insn we just emitted. */
9461 for (call_insn = get_last_insn ();
9462 call_insn && GET_CODE (call_insn) != CALL_INSN;
9463 call_insn = PREV_INSN (call_insn))
9464 ;
9465
9466 if (! call_insn)
9467 abort ();
9468
9469 /* Put the register usage information on the CALL. If there is already
9470 some usage information, put ours at the end. */
9471 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9472 {
9473 rtx link;
9474
9475 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9476 link = XEXP (link, 1))
9477 ;
9478
9479 XEXP (link, 1) = call_fusage;
9480 }
9481 else
9482 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9483
9484 /* Restore the stack. */
9485 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9486
9487 /* Return the address of the result block. */
9488 return copy_addr_to_reg (XEXP (result, 0));
9489 }
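/* Illustrative user-level sketch of an untyped forwarder built from these
   expanders (hypothetical user code; the 64 is an assumed bound on the
   size of the pushed argument block, which, per the ??? comment above,
   this code does not adjust):  */
#if 0
void *
forward (target)
     void (*target) ();
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply (target, args, 64);
  __builtin_return (result);
}
#endif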
9490
9491 /* Perform an untyped return. */
9492
9493 static void
9494 expand_builtin_return (result)
9495 rtx result;
9496 {
9497 int size, align, regno;
9498 enum machine_mode mode;
9499 rtx reg;
9500 rtx call_fusage = 0;
9501
9502 apply_result_size ();
9503 result = gen_rtx (MEM, BLKmode, result);
9504
9505 #ifdef HAVE_untyped_return
9506 if (HAVE_untyped_return)
9507 {
9508 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9509 emit_barrier ();
9510 return;
9511 }
9512 #endif
9513
9514 /* Restore the return value and note that each value is used. */
9515 size = 0;
9516 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9517 if ((mode = apply_result_mode[regno]) != VOIDmode)
9518 {
9519 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9520 if (size % align != 0)
9521 size = CEIL (size, align) * align;
9522 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9523 emit_move_insn (reg,
9524 change_address (result, mode,
9525 plus_constant (XEXP (result, 0),
9526 size)));
9527
9528 push_to_sequence (call_fusage);
9529 emit_insn (gen_rtx (USE, VOIDmode, reg));
9530 call_fusage = get_insns ();
9531 end_sequence ();
9532 size += GET_MODE_SIZE (mode);
9533 }
9534
9535 /* Put the USE insns before the return. */
9536 emit_insns (call_fusage);
9537
9538 /* Return whatever values were restored by jumping directly to the end
9539 of the function. */
9540 expand_null_return ();
9541 }
9542 \f
9543 /* Expand code for a post- or pre- increment or decrement
9544 and return the RTX for the result.
9545 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9546
9547 static rtx
9548 expand_increment (exp, post, ignore)
9549 register tree exp;
9550 int post, ignore;
9551 {
9552 register rtx op0, op1;
9553 register rtx temp, value;
9554 register tree incremented = TREE_OPERAND (exp, 0);
9555 optab this_optab = add_optab;
9556 int icode;
9557 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9558 int op0_is_copy = 0;
9559 int single_insn = 0;
9560 /* 1 means we can't store into OP0 directly,
9561 because it is a subreg narrower than a word,
9562 and we don't dare clobber the rest of the word. */
9563 int bad_subreg = 0;
9564
9565 if (output_bytecode)
9566 {
9567 bc_expand_expr (exp);
9568 return NULL_RTX;
9569 }
9570
9571 /* Stabilize any component ref that might need to be
9572 evaluated more than once below. */
9573 if (!post
9574 || TREE_CODE (incremented) == BIT_FIELD_REF
9575 || (TREE_CODE (incremented) == COMPONENT_REF
9576 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9577 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9578 incremented = stabilize_reference (incremented);
9579 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9580 ones into save exprs so that they don't accidentally get evaluated
9581 more than once by the code below. */
9582 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9583 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9584 incremented = save_expr (incremented);
9585
9586 /* Compute the operands as RTX.
9587 Note whether OP0 is the actual lvalue or a copy of it:
9588 I believe it is a copy iff it is a register or subreg
9589 and insns were generated in computing it. */
9590
9591 temp = get_last_insn ();
9592 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9593
9594 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9595 in place but instead must do sign- or zero-extension during assignment,
9596 so we copy it into a new register and let the code below use it as
9597 a copy.
9598
9599 Note that we can safely modify this SUBREG since it is known not to be
9600 shared (it was made by the expand_expr call above). */
9601
9602 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9603 {
9604 if (post)
9605 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9606 else
9607 bad_subreg = 1;
9608 }
9609 else if (GET_CODE (op0) == SUBREG
9610 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9611 {
9612 /* We cannot increment this SUBREG in place. If we are
9613 post-incrementing, get a copy of the old value. Otherwise,
9614 just mark that we cannot increment in place. */
9615 if (post)
9616 op0 = copy_to_reg (op0);
9617 else
9618 bad_subreg = 1;
9619 }
9620
9621 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9622 && temp != get_last_insn ());
9623 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9624
9625 /* Decide whether incrementing or decrementing. */
9626 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9627 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9628 this_optab = sub_optab;
9629
9630 /* Convert decrement by a constant into a negative increment. */
9631 if (this_optab == sub_optab
9632 && GET_CODE (op1) == CONST_INT)
9633 {
9634 op1 = GEN_INT (- INTVAL (op1));
9635 this_optab = add_optab;
9636 }
9637
9638 /* For a preincrement, see if we can do this with a single instruction. */
9639 if (!post)
9640 {
9641 icode = (int) this_optab->handlers[(int) mode].insn_code;
9642 if (icode != (int) CODE_FOR_nothing
9643 /* Make sure that OP0 is valid for operands 0 and 1
9644 of the insn we want to queue. */
9645 && (*insn_operand_predicate[icode][0]) (op0, mode)
9646 && (*insn_operand_predicate[icode][1]) (op0, mode)
9647 && (*insn_operand_predicate[icode][2]) (op1, mode))
9648 single_insn = 1;
9649 }
9650
9651 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9652 then we cannot just increment OP0. We must therefore contrive to
9653 increment the original value. Then, for postincrement, we can return
9654 OP0 since it is a copy of the old value. For preincrement, expand here
9655 unless we can do it with a single insn.
9656
9657 Likewise if storing directly into OP0 would clobber high bits
9658 we need to preserve (bad_subreg). */
9659 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9660 {
9661 /* This is the easiest way to increment the value wherever it is.
9662 Problems with multiple evaluation of INCREMENTED are prevented
9663 because either (1) it is a component_ref or preincrement,
9664 in which case it was stabilized above, or (2) it is an array_ref
9665 with constant index in an array in a register, which is
9666 safe to reevaluate. */
9667 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9668 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9669 ? MINUS_EXPR : PLUS_EXPR),
9670 TREE_TYPE (exp),
9671 incremented,
9672 TREE_OPERAND (exp, 1));
9673
9674 while (TREE_CODE (incremented) == NOP_EXPR
9675 || TREE_CODE (incremented) == CONVERT_EXPR)
9676 {
9677 newexp = convert (TREE_TYPE (incremented), newexp);
9678 incremented = TREE_OPERAND (incremented, 0);
9679 }
9680
9681 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9682 return post ? op0 : temp;
9683 }
9684
9685 if (post)
9686 {
9687 /* We have a true reference to the value in OP0.
9688 If there is an insn to add or subtract in this mode, queue it.
9689 Queueing the increment insn avoids the register shuffling
9690 that often results if we must increment now and first save
9691 the old value for subsequent use. */
9692
9693 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9694 op0 = stabilize (op0);
9695 #endif
9696
9697 icode = (int) this_optab->handlers[(int) mode].insn_code;
9698 if (icode != (int) CODE_FOR_nothing
9699 /* Make sure that OP0 is valid for operands 0 and 1
9700 of the insn we want to queue. */
9701 && (*insn_operand_predicate[icode][0]) (op0, mode)
9702 && (*insn_operand_predicate[icode][1]) (op0, mode))
9703 {
9704 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9705 op1 = force_reg (mode, op1);
9706
9707 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9708 }
9709 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9710 {
9711 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9712 rtx temp, result;
9713
9714 op0 = change_address (op0, VOIDmode, addr);
9715 temp = force_reg (GET_MODE (op0), op0);
9716 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9717 op1 = force_reg (mode, op1);
9718
9719 /* The increment queue is LIFO, thus we have to `queue'
9720 the instructions in reverse order. */
9721 enqueue_insn (op0, gen_move_insn (op0, temp));
9722 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9723 return result;
9724 }
9725 }
9726
9727 /* Preincrement, or we can't increment with one simple insn. */
9728 if (post)
9729 /* Save a copy of the value before inc or dec, to return it later. */
9730 temp = value = copy_to_reg (op0);
9731 else
9732 /* Arrange to return the incremented value. */
9733 /* Copy the rtx because expand_binop will protect from the queue,
9734 and the results of that would be invalid for us to return
9735 if our caller does emit_queue before using our result. */
9736 temp = copy_rtx (value = op0);
9737
9738 /* Increment however we can. */
9739 op1 = expand_binop (mode, this_optab, value, op1, op0,
9740 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9741 /* Make sure the value is stored into OP0. */
9742 if (op1 != op0)
9743 emit_move_insn (op0, op1);
9744
9745 return temp;
9746 }
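/* Editorial summary of the two results computed above: for a
   postincrement, TEMP is a copy of the old value and the store is
   queued; for a preincrement, TEMP is the stored result.  In C terms,
   `i++' yields the saved copy while `++i' yields the updated lvalue,
   and in both cases the original lvalue, not any intermediate copy,
   is what gets updated.  */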
9747 \f
9748 /* Expand all function calls contained within EXP, innermost ones first.
9749 But don't look within expressions that have sequence points.
9750 For each CALL_EXPR, record the rtx for its value
9751 in the CALL_EXPR_RTL field. */
9752
9753 static void
9754 preexpand_calls (exp)
9755 tree exp;
9756 {
9757 register int nops, i;
9758 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9759
9760 if (! do_preexpand_calls)
9761 return;
9762
9763 /* Only expressions and references can contain calls. */
9764
9765 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9766 return;
9767
9768 switch (TREE_CODE (exp))
9769 {
9770 case CALL_EXPR:
9771 /* Do nothing if already expanded. */
9772 if (CALL_EXPR_RTL (exp) != 0
9773 /* Do nothing if the call returns a variable-sized object. */
9774 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9775 /* Do nothing to built-in functions. */
9776 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9777 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9778 == FUNCTION_DECL)
9779 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9780 return;
9781
9782 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9783 return;
9784
9785 case COMPOUND_EXPR:
9786 case COND_EXPR:
9787 case TRUTH_ANDIF_EXPR:
9788 case TRUTH_ORIF_EXPR:
9789 /* If we find one of these, then we can be sure
9790 the adjust will be done for it (since it makes jumps).
9791 Do it now, so that if this is inside an argument
9792 of a function, we don't get the stack adjustment
9793 after some other args have already been pushed. */
9794 do_pending_stack_adjust ();
9795 return;
9796
9797 case BLOCK:
9798 case RTL_EXPR:
9799 case WITH_CLEANUP_EXPR:
9800 case CLEANUP_POINT_EXPR:
9801 return;
9802
9803 case SAVE_EXPR:
9804 if (SAVE_EXPR_RTL (exp) != 0)
9805 return;
9806 }
9807
9808 nops = tree_code_length[(int) TREE_CODE (exp)];
9809 for (i = 0; i < nops; i++)
9810 if (TREE_OPERAND (exp, i) != 0)
9811 {
9812 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9813 if (type == 'e' || type == '<' || type == '1' || type == '2'
9814 || type == 'r')
9815 preexpand_calls (TREE_OPERAND (exp, i));
9816 }
9817 }
9818 \f
9819 /* At the start of a function, record that we have no previously-pushed
9820 arguments waiting to be popped. */
9821
9822 void
9823 init_pending_stack_adjust ()
9824 {
9825 pending_stack_adjust = 0;
9826 }
9827
9828 /* When exiting from function, if safe, clear out any pending stack adjust
9829 so the adjustment won't get done. */
9830
9831 void
9832 clear_pending_stack_adjust ()
9833 {
9834 #ifdef EXIT_IGNORE_STACK
9835 if (optimize > 0
9836 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9837 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9838 && ! flag_inline_functions)
9839 pending_stack_adjust = 0;
9840 #endif
9841 }
9842
9843 /* Pop any previously-pushed arguments that have not been popped yet. */
9844
9845 void
9846 do_pending_stack_adjust ()
9847 {
9848 if (inhibit_defer_pop == 0)
9849 {
9850 if (pending_stack_adjust != 0)
9851 adjust_stack (GEN_INT (pending_stack_adjust));
9852 pending_stack_adjust = 0;
9853 }
9854 }
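/* Editorial sketch of the deferral protocol (not compiled): callers
   accumulate into pending_stack_adjust as calls return, and a single
   pop is emitted here once inhibit_defer_pop is zero.  */
#if 0
static void
example_deferred_pops ()
{
  pending_stack_adjust += 8;	/* two word-size args just returned */
  pending_stack_adjust += 4;	/* one more from a second call */
  do_pending_stack_adjust ();	/* one adjust_stack of 12 bytes */
}
#endif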
9855
9856 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9857 Returns the cleanups to be performed. */
9858
9859 static tree
9860 defer_cleanups_to (old_cleanups)
9861 tree old_cleanups;
9862 {
9863 tree new_cleanups = NULL_TREE;
9864 tree cleanups = cleanups_this_call;
9865 tree last = NULL_TREE;
9866
9867 while (cleanups_this_call != old_cleanups)
9868 {
9869 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9870 last = cleanups_this_call;
9871 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9872 }
9873
9874 if (last)
9875 {
9876 /* Remove the list from the chain of cleanups. */
9877 TREE_CHAIN (last) = NULL_TREE;
9878
9879 /* Reverse them so that we can build them in the right order. */
9880 cleanups = nreverse (cleanups);
9881
9882 /* All cleanups must be on the function_obstack. */
9883 push_obstacks_nochange ();
9884 resume_temporary_allocation ();
9885
9886 while (cleanups)
9887 {
9888 if (new_cleanups)
9889 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9890 TREE_VALUE (cleanups), new_cleanups);
9891 else
9892 new_cleanups = TREE_VALUE (cleanups);
9893
9894 cleanups = TREE_CHAIN (cleanups);
9895 }
9896
9897 pop_obstacks ();
9898 }
9899
9900 return new_cleanups;
9901 }
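/* Editorial example: cleanups pushed in the order c1, c2, c3 come back
   from this function as the single tree (c3, (c2, c1)) -- nested
   COMPOUND_EXPRs -- so they run newest-first, matching the LIFO
   discipline of cleanup scopes.  */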
9902
9903 /* Expand all cleanups up to OLD_CLEANUPS.
9904 Needed here, and also for language-dependent calls. */
9905
9906 void
9907 expand_cleanups_to (old_cleanups)
9908 tree old_cleanups;
9909 {
9910 while (cleanups_this_call != old_cleanups)
9911 {
9912 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9913 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9914 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9915 }
9916 }
9917 \f
9918 /* Expand conditional expressions. */
9919
9920 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9921 LABEL is an rtx of code CODE_LABEL, in this function and all the
9922 functions here. */
9923
9924 void
9925 jumpifnot (exp, label)
9926 tree exp;
9927 rtx label;
9928 {
9929 do_jump (exp, label, NULL_RTX);
9930 }
9931
9932 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9933
9934 void
9935 jumpif (exp, label)
9936 tree exp;
9937 rtx label;
9938 {
9939 do_jump (exp, NULL_RTX, label);
9940 }
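/* Editorial usage note: given any CODE_LABEL rtx,
     jumpif (cond, label);	jumps to LABEL when COND is nonzero;
     jumpifnot (cond, label);	jumps to LABEL when COND is zero.
   Both are thin wrappers around do_jump below.  */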
9941
9942 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9943 the result is zero, or IF_TRUE_LABEL if the result is one.
9944 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9945 meaning fall through in that case.
9946
9947 do_jump always does any pending stack adjust except when it does not
9948 actually perform a jump. An example where there is no jump
9949 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9950
9951 This function is responsible for optimizing cases such as
9952 &&, || and comparison operators in EXP. */
9953
9954 void
9955 do_jump (exp, if_false_label, if_true_label)
9956 tree exp;
9957 rtx if_false_label, if_true_label;
9958 {
9959 register enum tree_code code = TREE_CODE (exp);
9960 /* Some cases need to create a label to jump to
9961 in order to properly fall through.
9962 These cases set DROP_THROUGH_LABEL nonzero. */
9963 rtx drop_through_label = 0;
9964 rtx temp;
9965 rtx comparison = 0;
9966 int i;
9967 tree type;
9968 enum machine_mode mode;
9969
9970 emit_queue ();
9971
9972 switch (code)
9973 {
9974 case ERROR_MARK:
9975 break;
9976
9977 case INTEGER_CST:
9978 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9979 if (temp)
9980 emit_jump (temp);
9981 break;
9982
9983 #if 0
9984 /* This is not true with #pragma weak */
9985 case ADDR_EXPR:
9986 /* The address of something can never be zero. */
9987 if (if_true_label)
9988 emit_jump (if_true_label);
9989 break;
9990 #endif
9991
9992 case NOP_EXPR:
9993 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9994 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9995 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9996 goto normal;
9997 case CONVERT_EXPR:
9998 /* If we are narrowing the operand, we have to do the compare in the
9999 narrower mode. */
10000 if ((TYPE_PRECISION (TREE_TYPE (exp))
10001 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10002 goto normal;
10003 case NON_LVALUE_EXPR:
10004 case REFERENCE_EXPR:
10005 case ABS_EXPR:
10006 case NEGATE_EXPR:
10007 case LROTATE_EXPR:
10008 case RROTATE_EXPR:
10009 /* These cannot change zero->non-zero or vice versa. */
10010 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10011 break;
10012
10013 #if 0
10014 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
10015 a test and can be longer if the test is eliminated. */
10016 case PLUS_EXPR:
10017 /* Reduce to minus. */
10018 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10019 TREE_OPERAND (exp, 0),
10020 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10021 TREE_OPERAND (exp, 1))));
10022 /* Process as MINUS. */
10023 #endif
10024
10025 case MINUS_EXPR:
10026 /* Non-zero iff operands of minus differ. */
10027 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10028 TREE_OPERAND (exp, 0),
10029 TREE_OPERAND (exp, 1)),
10030 NE, NE);
10031 break;
10032
10033 case BIT_AND_EXPR:
10034 /* If we are AND'ing with a small constant, do this comparison in the
10035 smallest type that fits. If the machine doesn't have comparisons
10036 that small, it will be converted back to the wider comparison.
10037 This helps if we are testing the sign bit of a narrower object.
10038 combine can't do this for us because it can't know whether a
10039 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10040
10041 if (! SLOW_BYTE_ACCESS
10042 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10043 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10044 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10045 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10046 && (type = type_for_mode (mode, 1)) != 0
10047 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10048 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10049 != CODE_FOR_nothing))
10050 {
10051 do_jump (convert (type, exp), if_false_label, if_true_label);
10052 break;
10053 }
10054 goto normal;
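/* Editorial example of the narrowing above: for `if (x & 0x80)' with X
   an int, floor_log2 gives 7, so on a target with a QImode compare the
   test is done as an 8-bit comparison of the low byte rather than a
   full-width AND and compare.  */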
10055
10056 case TRUTH_NOT_EXPR:
10057 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10058 break;
10059
10060 case TRUTH_ANDIF_EXPR:
10061 {
10062 rtx seq1, seq2;
10063 tree cleanups, old_cleanups;
10064
10065 if (if_false_label == 0)
10066 if_false_label = drop_through_label = gen_label_rtx ();
10067 start_sequence ();
10068 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10069 seq1 = get_insns ();
10070 end_sequence ();
10071
10072 old_cleanups = cleanups_this_call;
10073 start_sequence ();
10074 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10075 seq2 = get_insns ();
10076 cleanups = defer_cleanups_to (old_cleanups);
10077 end_sequence ();
10078
10079 if (cleanups)
10080 {
10081 rtx flag = gen_reg_rtx (word_mode);
10082 tree new_cleanups;
10083 tree cond;
10084
10085 /* Flag cleanups as not needed. */
10086 emit_move_insn (flag, const0_rtx);
10087 emit_insns (seq1);
10088
10089 /* Flag cleanups as needed. */
10090 emit_move_insn (flag, const1_rtx);
10091 emit_insns (seq2);
10092
10093 /* All cleanups must be on the function_obstack. */
10094 push_obstacks_nochange ();
10095 resume_temporary_allocation ();
10096
10097 /* Convert flag, which is an rtx, into a tree. */
10098 cond = make_node (RTL_EXPR);
10099 TREE_TYPE (cond) = integer_type_node;
10100 RTL_EXPR_RTL (cond) = flag;
10101 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10102 cond = save_expr (cond);
10103
10104 new_cleanups = build (COND_EXPR, void_type_node,
10105 truthvalue_conversion (cond),
10106 cleanups, integer_zero_node);
10107 new_cleanups = fold (new_cleanups);
10108
10109 pop_obstacks ();
10110
10111 /* Now add in the conditionalized cleanups. */
10112 cleanups_this_call
10113 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10114 expand_eh_region_start ();
10115 }
10116 else
10117 {
10118 emit_insns (seq1);
10119 emit_insns (seq2);
10120 }
10121 }
10122 break;
10123
10124 case TRUTH_ORIF_EXPR:
10125 {
10126 rtx seq1, seq2;
10127 tree cleanups, old_cleanups;
10128
10129 if (if_true_label == 0)
10130 if_true_label = drop_through_label = gen_label_rtx ();
10131 start_sequence ();
10132 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10133 seq1 = get_insns ();
10134 end_sequence ();
10135
10136 old_cleanups = cleanups_this_call;
10137 start_sequence ();
10138 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10139 seq2 = get_insns ();
10140 cleanups = defer_cleanups_to (old_cleanups);
10141 end_sequence ();
10142
10143 if (cleanups)
10144 {
10145 rtx flag = gen_reg_rtx (word_mode);
10146 tree new_cleanups;
10147 tree cond;
10148
10149 /* Flag cleanups as not needed. */
10150 emit_move_insn (flag, const0_rtx);
10151 emit_insns (seq1);
10152
10153 /* Flag cleanups as needed. */
10154 emit_move_insn (flag, const1_rtx);
10155 emit_insns (seq2);
10156
10157 /* All cleanups must be on the function_obstack. */
10158 push_obstacks_nochange ();
10159 resume_temporary_allocation ();
10160
10161 /* Convert flag, which is an rtx, into a tree. */
10162 cond = make_node (RTL_EXPR);
10163 TREE_TYPE (cond) = integer_type_node;
10164 RTL_EXPR_RTL (cond) = flag;
10165 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10166 cond = save_expr (cond);
10167
10168 new_cleanups = build (COND_EXPR, void_type_node,
10169 truthvalue_conversion (cond),
10170 cleanups, integer_zero_node);
10171 new_cleanups = fold (new_cleanups);
10172
10173 pop_obstacks ();
10174
10175 /* Now add in the conditionalized cleanups. */
10176 cleanups_this_call
10177 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10178 expand_eh_region_start ();
10179 }
10180 else
10181 {
10182 emit_insns (seq1);
10183 emit_insns (seq2);
10184 }
10185 }
10186 break;
10187
10188 case COMPOUND_EXPR:
10189 push_temp_slots ();
10190 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10191 preserve_temp_slots (NULL_RTX);
10192 free_temp_slots ();
10193 pop_temp_slots ();
10194 emit_queue ();
10195 do_pending_stack_adjust ();
10196 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10197 break;
10198
10199 case COMPONENT_REF:
10200 case BIT_FIELD_REF:
10201 case ARRAY_REF:
10202 {
10203 int bitsize, bitpos, unsignedp;
10204 enum machine_mode mode;
10205 tree type;
10206 tree offset;
10207 int volatilep = 0;
10208 int alignment;
10209
10210 /* Get description of this reference. We don't actually care
10211 about the underlying object here. */
10212 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10213 &mode, &unsignedp, &volatilep,
10214 &alignment);
10215
10216 type = type_for_size (bitsize, unsignedp);
10217 if (! SLOW_BYTE_ACCESS
10218 && type != 0 && bitsize >= 0
10219 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10220 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10221 != CODE_FOR_nothing))
10222 {
10223 do_jump (convert (type, exp), if_false_label, if_true_label);
10224 break;
10225 }
10226 goto normal;
10227 }
10228
10229 case COND_EXPR:
10230 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10231 if (integer_onep (TREE_OPERAND (exp, 1))
10232 && integer_zerop (TREE_OPERAND (exp, 2)))
10233 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10234
10235 else if (integer_zerop (TREE_OPERAND (exp, 1))
10236 && integer_onep (TREE_OPERAND (exp, 2)))
10237 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10238
10239 else
10240 {
10241 rtx seq1, seq2;
10242 tree cleanups_left_side, cleanups_right_side, old_cleanups;
10243
10244 register rtx label1 = gen_label_rtx ();
10245 drop_through_label = gen_label_rtx ();
10246
10247 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10248
10249 /* We need to save the cleanups for the lhs and rhs separately.
10250 Keep track of the cleanups seen before the lhs. */
10251 old_cleanups = cleanups_this_call;
10252 start_sequence ();
10253 /* Now the THEN-expression. */
10254 do_jump (TREE_OPERAND (exp, 1),
10255 if_false_label ? if_false_label : drop_through_label,
10256 if_true_label ? if_true_label : drop_through_label);
10257 /* In case the do_jump just above never jumps. */
10258 do_pending_stack_adjust ();
10259 emit_label (label1);
10260 seq1 = get_insns ();
10261 /* Now grab the cleanups for the lhs. */
10262 cleanups_left_side = defer_cleanups_to (old_cleanups);
10263 end_sequence ();
10264
10265 /* And keep track of where we start before the rhs. */
10266 old_cleanups = cleanups_this_call;
10267 start_sequence ();
10268 /* Now the ELSE-expression. */
10269 do_jump (TREE_OPERAND (exp, 2),
10270 if_false_label ? if_false_label : drop_through_label,
10271 if_true_label ? if_true_label : drop_through_label);
10272 seq2 = get_insns ();
10273 /* Grab the cleanups for the rhs. */
10274 cleanups_right_side = defer_cleanups_to (old_cleanups);
10275 end_sequence ();
10276
10277 if (cleanups_left_side || cleanups_right_side)
10278 {
10279 /* Make the cleanups for the THEN and ELSE clauses
10280 conditional based on which half is executed. */
10281 rtx flag = gen_reg_rtx (word_mode);
10282 tree new_cleanups;
10283 tree cond;
10284
10285 /* Set the flag to 0 so that we know we executed the lhs. */
10286 emit_move_insn (flag, const0_rtx);
10287 emit_insns (seq1);
10288
10289 /* Set the flag to 1 so that we know we executed the rhs. */
10290 emit_move_insn (flag, const1_rtx);
10291 emit_insns (seq2);
10292
10293 /* Make sure the cleanup lives on the function_obstack. */
10294 push_obstacks_nochange ();
10295 resume_temporary_allocation ();
10296
10297 /* Now, build up a COND_EXPR that tests the value of the
10298 flag, and then either do the cleanups for the lhs or the
10299 rhs. */
10300 cond = make_node (RTL_EXPR);
10301 TREE_TYPE (cond) = integer_type_node;
10302 RTL_EXPR_RTL (cond) = flag;
10303 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10304 cond = save_expr (cond);
10305
10306 new_cleanups = build (COND_EXPR, void_type_node,
10307 truthvalue_conversion (cond),
10308 cleanups_right_side, cleanups_left_side);
10309 new_cleanups = fold (new_cleanups);
10310
10311 pop_obstacks ();
10312
10313 /* Now add in the conditionalized cleanups. */
10314 cleanups_this_call
10315 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10316 expand_eh_region_start ();
10317 }
10318 else
10319 {
10320 /* No cleanups were needed, so emit the two sequences
10321 directly. */
10322 emit_insns (seq1);
10323 emit_insns (seq2);
10324 }
10325 }
10326 break;
10327
10328 case EQ_EXPR:
10329 {
10330 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10331
10332 if (integer_zerop (TREE_OPERAND (exp, 1)))
10333 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10334 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10335 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10336 do_jump
10337 (fold
10338 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10339 fold (build (EQ_EXPR, TREE_TYPE (exp),
10340 fold (build1 (REALPART_EXPR,
10341 TREE_TYPE (inner_type),
10342 TREE_OPERAND (exp, 0))),
10343 fold (build1 (REALPART_EXPR,
10344 TREE_TYPE (inner_type),
10345 TREE_OPERAND (exp, 1))))),
10346 fold (build (EQ_EXPR, TREE_TYPE (exp),
10347 fold (build1 (IMAGPART_EXPR,
10348 TREE_TYPE (inner_type),
10349 TREE_OPERAND (exp, 0))),
10350 fold (build1 (IMAGPART_EXPR,
10351 TREE_TYPE (inner_type),
10352 TREE_OPERAND (exp, 1))))))),
10353 if_false_label, if_true_label);
10354 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10355 && !can_compare_p (TYPE_MODE (inner_type)))
10356 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10357 else
10358 comparison = compare (exp, EQ, EQ);
10359 break;
10360 }
10361
10362 case NE_EXPR:
10363 {
10364 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10365
10366 if (integer_zerop (TREE_OPERAND (exp, 1)))
10367 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10368 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10369 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10370 do_jump
10371 (fold
10372 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10373 fold (build (NE_EXPR, TREE_TYPE (exp),
10374 fold (build1 (REALPART_EXPR,
10375 TREE_TYPE (inner_type),
10376 TREE_OPERAND (exp, 0))),
10377 fold (build1 (REALPART_EXPR,
10378 TREE_TYPE (inner_type),
10379 TREE_OPERAND (exp, 1))))),
10380 fold (build (NE_EXPR, TREE_TYPE (exp),
10381 fold (build1 (IMAGPART_EXPR,
10382 TREE_TYPE (inner_type),
10383 TREE_OPERAND (exp, 0))),
10384 fold (build1 (IMAGPART_EXPR,
10385 TREE_TYPE (inner_type),
10386 TREE_OPERAND (exp, 1))))))),
10387 if_false_label, if_true_label);
10388 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10389 && !can_compare_p (TYPE_MODE (inner_type)))
10390 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10391 else
10392 comparison = compare (exp, NE, NE);
10393 break;
10394 }
10395
10396 case LT_EXPR:
10397 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10398 == MODE_INT)
10399 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10400 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10401 else
10402 comparison = compare (exp, LT, LTU);
10403 break;
10404
10405 case LE_EXPR:
10406 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10407 == MODE_INT)
10408 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10409 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10410 else
10411 comparison = compare (exp, LE, LEU);
10412 break;
10413
10414 case GT_EXPR:
10415 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10416 == MODE_INT)
10417 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10418 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10419 else
10420 comparison = compare (exp, GT, GTU);
10421 break;
10422
10423 case GE_EXPR:
10424 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10425 == MODE_INT)
10426 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10427 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10428 else
10429 comparison = compare (exp, GE, GEU);
10430 break;
10431
10432 default:
10433 normal:
10434 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10435 #if 0
10436 /* This is no longer needed and produces poor code, since it makes
10437 comparisons and tests from non-SI objects use different code
10438 sequences. */
10439 /* Copy to register to avoid generating bad insns by cse
10440 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10441 if (!cse_not_expected && GET_CODE (temp) == MEM)
10442 temp = copy_to_reg (temp);
10443 #endif
10444 do_pending_stack_adjust ();
10445 if (GET_CODE (temp) == CONST_INT)
10446 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10447 else if (GET_CODE (temp) == LABEL_REF)
10448 comparison = const_true_rtx;
10449 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10450 && !can_compare_p (GET_MODE (temp)))
10451 /* Note swapping the labels gives us not-equal. */
10452 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10453 else if (GET_MODE (temp) != VOIDmode)
10454 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10455 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10456 GET_MODE (temp), NULL_RTX, 0);
10457 else
10458 abort ();
10459 }
10460
10461 /* Do any postincrements in the expression that was tested. */
10462 emit_queue ();
10463
10464 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10465 straight into a conditional jump instruction as the jump condition.
10466 Otherwise, all the work has been done already. */
10467
10468 if (comparison == const_true_rtx)
10469 {
10470 if (if_true_label)
10471 emit_jump (if_true_label);
10472 }
10473 else if (comparison == const0_rtx)
10474 {
10475 if (if_false_label)
10476 emit_jump (if_false_label);
10477 }
10478 else if (comparison)
10479 do_jump_for_compare (comparison, if_false_label, if_true_label);
10480
10481 if (drop_through_label)
10482 {
10483 /* If do_jump produces code that might be jumped around,
10484 do any stack adjusts from that code, before the place
10485 where control merges in. */
10486 do_pending_stack_adjust ();
10487 emit_label (drop_through_label);
10488 }
10489 }
10490 \f
10491 /* Given a comparison expression EXP for values too wide to be compared
10492 with one insn, test the comparison and jump to the appropriate label.
10493 The code of EXP is ignored; we always test GT if SWAP is 0,
10494 and LT if SWAP is 1. */
10495
10496 static void
10497 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10498 tree exp;
10499 int swap;
10500 rtx if_false_label, if_true_label;
10501 {
10502 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10503 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10504 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10505 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10506 rtx drop_through_label = 0;
10507 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10508 int i;
10509
10510 if (! if_true_label || ! if_false_label)
10511 drop_through_label = gen_label_rtx ();
10512 if (! if_true_label)
10513 if_true_label = drop_through_label;
10514 if (! if_false_label)
10515 if_false_label = drop_through_label;
10516
10517 /* Compare a word at a time, high order first. */
10518 for (i = 0; i < nwords; i++)
10519 {
10520 rtx comp;
10521 rtx op0_word, op1_word;
10522
10523 if (WORDS_BIG_ENDIAN)
10524 {
10525 op0_word = operand_subword_force (op0, i, mode);
10526 op1_word = operand_subword_force (op1, i, mode);
10527 }
10528 else
10529 {
10530 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10531 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10532 }
10533
10534 /* All but the high-order word must be compared as unsigned. */
10535 comp = compare_from_rtx (op0_word, op1_word,
10536 (unsignedp || i > 0) ? GTU : GT,
10537 unsignedp, word_mode, NULL_RTX, 0);
10538 if (comp == const_true_rtx)
10539 emit_jump (if_true_label);
10540 else if (comp != const0_rtx)
10541 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10542
10543 /* Consider lower words only if these are equal. */
10544 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10545 NULL_RTX, 0);
10546 if (comp == const_true_rtx)
10547 emit_jump (if_false_label);
10548 else if (comp != const0_rtx)
10549 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10550 }
10551
10552 if (if_false_label)
10553 emit_jump (if_false_label);
10554 if (drop_through_label)
10555 emit_label (drop_through_label);
10556 }
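
/* The loop above is the RTL analogue of the following illustrative
   sketch (not compiled; the names are made up).  It shows the
   word-at-a-time greater-than test for a two-word value: the high-order
   words decide unless they are equal, and every word below the highest
   is compared unsigned regardless of the signedness of the original
   comparison.  */
#if 0
static int
sketch_two_word_gt (unsigned long a_hi, unsigned long a_lo,
                    unsigned long b_hi, unsigned long b_lo)
{
  if (a_hi > b_hi)
    return 1;                   /* jump to IF_TRUE_LABEL */
  if (a_hi != b_hi)
    return 0;                   /* jump to IF_FALSE_LABEL */
  return a_lo > b_lo;           /* lower word, compared unsigned */
}
#endif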
10557
10558 /* Compare OP0 with OP1, word at a time, in mode MODE.
10559 UNSIGNEDP says to do unsigned comparison.
10560 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10561
10562 void
10563 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10564 enum machine_mode mode;
10565 int unsignedp;
10566 rtx op0, op1;
10567 rtx if_false_label, if_true_label;
10568 {
10569 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10570 rtx drop_through_label = 0;
10571 int i;
10572
10573 if (! if_true_label || ! if_false_label)
10574 drop_through_label = gen_label_rtx ();
10575 if (! if_true_label)
10576 if_true_label = drop_through_label;
10577 if (! if_false_label)
10578 if_false_label = drop_through_label;
10579
10580 /* Compare a word at a time, high order first. */
10581 for (i = 0; i < nwords; i++)
10582 {
10583 rtx comp;
10584 rtx op0_word, op1_word;
10585
10586 if (WORDS_BIG_ENDIAN)
10587 {
10588 op0_word = operand_subword_force (op0, i, mode);
10589 op1_word = operand_subword_force (op1, i, mode);
10590 }
10591 else
10592 {
10593 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10594 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10595 }
10596
10597 /* All but the high-order word must be compared as unsigned. */
10598 comp = compare_from_rtx (op0_word, op1_word,
10599 (unsignedp || i > 0) ? GTU : GT,
10600 unsignedp, word_mode, NULL_RTX, 0);
10601 if (comp == const_true_rtx)
10602 emit_jump (if_true_label);
10603 else if (comp != const0_rtx)
10604 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10605
10606 /* Consider lower words only if these are equal. */
10607 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10608 NULL_RTX, 0);
10609 if (comp == const_true_rtx)
10610 emit_jump (if_false_label);
10611 else if (comp != const0_rtx)
10612 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10613 }
10614
10615 if (if_false_label)
10616 emit_jump (if_false_label);
10617 if (drop_through_label)
10618 emit_label (drop_through_label);
10619 }
10620
10621 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10622 with one insn, test the comparison and jump to the appropriate label. */
10623
10624 static void
10625 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10626 tree exp;
10627 rtx if_false_label, if_true_label;
10628 {
10629 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10630 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10631 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10632 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10633 int i;
10634 rtx drop_through_label = 0;
10635
10636 if (! if_false_label)
10637 drop_through_label = if_false_label = gen_label_rtx ();
10638
10639 for (i = 0; i < nwords; i++)
10640 {
10641 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10642 operand_subword_force (op1, i, mode),
10643 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10644 word_mode, NULL_RTX, 0);
10645 if (comp == const0_rtx)
10646 emit_jump (if_false_label);
10647 else if (comp != const_true_rtx)
10648 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10649 }
10650
10651 if (if_true_label)
10652 emit_jump (if_true_label);
10653 if (drop_through_label)
10654 emit_label (drop_through_label);
10655 }
10656 \f
10657 /* Jump according to whether OP0 is 0.
10658 We assume that OP0 has an integer mode that is too wide
10659 for the available compare insns. */
10660
10661 static void
10662 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10663 rtx op0;
10664 rtx if_false_label, if_true_label;
10665 {
10666 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10667 int i;
10668 rtx drop_through_label = 0;
10669
10670 if (! if_false_label)
10671 drop_through_label = if_false_label = gen_label_rtx ();
10672
10673 for (i = 0; i < nwords; i++)
10674 {
10675 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10676 GET_MODE (op0)),
10677 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10678 if (comp == const0_rtx)
10679 emit_jump (if_false_label);
10680 else if (comp != const_true_rtx)
10681 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10682 }
10683
10684 if (if_true_label)
10685 emit_jump (if_true_label);
10686 if (drop_through_label)
10687 emit_label (drop_through_label);
10688 }
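
/* An illustrative sketch (not compiled; the names are made up) of the
   multiword zero test above: OP0 is zero exactly when every one of its
   words is zero, so any nonzero word goes to the false label and the
   drop-through reaches the true label.  */
#if 0
static int
sketch_multiword_is_zero (unsigned long *word, int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    if (word[i] != 0)
      return 0;                 /* jump to IF_FALSE_LABEL */
  return 1;                     /* jump to IF_TRUE_LABEL */
}
#endif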
10689
10690 /* Given a comparison expression in rtl form, output conditional branches to
10691 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10692
10693 static void
10694 do_jump_for_compare (comparison, if_false_label, if_true_label)
10695 rtx comparison, if_false_label, if_true_label;
10696 {
10697 if (if_true_label)
10698 {
10699 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10700 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10701 else
10702 abort ();
10703
10704 if (if_false_label)
10705 emit_jump (if_false_label);
10706 }
10707 else if (if_false_label)
10708 {
10709 rtx insn;
10710 rtx prev = get_last_insn ();
10711 rtx branch = 0;
10712
10713 /* Output the branch with the opposite condition. Then try to invert
10714 what is generated. If more than one insn is a branch, or if the
10715 branch is not the last insn written, abort. If we can't invert
10716 the branch, make a true label, redirect this jump to that,
10717 emit a jump to the false label, and define the true label. */
10718
10719 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10720 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10721 else
10722 abort ();
10723
10724 /* Here we get the first insn that was just emitted. It used to be the
10725 case that, on some machines, emitting the branch would discard
10726 the previous compare insn and emit a replacement. This isn't
10727 done anymore, but abort if we see that PREV is deleted. */
10728
10729 if (prev == 0)
10730 insn = get_insns ();
10731 else if (INSN_DELETED_P (prev))
10732 abort ();
10733 else
10734 insn = NEXT_INSN (prev);
10735
10736 for (; insn; insn = NEXT_INSN (insn))
10737 if (GET_CODE (insn) == JUMP_INSN)
10738 {
10739 if (branch)
10740 abort ();
10741 branch = insn;
10742 }
10743
10744 if (branch != get_last_insn ())
10745 abort ();
10746
10747 JUMP_LABEL (branch) = if_false_label;
10748 if (! invert_jump (branch, if_false_label))
10749 {
10750 if_true_label = gen_label_rtx ();
10751 redirect_jump (branch, if_true_label);
10752 emit_jump (if_false_label);
10753 emit_label (if_true_label);
10754 }
10755 }
10756 }
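
/* An illustrative sketch (not compiled; the labels are made up) of the
   fallback used above when the emitted branch cannot be inverted: the
   branch keeps its original condition but is redirected to a fresh
   label, and an unconditional jump supplies the inverted behavior.  */
#if 0
static void
sketch_uninvertible_branch (int cond)
{
  if (cond)
    goto l_true;                /* original branch, redirected */
  goto l_false;                 /* newly emitted unconditional jump */
 l_true:
  return;                       /* comparison held: fall through */
 l_false:
  return;                       /* comparison failed: IF_FALSE_LABEL */
}
#endif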
10757 \f
10758 /* Generate code for a comparison expression EXP
10759 (including code to compute the values to be compared)
10760 and set (CC0) according to the result.
10761 SIGNED_CODE should be the rtx operation for this comparison for
10762 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10763
10764 We force a stack adjustment unless there are currently
10765 things pushed on the stack that aren't yet used. */
10766
10767 static rtx
10768 compare (exp, signed_code, unsigned_code)
10769 register tree exp;
10770 enum rtx_code signed_code, unsigned_code;
10771 {
10772 register rtx op0
10773 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10774 register rtx op1
10775 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10776 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10777 register enum machine_mode mode = TYPE_MODE (type);
10778 int unsignedp = TREE_UNSIGNED (type);
10779 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10780
10781 #ifdef HAVE_canonicalize_funcptr_for_compare
10782 /* If function pointers need to be "canonicalized" before they can
10783 be reliably compared, then canonicalize them. */
10784 if (HAVE_canonicalize_funcptr_for_compare
10785 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10786 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10787 == FUNCTION_TYPE))
10788 {
10789 rtx new_op0 = gen_reg_rtx (mode);
10790
10791 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10792 op0 = new_op0;
10793 }
10794
10795 if (HAVE_canonicalize_funcptr_for_compare
10796 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10797 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10798 == FUNCTION_TYPE))
10799 {
10800 rtx new_op1 = gen_reg_rtx (mode);
10801
10802 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10803 op1 = new_op1;
10804 }
10805 #endif
10806
10807 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10808 ((mode == BLKmode)
10809 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10810 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10811 }
10812
10813 /* Like compare but expects the values to compare as two rtx's.
10814 The decision as to signed or unsigned comparison must be made by the caller.
10815
10816 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10817 compared.
10818
10819 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10820 size of MODE should be used. */
10821
10822 rtx
10823 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10824 register rtx op0, op1;
10825 enum rtx_code code;
10826 int unsignedp;
10827 enum machine_mode mode;
10828 rtx size;
10829 int align;
10830 {
10831 rtx tem;
10832
10833 /* If one operand is constant, make it the second one. Only do this
10834 if the other operand is not constant as well. */
10835
10836 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10837 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10838 {
10839 tem = op0;
10840 op0 = op1;
10841 op1 = tem;
10842 code = swap_condition (code);
10843 }
10844
10845 if (flag_force_mem)
10846 {
10847 op0 = force_not_mem (op0);
10848 op1 = force_not_mem (op1);
10849 }
10850
10851 do_pending_stack_adjust ();
10852
10853 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10854 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10855 return tem;
10856
10857 #if 0
10858 /* There's no need to do this now that combine.c can eliminate lots of
10859 sign extensions. This can be less efficient in certain cases on other
10860 machines. */
10861
10862 /* If this is a signed equality comparison, we can do it as an
10863 unsigned comparison since zero-extension is cheaper than sign
10864 extension and comparisons with zero are done as unsigned. This is
10865 the case even on machines that can do fast sign extension, since
10866 zero-extension is easier to combine with other operations than
10867 sign-extension is. If we are comparing against a constant, we must
10868 convert it to what it would look like unsigned. */
10869 if ((code == EQ || code == NE) && ! unsignedp
10870 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10871 {
10872 if (GET_CODE (op1) == CONST_INT
10873 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10874 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10875 unsignedp = 1;
10876 }
10877 #endif
10878
10879 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10880
10881 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10882 }
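
/* An illustrative sketch (not compiled) of the canonicalization above:
   when the constant operand moves from first to second position, the
   condition is swapped (mirrored), not logically inverted.  */
#if 0
static int
sketch_swapped_condition (int x)
{
  /* "3 < x" canonicalizes to "x > 3"; it does NOT become "x >= 3",
     which is what inverting rather than swapping would produce.  */
  return (3 < x) == (x > 3);    /* always 1 */
}
#endif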
10883 \f
10884 /* Generate code to calculate EXP using a store-flag instruction
10885 and return an rtx for the result. EXP is either a comparison
10886 or a TRUTH_NOT_EXPR whose operand is a comparison.
10887
10888 If TARGET is nonzero, store the result there if convenient.
10889
10890 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10891 cheap.
10892
10893 Return zero if there is no suitable set-flag instruction
10894 available on this machine.
10895
10896 Once expand_expr has been called on the arguments of the comparison,
10897 we are committed to doing the store flag, since it is not safe to
10898 re-evaluate the expression. We emit the store-flag insn by calling
10899 emit_store_flag, but only expand the arguments if we have a reason
10900 to believe that emit_store_flag will be successful. If we think that
10901 it will, but it isn't, we have to simulate the store-flag with a
10902 set/jump/set sequence. */
10903
10904 static rtx
10905 do_store_flag (exp, target, mode, only_cheap)
10906 tree exp;
10907 rtx target;
10908 enum machine_mode mode;
10909 int only_cheap;
10910 {
10911 enum rtx_code code;
10912 tree arg0, arg1, type;
10913 tree tem;
10914 enum machine_mode operand_mode;
10915 int invert = 0;
10916 int unsignedp;
10917 rtx op0, op1;
10918 enum insn_code icode;
10919 rtx subtarget = target;
10920 rtx result, label, pattern, jump_pat;
10921
10922 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10923 result at the end. We can't simply invert the test since it would
10924 have already been inverted if it were valid. This case occurs for
10925 some floating-point comparisons. */
10926
10927 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10928 invert = 1, exp = TREE_OPERAND (exp, 0);
10929
10930 arg0 = TREE_OPERAND (exp, 0);
10931 arg1 = TREE_OPERAND (exp, 1);
10932 type = TREE_TYPE (arg0);
10933 operand_mode = TYPE_MODE (type);
10934 unsignedp = TREE_UNSIGNED (type);
10935
10936 /* We won't bother with BLKmode store-flag operations because it would mean
10937 passing a lot of information to emit_store_flag. */
10938 if (operand_mode == BLKmode)
10939 return 0;
10940
10941 /* We won't bother with store-flag operations involving function pointers
10942 when function pointers must be canonicalized before comparisons. */
10943 #ifdef HAVE_canonicalize_funcptr_for_compare
10944 if (HAVE_canonicalize_funcptr_for_compare
10945 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10946 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10947 == FUNCTION_TYPE))
10948 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10949 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10950 == FUNCTION_TYPE))))
10951 return 0;
10952 #endif
10953
10954 STRIP_NOPS (arg0);
10955 STRIP_NOPS (arg1);
10956
10957 /* Get the rtx comparison code to use. We know that EXP is a comparison
10958 operation of some type. Some comparisons against 1 and -1 can be
10959 converted to comparisons with zero. Do so here so that the tests
10960 below will be aware that we have a comparison with zero. These
10961 tests will not catch constants in the first operand, but constants
10962 are rarely passed as the first operand. */
10963
10964 switch (TREE_CODE (exp))
10965 {
10966 case EQ_EXPR:
10967 code = EQ;
10968 break;
10969 case NE_EXPR:
10970 code = NE;
10971 break;
10972 case LT_EXPR:
10973 if (integer_onep (arg1))
10974 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10975 else
10976 code = unsignedp ? LTU : LT;
10977 break;
10978 case LE_EXPR:
10979 if (! unsignedp && integer_all_onesp (arg1))
10980 arg1 = integer_zero_node, code = LT;
10981 else
10982 code = unsignedp ? LEU : LE;
10983 break;
10984 case GT_EXPR:
10985 if (! unsignedp && integer_all_onesp (arg1))
10986 arg1 = integer_zero_node, code = GE;
10987 else
10988 code = unsignedp ? GTU : GT;
10989 break;
10990 case GE_EXPR:
10991 if (integer_onep (arg1))
10992 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10993 else
10994 code = unsignedp ? GEU : GE;
10995 break;
10996 default:
10997 abort ();
10998 }
10999
11000 /* Put a constant second. */
11001 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11002 {
11003 tem = arg0; arg0 = arg1; arg1 = tem;
11004 code = swap_condition (code);
11005 }
11006
11007 /* If this is an equality or inequality test of a single bit, we can
11008 do this by shifting the bit being tested to the low-order bit and
11009 masking the result with the constant 1. If the condition was EQ,
11010 we xor it with 1. This does not require an scc insn and is faster
11011 than an scc insn even if we have it. */
11012
11013 if ((code == NE || code == EQ)
11014 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11015 && integer_pow2p (TREE_OPERAND (arg0, 1))
11016 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
11017 {
11018 tree inner = TREE_OPERAND (arg0, 0);
11019 HOST_WIDE_INT tem;
11020 int bitnum;
11021 int ops_unsignedp;
11022
11023 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
11024 NULL_RTX, VOIDmode, 0));
11025 /* In this case, immed_double_const will sign extend the value to make
11026 it look the same on the host and target. We must remove the
11027 sign-extension before calling exact_log2, since exact_log2 will
11028 fail for negative values. */
11029 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
11030 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
11031 /* We don't use the obvious constant shift to generate the mask,
11032 because that generates compiler warnings when BITS_PER_WORD is
11033 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
11034 code is unreachable in that case. */
11035 tem = tem & GET_MODE_MASK (word_mode);
11036 bitnum = exact_log2 (tem);
11037
11038 /* If INNER is a right shift of a constant and it plus BITNUM does
11039 not overflow, adjust BITNUM and INNER. */
11040
11041 if (TREE_CODE (inner) == RSHIFT_EXPR
11042 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11043 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11044 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11045 < TYPE_PRECISION (type)))
11046 {
11047 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11048 inner = TREE_OPERAND (inner, 0);
11049 }
11050
11051 /* If we are going to be able to omit the AND below, we must do our
11052 operations as unsigned. If we must use the AND, we have a choice.
11053 Normally unsigned is faster, but for some machines signed is. */
11054 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11055 #ifdef LOAD_EXTEND_OP
11056 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11057 #else
11058 : 1
11059 #endif
11060 );
11061
11062 if (subtarget == 0 || GET_CODE (subtarget) != REG
11063 || GET_MODE (subtarget) != operand_mode
11064 || ! safe_from_p (subtarget, inner))
11065 subtarget = 0;
11066
11067 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11068
11069 if (bitnum != 0)
11070 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11071 size_int (bitnum), subtarget, ops_unsignedp);
11072
11073 if (GET_MODE (op0) != mode)
11074 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11075
11076 if ((code == EQ && ! invert) || (code == NE && invert))
11077 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11078 ops_unsignedp, OPTAB_LIB_WIDEN);
11079
11080 /* Put the AND last so it can combine with more things. */
11081 if (bitnum != TYPE_PRECISION (type) - 1)
11082 op0 = expand_and (op0, const1_rtx, subtarget);
11083
11084 return op0;
11085 }
11086
11087 /* Now see if we are likely to be able to do this. Return if not. */
11088 if (! can_compare_p (operand_mode))
11089 return 0;
11090 icode = setcc_gen_code[(int) code];
11091 if (icode == CODE_FOR_nothing
11092 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11093 {
11094 /* We can only do this if it is one of the special cases that
11095 can be handled without an scc insn. */
11096 if ((code == LT && integer_zerop (arg1))
11097 || (! only_cheap && code == GE && integer_zerop (arg1)))
11098 ;
11099 else if (BRANCH_COST >= 0
11100 && ! only_cheap && (code == NE || code == EQ)
11101 && TREE_CODE (type) != REAL_TYPE
11102 && ((abs_optab->handlers[(int) operand_mode].insn_code
11103 != CODE_FOR_nothing)
11104 || (ffs_optab->handlers[(int) operand_mode].insn_code
11105 != CODE_FOR_nothing)))
11106 ;
11107 else
11108 return 0;
11109 }
11110
11111 preexpand_calls (exp);
11112 if (subtarget == 0 || GET_CODE (subtarget) != REG
11113 || GET_MODE (subtarget) != operand_mode
11114 || ! safe_from_p (subtarget, arg1))
11115 subtarget = 0;
11116
11117 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11118 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11119
11120 if (target == 0)
11121 target = gen_reg_rtx (mode);
11122
11123 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11124 because, if emit_store_flag does anything, it will succeed and
11125 OP0 and OP1 will not be used subsequently. */
11126
11127 result = emit_store_flag (target, code,
11128 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11129 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11130 operand_mode, unsignedp, 1);
11131
11132 if (result)
11133 {
11134 if (invert)
11135 result = expand_binop (mode, xor_optab, result, const1_rtx,
11136 result, 0, OPTAB_LIB_WIDEN);
11137 return result;
11138 }
11139
11140 /* If this failed, we have to do this with set/compare/jump/set code. */
11141 if (GET_CODE (target) != REG
11142 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11143 target = gen_reg_rtx (GET_MODE (target));
11144
11145 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11146 result = compare_from_rtx (op0, op1, code, unsignedp,
11147 operand_mode, NULL_RTX, 0);
11148 if (GET_CODE (result) == CONST_INT)
11149 return (((result == const0_rtx && ! invert)
11150 || (result != const0_rtx && invert))
11151 ? const0_rtx : const1_rtx);
11152
11153 label = gen_label_rtx ();
11154 if (bcc_gen_fctn[(int) code] == 0)
11155 abort ();
11156
11157 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11158 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11159 emit_label (label);
11160
11161 return target;
11162 }
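
/* An illustrative sketch (not compiled; the names are made up) of the
   single-bit store-flag trick used above: shift the tested bit into the
   low-order position, mask with 1, and xor with 1 for the EQ sense,
   all without a set-flag instruction.  */
#if 0
static unsigned int
sketch_bit_test_ne (unsigned int x, int bitnum)
{
  return (x >> bitnum) & 1;             /* (x & (1 << bitnum)) != 0 */
}

static unsigned int
sketch_bit_test_eq (unsigned int x, int bitnum)
{
  return ((x >> bitnum) ^ 1) & 1;       /* (x & (1 << bitnum)) == 0 */
}
#endif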
11163 \f
11164 /* Generate a tablejump instruction (used for switch statements). */
11165
11166 #ifdef HAVE_tablejump
11167
11168 /* INDEX is the value being switched on, with the lowest value
11169 in the table already subtracted.
11170 MODE is its expected mode (needed if INDEX is constant).
11171 RANGE is the length of the jump table.
11172 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11173
11174 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11175 index value is out of range. */
11176
11177 void
11178 do_tablejump (index, mode, range, table_label, default_label)
11179 rtx index, range, table_label, default_label;
11180 enum machine_mode mode;
11181 {
11182 register rtx temp, vector;
11183
11184 /* Do an unsigned comparison (in the proper mode) between the index
11185 expression and the value which represents the length of the range.
11186 Since we just finished subtracting the lower bound of the range
11187 from the index expression, this comparison allows us to simultaneously
11188 check that the original index expression value is both greater than
11189 or equal to the minimum value of the range and less than or equal to
11190 the maximum value of the range. */
11191
11192 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11193 emit_jump_insn (gen_bgtu (default_label));
11194
11195 /* If index is in range, it must fit in Pmode.
11196 Convert to Pmode so we can index with it. */
11197 if (mode != Pmode)
11198 index = convert_to_mode (Pmode, index, 1);
11199
11200 /* Don't let a MEM slip through, because then INDEX that comes
11201 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11202 and break_out_memory_refs will go to work on it and mess it up. */
11203 #ifdef PIC_CASE_VECTOR_ADDRESS
11204 if (flag_pic && GET_CODE (index) != REG)
11205 index = copy_to_mode_reg (Pmode, index);
11206 #endif
11207
11208 /* If flag_force_addr were to affect this address
11209 it could interfere with the tricky assumptions made
11210 about addresses that contain label-refs,
11211 which may be valid only very near the tablejump itself. */
11212 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11213 GET_MODE_SIZE, because this indicates how large insns are. The other
11214 uses should all be Pmode, because they are addresses. This code
11215 could fail if addresses and insns are not the same size. */
11216 index = gen_rtx (PLUS, Pmode,
11217 gen_rtx (MULT, Pmode, index,
11218 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11219 gen_rtx (LABEL_REF, Pmode, table_label));
11220 #ifdef PIC_CASE_VECTOR_ADDRESS
11221 if (flag_pic)
11222 index = PIC_CASE_VECTOR_ADDRESS (index);
11223 else
11224 #endif
11225 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11226 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11227 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11228 RTX_UNCHANGING_P (vector) = 1;
11229 convert_move (temp, vector, 0);
11230
11231 emit_jump_insn (gen_tablejump (temp, table_label));
11232
11233 #ifndef CASE_VECTOR_PC_RELATIVE
11234 /* If we are generating PIC code or if the table is PC-relative, the
11235 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11236 if (! flag_pic)
11237 emit_barrier ();
11238 #endif
11239 }
11240
11241 #endif /* HAVE_tablejump */
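
/* An illustrative sketch (not compiled; the names are made up) of the
   range check performed above: once the low bound has been subtracted
   from the index, a single unsigned comparison tests both ends of the
   case range, because values below the low bound wrap around to very
   large unsigned numbers and fail the same test.  */
#if 0
static int
sketch_case_in_range (int i, int lo, int hi)
{
  return (unsigned int) (i - lo) <= (unsigned int) (hi - lo);
}
#endif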
11242
11243
11244 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11245 to that value is on the top of the stack. The resulting type is TYPE, and
11246 the source declaration is DECL. */
11247
11248 void
11249 bc_load_memory (type, decl)
11250 tree type, decl;
11251 {
11252 enum bytecode_opcode opcode;
11253
11254
11255 /* Bit fields are special. We only know about signed and
11256 unsigned ints, and enums. The latter are treated as
11257 signed integers. */
11258
11259 if (DECL_BIT_FIELD (decl))
11260 if (TREE_CODE (type) == ENUMERAL_TYPE
11261 || TREE_CODE (type) == INTEGER_TYPE)
11262 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11263 else
11264 abort ();
11265 else
11266 /* See corresponding comment in bc_store_memory(). */
11267 if (TYPE_MODE (type) == BLKmode
11268 || TYPE_MODE (type) == VOIDmode)
11269 return;
11270 else
11271 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11272
11273 if (opcode == neverneverland)
11274 abort ();
11275
11276 bc_emit_bytecode (opcode);
11277
11278 #ifdef DEBUG_PRINT_CODE
11279 fputc ('\n', stderr);
11280 #endif
11281 }
11282
11283
11284 /* Store the contents of the second stack slot to the address in the
11285 top stack slot. DECL is the declaration of the destination and is used
11286 to determine whether we're dealing with a bitfield. */
11287
11288 void
11289 bc_store_memory (type, decl)
11290 tree type, decl;
11291 {
11292 enum bytecode_opcode opcode;
11293
11294
11295 if (DECL_BIT_FIELD (decl))
11296 {
11297 if (TREE_CODE (type) == ENUMERAL_TYPE
11298 || TREE_CODE (type) == INTEGER_TYPE)
11299 opcode = sstoreBI;
11300 else
11301 abort ();
11302 }
11303 else
11304 if (TYPE_MODE (type) == BLKmode)
11305 {
11306 /* Copy structure. This expands to a block copy instruction, storeBLK.
11307 In addition to the arguments expected by the other store instructions,
11308 it also expects a type size (SImode) on top of the stack, which is the
11309 structure size in size units (usually bytes). The first two arguments
11310 are already on the stack, so we just put the size on level 1. In some
11311 other languages the size may be variable; this is why we don't encode
11312 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11313
11314 bc_expand_expr (TYPE_SIZE (type));
11315 opcode = storeBLK;
11316 }
11317 else
11318 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11319
11320 if (opcode == neverneverland)
11321 abort ();
11322
11323 bc_emit_bytecode (opcode);
11324
11325 #ifdef DEBUG_PRINT_CODE
11326 fputc ('\n', stderr);
11327 #endif
11328 }
11329
11330
11331 /* Allocate local stack space sufficient to hold a value of the given
11332 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11333 integral power of 2. A special case is locals of type VOID, which
11334 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11335 remapped into the corresponding attribute of SI. */
11336
11337 rtx
11338 bc_allocate_local (size, alignment)
11339 int size, alignment;
11340 {
11341 rtx retval;
11342 int byte_alignment;
11343
11344 if (size < 0)
11345 abort ();
11346
11347 /* Normalize size and alignment */
11348 if (!size)
11349 size = UNITS_PER_WORD;
11350
11351 if (alignment < BITS_PER_UNIT)
11352 byte_alignment = 1 << (INT_ALIGN - 1);
11353 else
11354 /* Align */
11355 byte_alignment = alignment / BITS_PER_UNIT;
11356
11357 if (local_vars_size & (byte_alignment - 1))
11358 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11359
11360 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11361 local_vars_size += size;
11362
11363 return retval;
11364 }
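
/* An illustrative sketch (not compiled; the name is made up) of the
   alignment arithmetic above: round an offset up to the next multiple
   of a power-of-two alignment.  */
#if 0
static int
sketch_round_up (int offset, int align)
{
  if (offset & (align - 1))
    offset += align - (offset & (align - 1));
  return offset;                /* sketch_round_up (5, 4) == 8 */
}
#endif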
11365
11366
11367 /* Allocate variable-sized local array. Variable-sized arrays are
11368 actually pointers to the address in memory where they are stored. */
11369
11370 rtx
11371 bc_allocate_variable_array (size)
11372 tree size;
11373 {
11374 rtx retval;
11375 const int ptralign = (1 << (PTR_ALIGN - 1));
11376
11377 /* Align pointer */
11378 if (local_vars_size & ptralign)
11379 local_vars_size += ptralign - (local_vars_size & ptralign);
11380
11381 /* Note down local space needed: pointer to block; also return
11382 dummy rtx */
11383
11384 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11385 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11386 return retval;
11387 }
11388
11389
11390 /* Push the machine address for the given external variable offset. */
11391
11392 void
11393 bc_load_externaddr (externaddr)
11394 rtx externaddr;
11395 {
11396 bc_emit_bytecode (constP);
11397 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11398 BYTECODE_BC_LABEL (externaddr)->offset);
11399
11400 #ifdef DEBUG_PRINT_CODE
11401 fputc ('\n', stderr);
11402 #endif
11403 }
11404
11405
11406 /* Like above, but expects an IDENTIFIER. */
11407
11408 void
11409 bc_load_externaddr_id (id, offset)
11410 tree id;
11411 int offset;
11412 {
11413 if (!IDENTIFIER_POINTER (id))
11414 abort ();
11415
11416 bc_emit_bytecode (constP);
11417 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11418
11419 #ifdef DEBUG_PRINT_CODE
11420 fputc ('\n', stderr);
11421 #endif
11422 }
11423
11424
11425 /* Push the machine address for the given local variable offset. */
11426
11427 void
11428 bc_load_localaddr (localaddr)
11429 rtx localaddr;
11430 {
11431 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11432 }
11433
11434
11435 /* Push the machine address for the given parameter offset.
11436 NOTE: offset is in bits. */
11437
11438 void
11439 bc_load_parmaddr (parmaddr)
11440 rtx parmaddr;
11441 {
11442 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11443 / BITS_PER_UNIT));
11444 }
11445
11446
11447 /* Convert a[i] into *(a + i). */
11448
11449 tree
11450 bc_canonicalize_array_ref (exp)
11451 tree exp;
11452 {
11453 tree type = TREE_TYPE (exp);
11454 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11455 TREE_OPERAND (exp, 0));
11456 tree index = TREE_OPERAND (exp, 1);
11457
11458
11459 /* Convert the integer argument to a type the same size as a pointer
11460 so the multiply won't overflow spuriously. */
11461
11462 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11463 index = convert (type_for_size (POINTER_SIZE, 0), index);
11464
11465 /* The array address isn't volatile even if the array is.
11466 (Of course this isn't terribly relevant since the bytecode
11467 translator treats nearly everything as volatile anyway.) */
11468 TREE_THIS_VOLATILE (array_adr) = 0;
11469
11470 return build1 (INDIRECT_REF, type,
11471 fold (build (PLUS_EXPR,
11472 TYPE_POINTER_TO (type),
11473 array_adr,
11474 fold (build (MULT_EXPR,
11475 TYPE_POINTER_TO (type),
11476 index,
11477 size_in_bytes (type))))));
11478 }
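
/* An illustrative sketch (not compiled) of the rewrite performed above,
   at the source level: an array reference becomes explicit pointer
   arithmetic scaled by the element size in bytes.  */
#if 0
static int
sketch_array_ref (int *a, long i)
{
  return *(int *) ((char *) a + i * (long) sizeof (int));  /* a[i] */
}
#endif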
11479
11480
11481 /* Load the address of the component referenced by the given
11482 COMPONENT_REF expression.
11483
11484 Returns innermost lvalue. */
11485
11486 tree
11487 bc_expand_component_address (exp)
11488 tree exp;
11489 {
11490 tree tem, chain;
11491 enum machine_mode mode;
11492 int bitpos = 0;
11493 HOST_WIDE_INT SIval;
11494
11495
11496 tem = TREE_OPERAND (exp, 1);
11497 mode = DECL_MODE (tem);
11498
11499
11500 /* Compute cumulative bit offset for nested component refs
11501 and array refs, and find the ultimate containing object. */
11502
11503 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11504 {
11505 if (TREE_CODE (tem) == COMPONENT_REF)
11506 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11507 else
11508 if (TREE_CODE (tem) == ARRAY_REF
11509 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11510 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11511
11512 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11513 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11514 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11515 else
11516 break;
11517 }
11518
11519 bc_expand_expr (tem);
11520
11521
11522 /* For bitfields also push their offset and size */
11523 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11524 bc_push_offset_and_size (bitpos, TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))) /* * DECL_SIZE_UNIT */);
11525 else
11526 if ((SIval = bitpos / BITS_PER_UNIT))
11527 bc_emit_instruction (addconstPSI, SIval);
11528
11529 return (TREE_OPERAND (exp, 1));
11530 }
11531
11532
11533 /* Emit code to push two SI constants */
11534
11535 void
11536 bc_push_offset_and_size (offset, size)
11537 HOST_WIDE_INT offset, size;
11538 {
11539 bc_emit_instruction (constSI, offset);
11540 bc_emit_instruction (constSI, size);
11541 }
11542
11543
11544 /* Emit byte code to push the address of the given lvalue expression to
11545 the stack. If it's a bit field, we also push offset and size info.
11546
11547 Returns innermost component, which allows us to determine not only
11548 its type, but also whether it's a bitfield. */
11549
11550 tree
11551 bc_expand_address (exp)
11552 tree exp;
11553 {
11554 /* Safeguard */
11555 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11556 return (exp);
11557
11558
11559 switch (TREE_CODE (exp))
11560 {
11561 case ARRAY_REF:
11562
11563 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11564
11565 case COMPONENT_REF:
11566
11567 return (bc_expand_component_address (exp));
11568
11569 case INDIRECT_REF:
11570
11571 bc_expand_expr (TREE_OPERAND (exp, 0));
11572
11573 /* For variable-sized types: retrieve pointer. Sometimes the
11574 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11575 also make sure we have an operand, just in case... */
11576
11577 if (TREE_OPERAND (exp, 0)
11578 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11579 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11580 bc_emit_instruction (loadP);
11581
11582 /* If packed, also return offset and size */
11583 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11584
11585 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11586 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11587
11588 return (TREE_OPERAND (exp, 0));
11589
11590 case FUNCTION_DECL:
11591
11592 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11593 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11594 break;
11595
11596 case PARM_DECL:
11597
11598 bc_load_parmaddr (DECL_RTL (exp));
11599
11600 /* For variable-sized types: retrieve pointer */
11601 if (TYPE_SIZE (TREE_TYPE (exp))
11602 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11603 bc_emit_instruction (loadP);
11604
11605 /* If packed, also return offset and size */
11606 if (DECL_BIT_FIELD (exp))
11607 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11608 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11609
11610 break;
11611
11612 case RESULT_DECL:
11613
11614 bc_emit_instruction (returnP);
11615 break;
11616
11617 case VAR_DECL:
11618
11619 #if 0
11620 if (BYTECODE_LABEL (DECL_RTL (exp)))
11621 bc_load_externaddr (DECL_RTL (exp));
11622 #endif
11623
11624 if (DECL_EXTERNAL (exp))
11625 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11626 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11627 else
11628 bc_load_localaddr (DECL_RTL (exp));
11629
11630 /* For variable-sized types: retrieve pointer */
11631 if (TYPE_SIZE (TREE_TYPE (exp))
11632 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11633 bc_emit_instruction (loadP);
11634
11635 /* If packed, also return offset and size */
11636 if (DECL_BIT_FIELD (exp))
11637 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11638 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11639
11640 break;
11641
11642 case STRING_CST:
11643 {
11644 rtx r;
11645
11646 bc_emit_bytecode (constP);
11647 r = output_constant_def (exp);
11648 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11649
11650 #ifdef DEBUG_PRINT_CODE
11651 fputc ('\n', stderr);
11652 #endif
11653 }
11654 break;
11655
11656 default:
11657
11658 abort ();
11659 break;
11660 }
11661
11662 /* Most lvalues don't have components. */
11663 return (exp);
11664 }
11665
11666
11667 /* Emit a type code to be used by the runtime support in handling
11668 parameter passing. The type code consists of the machine mode
11669 plus the minimal alignment shifted left 8 bits. */
11670
11671 tree
11672 bc_runtime_type_code (type)
11673 tree type;
11674 {
11675 int val;
11676
11677 switch (TREE_CODE (type))
11678 {
11679 case VOID_TYPE:
11680 case INTEGER_TYPE:
11681 case REAL_TYPE:
11682 case COMPLEX_TYPE:
11683 case ENUMERAL_TYPE:
11684 case POINTER_TYPE:
11685 case RECORD_TYPE:
11686
11687 val = (int) TYPE_MODE (type) | (TYPE_ALIGN (type) << 8);
11688 break;
11689
11690 case ERROR_MARK:
11691
11692 val = 0;
11693 break;
11694
11695 default:
11696
11697 abort ();
11698 }
11699 return build_int_2 (val, 0);
11700 }
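
/* An illustrative sketch (not compiled; the names are made up) of the
   encoding above, assuming the machine mode fits in the low 8 bits:
   the runtime can recover both fields with a mask and a shift.  */
#if 0
static void
sketch_decode_type_code (int code, int *mode, int *align)
{
  *mode = code & 0xff;          /* machine mode, low 8 bits */
  *align = code >> 8;           /* minimal alignment, remaining bits */
}
#endif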
11701
11702
11703 /* Generate constructor label */
11704
11705 char *
11706 bc_gen_constr_label ()
11707 {
11708 static int label_counter;
11709 static char label[20];
11710
11711 sprintf (label, "*LR%d", label_counter++);
11712
11713 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11714 }
11715
11716
11717 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11718 expand the constructor data as static data, and push a pointer to it.
11719 The pointer is put in the pointer table and is retrieved by a constP
11720 bytecode instruction. We then loop and store each constructor member in
11721 the corresponding component. Finally, we return the original pointer on
11722 the stack. */
11723
11724 void
11725 bc_expand_constructor (constr)
11726 tree constr;
11727 {
11728 char *l;
11729 HOST_WIDE_INT ptroffs;
11730 rtx constr_rtx;
11731
11732
11733 /* Literal constructors are handled as constants, whereas
11734 non-literals are evaluated and stored element by element
11735 into the data segment. */
11736
11737 /* Allocate space in the proper segment and push a pointer to the space on the stack. */
11739
11740 l = bc_gen_constr_label ();
11741
11742 if (TREE_CONSTANT (constr))
11743 {
11744 text_section ();
11745
11746 bc_emit_const_labeldef (l);
11747 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11748 }
11749 else
11750 {
11751 data_section ();
11752
11753 bc_emit_data_labeldef (l);
11754 bc_output_data_constructor (constr);
11755 }
11756
11757
11758 /* Add reference to pointer table and recall pointer to stack;
11759 this code is common for both types of constructors: literals
11760 and non-literals. */
11761
11762 ptroffs = bc_define_pointer (l);
11763 bc_emit_instruction (constP, ptroffs);
11764
11765 /* This is all that has to be done if it's a literal. */
11766 if (TREE_CONSTANT (constr))
11767 return;
11768
11769
11770 /* At this point, we have the pointer to the structure on top of the stack.
11771 Generate sequences of store_memory calls for the constructor. */
11772
11773 /* constructor type is structure */
11774 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11775 {
11776 register tree elt;
11777
11778 /* If the constructor has fewer fields than the structure,
11779 clear the whole structure first. */
11780
11781 if (list_length (CONSTRUCTOR_ELTS (constr))
11782 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11783 {
11784 bc_emit_instruction (duplicate);
11785 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11786 bc_emit_instruction (clearBLK);
11787 }
11788
11789 /* Store each element of the constructor into the corresponding
11790 field of TARGET. */
11791
11792 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11793 {
11794 register tree field = TREE_PURPOSE (elt);
11795 register enum machine_mode mode;
11796 int bitsize;
11797 int bitpos;
11798 int unsignedp;
11799
11800 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11801 mode = DECL_MODE (field);
11802 unsignedp = TREE_UNSIGNED (field);
11803
11804 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11805
11806 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11807 /* The alignment of TARGET is
11808 at least what its type requires. */
11809 VOIDmode, 0,
11810 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11811 int_size_in_bytes (TREE_TYPE (constr)));
11812 }
11813 }
11814 else
11815
11816 /* Constructor type is array */
11817 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11818 {
11819 register tree elt;
11820 register int i;
11821 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11822 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11823 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11824 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11825
11826 /* If the constructor has fewer elements than the array,
11827 clear the whole array first. */
11828
11829 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11830 {
11831 bc_emit_instruction (duplicate);
11832 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11833 bc_emit_instruction (clearBLK);
11834 }
11835
11836
11837 /* Store each element of the constructor into the corresponding
11838 element of TARGET, determined by counting the elements. */
11839
11840 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11841 elt;
11842 elt = TREE_CHAIN (elt), i++)
11843 {
11844 register enum machine_mode mode;
11845 int bitsize;
11846 int bitpos;
11847 int unsignedp;
11848
11849 mode = TYPE_MODE (elttype);
11850 bitsize = GET_MODE_BITSIZE (mode);
11851 unsignedp = TREE_UNSIGNED (elttype);
11852
11853 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11854 /* * TYPE_SIZE_UNIT (elttype) */ );
11855
11856 bc_store_field (elt, bitsize, bitpos, mode,
11857 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11858 /* The alignment of TARGET is
11859 at least what its type requires. */
11860 VOIDmode, 0,
11861 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11862 int_size_in_bytes (TREE_TYPE (constr)));
11863 }
11864
11865 }
11866 }
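
/* An illustrative sketch (not compiled) of the strategy above at the
   source level: when an aggregate initializer supplies fewer values
   than the type has members, the whole object is cleared first and the
   supplied values are then stored one by one.  memset here stands in
   for the clearBLK instruction and is as declared in <string.h>.  */
#if 0
static void
sketch_partial_constructor (void)
{
  struct three { int a, b, c; } x;

  memset (&x, 0, sizeof x);     /* clearBLK on the whole object */
  x.a = 1;                      /* store the one supplied element */
}
#endif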
11867
11868
11869 /* Store the value of EXP (an expression tree) into member FIELD of
11870 structure at address on stack, which has type TYPE, mode MODE and
11871 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11872 structure.
11873
11874 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11875 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11876
11877 void
11878 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11879 value_mode, unsignedp, align, total_size)
11880 int bitsize, bitpos;
11881 enum machine_mode mode;
11882 tree field, exp, type;
11883 enum machine_mode value_mode;
11884 int unsignedp;
11885 int align;
11886 int total_size;
11887 {
11888
11889 /* Expand expression and copy pointer */
11890 bc_expand_expr (exp);
11891 bc_emit_instruction (over);
11892
11893
11894 /* If the component is a bit field, we cannot use addressing to access
11895 it. Use bit-field techniques to store in it. */
11896
11897 if (DECL_BIT_FIELD (field))
11898 {
11899 bc_store_bit_field (bitpos, bitsize, unsignedp);
11900 return;
11901 }
11902 else
11903 /* Not bit field */
11904 {
11905 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11906
11907 /* Advance pointer to the desired member */
11908 if (offset)
11909 bc_emit_instruction (addconstPSI, offset);
11910
11911 /* Store */
11912 bc_store_memory (type, field);
11913 }
11914 }
11915
11916
11917 /* Store SI/SU in bitfield */
11918
11919 void
11920 bc_store_bit_field (offset, size, unsignedp)
11921 int offset, size, unsignedp;
11922 {
11923 /* Push bitfield offset and size */
11924 bc_push_offset_and_size (offset, size);
11925
11926 /* Store */
11927 bc_emit_instruction (sstoreBI);
11928 }
11929
11930
11931 /* Load SI/SU from bitfield */
11932
11933 void
11934 bc_load_bit_field (offset, size, unsignedp)
11935 int offset, size, unsignedp;
11936 {
11937 /* Push bitfield offset and size */
11938 bc_push_offset_and_size (offset, size);
11939
11940 /* Load: sign-extend if signed, else zero-extend */
11941 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11942 }
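
/* An illustrative sketch (not compiled; the names are made up) of the
   extraction the zxloadBI/sxloadBI bytecodes perform with the pushed
   offset and size, for a 32-bit word, 0 < size < 32 and
   offset + size <= 32, assuming an arithmetic right shift on signed
   values.  */
#if 0
static unsigned int
sketch_zxload (unsigned int w, int offset, int size)
{
  return (w >> offset) & ((1u << size) - 1);         /* zero-extend */
}

static int
sketch_sxload (unsigned int w, int offset, int size)
{
  int shift = 32 - size;
  return ((int) (w >> offset) << shift) >> shift;    /* sign-extend */
}
#endif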
11943
11944
11945 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11946 (adjust stack pointer upwards), negative means add that number of
11947 levels (adjust the stack pointer downwards). Only positive values
11948 normally make sense. */
11949
11950 void
11951 bc_adjust_stack (nlevels)
11952 int nlevels;
11953 {
11954 switch (nlevels)
11955 {
11956 case 0:
11957 break;
11958
11959 case 2:
11960 bc_emit_instruction (drop);
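/* Fall through: dropping two levels is done as two single drops.  */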
11961
11962 case 1:
11963 bc_emit_instruction (drop);
11964 break;
11965
11966 default:
11967
11968 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11969 stack_depth -= nlevels;
11970 }
11971
11972 #if defined (VALIDATE_STACK_FOR_BC)
11973 VALIDATE_STACK_FOR_BC ();
11974 #endif
11975 }