gcc/expr.c: Fix SI followed by 4 DFs under AIX
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"


#define CEIL(x,y) (((x) + (y) - 1) / (y))
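
/* For illustration: CEIL (10, 4) == 3, i.e. ten bytes occupy three
   four-byte units.  It is used below in forms such as
   CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD) to count the words
   needed to hold a value.  */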

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
void bc_expand_increment PROTO((struct increment_operator *, tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
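
/* For illustration: with MOVE_RATIO of 15, a constant-size copy that
   move_by_pieces_ninsns says needs fewer than 15 scalar moves (say, a
   16-byte block on a 32-bit target, which needs 4 SImode moves at
   sufficient alignment) is expanded inline by move_by_pieces in
   emit_block_move below, rather than through a movstr pattern or a
   library call.  */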

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
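
/* For illustration, each DEF_MODEMAP line in modemap.def expands into
   three table assignments.  A hypothetical entry such as

     DEF_MODEMAP (SImode, SIcode, SIucode, constSI, loadSI, storeSI)

   would become

     mode_to_const_map[(int) SImode] = constSI;
     mode_to_load_map[(int) SImode] = loadSI;
     mode_to_store_map[(int) SImode] = storeSI;

   (The names above are made up; see modemap.def for the real entries.)  */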
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
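
/* For illustration, a caller expanding something like `a = b++' uses
   the queue roughly as follows (a sketch with hypothetical rtx
   variables, not code from this file):

     rtx q = enqueue_insn (b_rtx, gen_move_insn (b_rtx, b_plus_1));
     rtx old = protect_from_queue (q, 0);  -- the pre-increment value
     emit_move_insn (a_rtx, old);
     emit_queue ();                        -- now emit the increment

   As the comment above warns, protect_from_queue must be called
   immediately before the value is put into an instruction.  */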

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
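
/* Note on the representation (descriptive): each QUEUED rtx built by
   enqueue_insn carries the variable (QUEUED_VAR), the insn body to
   emit later (QUEUED_BODY), the insn actually emitted once emit_queue
   runs (QUEUED_INSN), an optional pre-increment copy (QUEUED_COPY),
   and the link to the next entry (QUEUED_NEXT), so the pending queue
   is a simple singly linked list rooted at pending_chain.  */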
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
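
/* For illustration (hypothetical caller): given a QImode register FROM
   and an SImode register TO,

     convert_move (to, from, 0);  -- sign-extends the 8-bit value
     convert_move (to, from, 1);  -- zero-extends it

   using an extendqisi2 or zero_extendqisi2 pattern when the target
   provides one, and falling back to the strategies above otherwise.  */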

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
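
/* Worked example of the CONST_INT widening above (illustrative):
   widening the QImode constant 0xFF (width 8) to SImode gives
   val & 0xFF = 255; with unsignedp clear, bit 7 is set, so
   val |= -1 << 8 yields -1, i.e. the sign-extended value.  With
   unsignedp set, the result stays 255.  */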
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
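
/* Worked example (illustrative): on a 32-bit target with MOVE_MAX of 4,
   full alignment, and mov patterns for SImode, HImode and QImode,
   move_by_pieces_ninsns (7, 4) counts 7/4 = 1 SImode move (leaving 3
   bytes), then 3/2 = 1 HImode move (leaving 1), then 1 QImode move:
   3 insns in all.  move_by_pieces emits exactly that descending
   sequence of moves.  */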

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than
		 HOST_BITS_PER_WIDE_INT here because if SIZE is less than
		 the mode mask, as it is returned by the macro, it will
		 definitely be less than the actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }
}
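
/* For illustration, the strategy order above is: (1) a compile-time
   size small enough that fewer than MOVE_RATIO scalar moves suffice is
   expanded inline by move_by_pieces; (2) otherwise each movstrMM
   pattern is tried from the narrowest mode up; (3) failing that, the
   copy becomes a call to memcpy (or bcopy without
   TARGET_MEM_FUNCTIONS).  A hypothetical caller copying N constant
   bytes at word alignment would write

     emit_block_move (x, y, GEN_INT (n), UNITS_PER_WORD);

   and emit_block_move itself runs the operands through
   protect_from_queue.  */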
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   <= GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
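
/* For illustration, such a PARALLEL holds (reg, byte-offset) pairs;
   each element is an EXPR_LIST whose XEXP 0 is the register and whose
   XEXP 1 is a CONST_INT offset into the value.  A hypothetical
   two-register layout might look like

     (parallel [(expr_list (reg:SI 3) (const_int 0))
		(expr_list (reg:SI 4) (const_int 4))])

   with a null register in the first element standing for a part passed
   on the stack, as the NULL-entry check above handles.  */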
1804
1805 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1806 registers represented by a PARALLEL. */
1807
1808 void
1809 emit_group_store (x, y)
1810 rtx x, y;
1811 {
1812 rtx source_reg, target;
1813 int i;
1814
1815 if (GET_CODE (y) != PARALLEL)
1816 abort ();
1817
1818 /* Check for a NULL entry, used to indicate that the parameter goes
1819 both on the stack and in registers. */
1820 if (XEXP (XVECEXP (y, 0, 0), 0))
1821 i = 0;
1822 else
1823 i = 1;
1824
1825 for (; i < XVECLEN (y, 0); i++)
1826 {
1827 rtx element = XVECEXP (y, 0, i);
1828
1829 source_reg = XEXP (element, 0);
1830
1831 if (GET_CODE (x) == MEM)
1832 target = change_address (x, GET_MODE (source_reg),
1833 plus_constant (XEXP (x, 0),
1834 INTVAL (XEXP (element, 1))));
1835 else if (XEXP (element, 1) == const0_rtx)
1836 target = x;
1837 else
1838 abort ();
1839
1840 emit_move_insn (target, source_reg);
1841 }
1842 }
1843
1844 /* Add a USE expression for REG to the (possibly empty) list pointed
1845 to by CALL_FUSAGE. REG must denote a hard register. */
1846
1847 void
1848 use_reg (call_fusage, reg)
1849 rtx *call_fusage, reg;
1850 {
1851 if (GET_CODE (reg) != REG
1852 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1853 abort();
1854
1855 *call_fusage
1856 = gen_rtx (EXPR_LIST, VOIDmode,
1857 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1858 }
1859
1860 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1861 starting at REGNO. All of these registers must be hard registers. */
1862
1863 void
1864 use_regs (call_fusage, regno, nregs)
1865 rtx *call_fusage;
1866 int regno;
1867 int nregs;
1868 {
1869 int i;
1870
1871 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1872 abort ();
1873
1874 for (i = 0; i < nregs; i++)
1875 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1876 }
1877
1878 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1879 PARALLEL REGS. This is for calls that pass values in multiple
1880 non-contiguous locations. The Irix 6 ABI has examples of this. */
1881
1882 void
1883 use_group_regs (call_fusage, regs)
1884 rtx *call_fusage;
1885 rtx regs;
1886 {
1887 int i;
1888
1889 /* Check for a NULL entry, used to indicate that the parameter goes
1890 both on the stack and in registers. */
1891 if (XEXP (XVECEXP (regs, 0, 0), 0))
1892 i = 0;
1893 else
1894 i = 1;
1895
1896 for (; i < XVECLEN (regs, 0); i++)
1897 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1898 }
1899 \f
1900 /* Generate several move instructions to clear LEN bytes of block TO.
1901 (A MEM rtx with BLKmode). The caller must pass TO through
1902 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1903 we can assume. */
1904
1905 static void
1906 clear_by_pieces (to, len, align)
1907 rtx to;
1908 int len, align;
1909 {
1910 struct clear_by_pieces data;
1911 rtx to_addr = XEXP (to, 0);
1912 int max_size = MOVE_MAX + 1;
1913
1914 data.offset = 0;
1915 data.to_addr = to_addr;
1916 data.to = to;
1917 data.autinc_to
1918 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1919 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1920
1921 data.explicit_inc_to = 0;
1922 data.reverse
1923 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1924 if (data.reverse) data.offset = len;
1925 data.len = len;
1926
1927 data.to_struct = MEM_IN_STRUCT_P (to);
1928
1929 /* If clearing requires more than two move insns,
1930 copy the address to a register (to make displacements shorter)
1931 and use post-increment if available. */
1932 if (!data.autinc_to
1933 && move_by_pieces_ninsns (len, align) > 2)
1934 {
1935 #ifdef HAVE_PRE_DECREMENT
1936 if (data.reverse && ! data.autinc_to)
1937 {
1938 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1939 data.autinc_to = 1;
1940 data.explicit_inc_to = -1;
1941 }
1942 #endif
1943 #ifdef HAVE_POST_INCREMENT
1944 if (! data.reverse && ! data.autinc_to)
1945 {
1946 data.to_addr = copy_addr_to_reg (to_addr);
1947 data.autinc_to = 1;
1948 data.explicit_inc_to = 1;
1949 }
1950 #endif
1951 if (!data.autinc_to && CONSTANT_P (to_addr))
1952 data.to_addr = copy_addr_to_reg (to_addr);
1953 }
1954
1955 if (! SLOW_UNALIGNED_ACCESS
1956 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1957 align = MOVE_MAX;
1958
1959 /* First move what we can in the largest integer mode, then go to
1960 successively smaller modes. */
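/* For example, assuming a 32-bit target where MOVE_MAX is 4 and the
   alignment permits it, LEN == 7 is cleared by one SImode store, then
   one HImode store, then one QImode store: MAX_SIZE starts at 5, so
   SImode is the widest mode tried, and each iteration narrows
   MAX_SIZE to the size of the mode just used.  */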
1961
1962 while (max_size > 1)
1963 {
1964 enum machine_mode mode = VOIDmode, tmode;
1965 enum insn_code icode;
1966
1967 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1968 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1969 if (GET_MODE_SIZE (tmode) < max_size)
1970 mode = tmode;
1971
1972 if (mode == VOIDmode)
1973 break;
1974
1975 icode = mov_optab->handlers[(int) mode].insn_code;
1976 if (icode != CODE_FOR_nothing
1977 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1978 GET_MODE_SIZE (mode)))
1979 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
1980
1981 max_size = GET_MODE_SIZE (mode);
1982 }
1983
1984 /* The code above should have handled everything. */
1985 if (data.len != 0)
1986 abort ();
1987 }
1988
1989 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
1990 with move instructions for mode MODE. GENFUN is the gen_... function
1991 to make a move insn for that mode. DATA has all the other info. */
1992
1993 static void
1994 clear_by_pieces_1 (genfun, mode, data)
1995 rtx (*genfun) ();
1996 enum machine_mode mode;
1997 struct clear_by_pieces *data;
1998 {
1999 register int size = GET_MODE_SIZE (mode);
2000 register rtx to1;
2001
2002 while (data->len >= size)
2003 {
2004 if (data->reverse) data->offset -= size;
2005
2006 to1 = (data->autinc_to
2007 ? gen_rtx (MEM, mode, data->to_addr)
2008 : change_address (data->to, mode,
2009 plus_constant (data->to_addr, data->offset)));
2010 MEM_IN_STRUCT_P (to1) = data->to_struct;
2011
2012 #ifdef HAVE_PRE_DECREMENT
2013 if (data->explicit_inc_to < 0)
2014 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2015 #endif
2016
2017 emit_insn ((*genfun) (to1, const0_rtx));
2018 #ifdef HAVE_POST_INCREMENT
2019 if (data->explicit_inc_to > 0)
2020 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2021 #endif
2022
2023 if (! data->reverse) data->offset += size;
2024
2025 data->len -= size;
2026 }
2027 }
2028 \f
2029 /* Write zeros through the storage of OBJECT.
2030 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2031 the maximum alignment we can assume it has, measured in bytes. */
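
/* A usage sketch (operands invented): clearing a 32-byte BLKmode
   block known to be 4-byte aligned might look like

     clear_storage (blk_mem, GEN_INT (32), 4);

   which uses clear_by_pieces, a clrstr pattern, or a library call,
   whichever applies below; a non-BLKmode OBJECT gets a single move
   of const0_rtx.  */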
2032
2033 void
2034 clear_storage (object, size, align)
2035 rtx object;
2036 rtx size;
2037 int align;
2038 {
2039 if (GET_MODE (object) == BLKmode)
2040 {
2041 object = protect_from_queue (object, 1);
2042 size = protect_from_queue (size, 0);
2043
2044 if (GET_CODE (size) == CONST_INT
2045 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2046 clear_by_pieces (object, INTVAL (size), align);
2047
2048 else
2049 {
2050 /* Try the most limited insn first, because there's no point
2051 including more than one in the machine description unless
2052 the more limited one has some advantage. */
2053
2054 rtx opalign = GEN_INT (align);
2055 enum machine_mode mode;
2056
2057 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2058 mode = GET_MODE_WIDER_MODE (mode))
2059 {
2060 enum insn_code code = clrstr_optab[(int) mode];
2061
2062 if (code != CODE_FOR_nothing
2063 /* We don't need MODE to be narrower than
2064 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2065 the mode mask, as it is returned by the macro, it will
2066 definitely be less than the actual mode mask. */
2067 && ((GET_CODE (size) == CONST_INT
2068 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2069 <= GET_MODE_MASK (mode)))
2070 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2071 && (insn_operand_predicate[(int) code][0] == 0
2072 || (*insn_operand_predicate[(int) code][0]) (object,
2073 BLKmode))
2074 && (insn_operand_predicate[(int) code][2] == 0
2075 || (*insn_operand_predicate[(int) code][2]) (opalign,
2076 VOIDmode)))
2077 {
2078 rtx op1;
2079 rtx last = get_last_insn ();
2080 rtx pat;
2081
2082 op1 = convert_to_mode (mode, size, 1);
2083 if (insn_operand_predicate[(int) code][1] != 0
2084 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2085 mode))
2086 op1 = copy_to_mode_reg (mode, op1);
2087
2088 pat = GEN_FCN ((int) code) (object, op1, opalign);
2089 if (pat)
2090 {
2091 emit_insn (pat);
2092 return;
2093 }
2094 else
2095 delete_insns_since (last);
2096 }
2097 }
2098
2099
2100 #ifdef TARGET_MEM_FUNCTIONS
2101 emit_library_call (memset_libfunc, 0,
2102 VOIDmode, 3,
2103 XEXP (object, 0), Pmode,
2104 const0_rtx, TYPE_MODE (integer_type_node),
2105 convert_to_mode (TYPE_MODE (sizetype),
2106 size, TREE_UNSIGNED (sizetype)),
2107 TYPE_MODE (sizetype));
2108 #else
2109 emit_library_call (bzero_libfunc, 0,
2110 VOIDmode, 2,
2111 XEXP (object, 0), Pmode,
2112 convert_to_mode (TYPE_MODE (integer_type_node),
2113 size,
2114 TREE_UNSIGNED (integer_type_node)),
2115 TYPE_MODE (integer_type_node));
2116 #endif
2117 }
2118 }
2119 else
2120 emit_move_insn (object, const0_rtx);
2121 }
2122
2123 /* Generate code to copy Y into X.
2124 Both Y and X must have the same mode, except that
2125 Y can be a constant with VOIDmode.
2126 This mode cannot be BLKmode; use emit_block_move for that.
2127
2128 Return the last instruction emitted. */
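
/* A minimal sketch of a call (the pseudo is hypothetical):

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   which emits (set (reg:SI N) (const_int 42)); the CONST_INT source
   is allowed even though it carries VOIDmode.  */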
2129
2130 rtx
2131 emit_move_insn (x, y)
2132 rtx x, y;
2133 {
2134 enum machine_mode mode = GET_MODE (x);
2135
2136 x = protect_from_queue (x, 1);
2137 y = protect_from_queue (y, 0);
2138
2139 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2140 abort ();
2141
2142 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2143 y = force_const_mem (mode, y);
2144
2145 /* If X or Y are memory references, verify that their addresses are valid
2146 for the machine. */
2147 if (GET_CODE (x) == MEM
2148 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2149 && ! push_operand (x, GET_MODE (x)))
2150 || (flag_force_addr
2151 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2152 x = change_address (x, VOIDmode, XEXP (x, 0));
2153
2154 if (GET_CODE (y) == MEM
2155 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2156 || (flag_force_addr
2157 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2158 y = change_address (y, VOIDmode, XEXP (y, 0));
2159
2160 if (mode == BLKmode)
2161 abort ();
2162
2163 return emit_move_insn_1 (x, y);
2164 }
2165
2166 /* Low level part of emit_move_insn.
2167 Called just like emit_move_insn, but assumes X and Y
2168 are basically valid. */
2169
2170 rtx
2171 emit_move_insn_1 (x, y)
2172 rtx x, y;
2173 {
2174 enum machine_mode mode = GET_MODE (x);
2175 enum machine_mode submode;
2176 enum mode_class class = GET_MODE_CLASS (mode);
2177 int i;
2178
2179 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2180 return
2181 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2182
2183 /* Expand complex moves by moving real part and imag part, if possible. */
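/* For example, on a hypothetical target with a DFmode move pattern
   but no DCmode one, a DCmode move becomes one DFmode move of the
   real parts (gen_realpart) and one of the imaginary parts
   (gen_imagpart).  */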
2184 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2185 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2186 * BITS_PER_UNIT),
2187 (class == MODE_COMPLEX_INT
2188 ? MODE_INT : MODE_FLOAT),
2189 0))
2190 && (mov_optab->handlers[(int) submode].insn_code
2191 != CODE_FOR_nothing))
2192 {
2193 /* Don't split destination if it is a stack push. */
2194 int stack = push_operand (x, GET_MODE (x));
2195 rtx insns;
2196
2197 /* If this is a stack push, push the highpart first, so it
2198 will be in the argument order.
2199
2200 In that case, change_address is used only to convert
2201 the mode, not to change the address. */
2202 if (stack)
2203 {
2204 /* Note that the real part always precedes the imag part in memory
2205 regardless of machine's endianness. */
2206 #ifdef STACK_GROWS_DOWNWARD
2207 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2208 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2209 gen_imagpart (submode, y)));
2210 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2211 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2212 gen_realpart (submode, y)));
2213 #else
2214 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2215 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2216 gen_realpart (submode, y)));
2217 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2218 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2219 gen_imagpart (submode, y)));
2220 #endif
2221 }
2222 else
2223 {
2224 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2225 (gen_realpart (submode, x), gen_realpart (submode, y)));
2226 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2227 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2228 }
2229
2230 return get_last_insn ();
2231 }
2232
2233 /* This will handle any multi-word mode that lacks a move_insn pattern.
2234 However, you will get better code if you define such patterns,
2235 even if they must turn into multiple assembler instructions. */
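/* For example, with UNITS_PER_WORD == 4 and no movdi pattern, a
   DImode move comes out as two SImode moves of the subwords obtained
   via operand_subword, preceded by a CLOBBER of the destination.  */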
2236 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2237 {
2238 rtx last_insn = 0;
2239 rtx insns;
2240
2241 #ifdef PUSH_ROUNDING
2242
2243 /* If X is a push on the stack, do the push now and replace
2244 X with a reference to the stack pointer. */
2245 if (push_operand (x, GET_MODE (x)))
2246 {
2247 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2248 x = change_address (x, VOIDmode, stack_pointer_rtx);
2249 }
2250 #endif
2251
2252 /* Show the output dies here. */
2253 if (x != y)
2254 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2255
2256 for (i = 0;
2257 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2258 i++)
2259 {
2260 rtx xpart = operand_subword (x, i, 1, mode);
2261 rtx ypart = operand_subword (y, i, 1, mode);
2262
2263 /* If we can't get a part of Y, put Y into memory if it is a
2264 constant. Otherwise, force it into a register. If we still
2265 can't get a part of Y, abort. */
2266 if (ypart == 0 && CONSTANT_P (y))
2267 {
2268 y = force_const_mem (mode, y);
2269 ypart = operand_subword (y, i, 1, mode);
2270 }
2271 else if (ypart == 0)
2272 ypart = operand_subword_force (y, i, mode);
2273
2274 if (xpart == 0 || ypart == 0)
2275 abort ();
2276
2277 last_insn = emit_move_insn (xpart, ypart);
2278 }
2279
2280 return last_insn;
2281 }
2282 else
2283 abort ();
2284 }
2285 \f
2286 /* Pushing data onto the stack. */
2287
2288 /* Push a block of length SIZE (perhaps variable)
2289 and return an rtx to address the beginning of the block.
2290 Note that it is not possible for the value returned to be a QUEUED.
2291 The value may be virtual_outgoing_args_rtx.
2292
2293 EXTRA is the number of bytes of padding to push in addition to SIZE.
2294 BELOW nonzero means this padding comes at low addresses;
2295 otherwise, the padding comes at high addresses. */
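
/* A sketch (the size is arbitrary):

     rtx addr = push_block (GEN_INT (16), 0, 0);

   adjusts the stack by 16 bytes and yields an address for the low
   end of the block, typically based on virtual_outgoing_args_rtx.  */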
2296
2297 rtx
2298 push_block (size, extra, below)
2299 rtx size;
2300 int extra, below;
2301 {
2302 register rtx temp;
2303
2304 size = convert_modes (Pmode, ptr_mode, size, 1);
2305 if (CONSTANT_P (size))
2306 anti_adjust_stack (plus_constant (size, extra));
2307 else if (GET_CODE (size) == REG && extra == 0)
2308 anti_adjust_stack (size);
2309 else
2310 {
2311 rtx temp = copy_to_mode_reg (Pmode, size);
2312 if (extra != 0)
2313 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2314 temp, 0, OPTAB_LIB_WIDEN);
2315 anti_adjust_stack (temp);
2316 }
2317
2318 #ifdef STACK_GROWS_DOWNWARD
2319 temp = virtual_outgoing_args_rtx;
2320 if (extra != 0 && below)
2321 temp = plus_constant (temp, extra);
2322 #else
2323 if (GET_CODE (size) == CONST_INT)
2324 temp = plus_constant (virtual_outgoing_args_rtx,
2325 - INTVAL (size) - (below ? 0 : extra));
2326 else if (extra != 0 && !below)
2327 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2328 negate_rtx (Pmode, plus_constant (size, extra)));
2329 else
2330 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2331 negate_rtx (Pmode, size));
2332 #endif
2333
2334 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2335 }
2336
2337 rtx
2338 gen_push_operand ()
2339 {
2340 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2341 }
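
/* For example, where STACK_PUSH_CODE is PRE_DEC this returns
   (pre_dec:Pmode (reg sp)), so gen_rtx (MEM, mode, gen_push_operand ())
   is a memory destination that pushes onto the stack; see its uses in
   emit_push_insn below.  */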
2342
2343 /* Generate code to push X onto the stack, assuming it has mode MODE and
2344 type TYPE.
2345 MODE is redundant except when X is a CONST_INT (since they don't
2346 carry mode info).
2347 SIZE is an rtx for the size of data to be copied (in bytes),
2348 needed only if X is BLKmode.
2349
2350 ALIGN (in bytes) is the maximum alignment we can assume.
2351
2352 If PARTIAL and REG are both nonzero, then copy that many of the first
2353 words of X into registers starting with REG, and push the rest of X.
2354 The amount of space pushed is decreased by PARTIAL words,
2355 rounded *down* to a multiple of PARM_BOUNDARY.
2356 REG must be a hard register in this case.
2357 If REG is zero but PARTIAL is not, take all other actions for an
2358 argument partially in registers, but do not actually load any
2359 registers.
2360
2361 EXTRA is the amount in bytes of extra space to leave next to this arg.
2362 This is ignored if an argument block has already been allocated.
2363
2364 On a machine that lacks real push insns, ARGS_ADDR is the address of
2365 the bottom of the argument block for this call. We use indexing off there
2366 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2367 argument block has not been preallocated.
2368
2369 ARGS_SO_FAR is the size of args previously pushed for this call. */
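
/* A hypothetical call (every operand invented for illustration):
   pushing a 16-byte BLKmode argument whose first two words travel in
   registers starting at REG, on a machine with push insns, could be

     emit_push_insn (x, BLKmode, type, GEN_INT (16), 4,
                     2, reg, 0, NULL_RTX, const0_rtx);

   the body below pushes the tail of X and, at the end, copies the
   first PARTIAL words into REG and its successors.  */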
2370
2371 void
2372 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2373 args_addr, args_so_far)
2374 register rtx x;
2375 enum machine_mode mode;
2376 tree type;
2377 rtx size;
2378 int align;
2379 int partial;
2380 rtx reg;
2381 int extra;
2382 rtx args_addr;
2383 rtx args_so_far;
2384 {
2385 rtx xinner;
2386 enum direction stack_direction
2387 #ifdef STACK_GROWS_DOWNWARD
2388 = downward;
2389 #else
2390 = upward;
2391 #endif
2392
2393 /* Decide where to pad the argument: `downward' for below,
2394 `upward' for above, or `none' for don't pad it.
2395 Default is below for small data on big-endian machines; else above. */
2396 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2397
2398 /* If we're placing part of X into a register and part of X onto
2399 the stack, indicate that the entire register is clobbered to
2400 keep flow from thinking the unused part of the register is live. */
2401 if (partial > 0 && reg != 0)
2402 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2403
2404 /* Invert direction if stack is post-update. */
2405 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2406 if (where_pad != none)
2407 where_pad = (where_pad == downward ? upward : downward);
2408
2409 xinner = x = protect_from_queue (x, 0);
2410
2411 if (mode == BLKmode)
2412 {
2413 /* Copy a block into the stack, entirely or partially. */
2414
2415 register rtx temp;
2416 int used = partial * UNITS_PER_WORD;
2417 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2418 int skip;
2419
2420 if (size == 0)
2421 abort ();
2422
2423 used -= offset;
2424
2425 /* USED is now the # of bytes we need not copy to the stack
2426 because registers will take care of them. */
2427
2428 if (partial != 0)
2429 xinner = change_address (xinner, BLKmode,
2430 plus_constant (XEXP (xinner, 0), used));
2431
2432 /* If the partial register-part of the arg counts in its stack size,
2433 skip the part of stack space corresponding to the registers.
2434 Otherwise, start copying to the beginning of the stack space,
2435 by setting SKIP to 0. */
2436 #ifndef REG_PARM_STACK_SPACE
2437 skip = 0;
2438 #else
2439 skip = used;
2440 #endif
2441
2442 #ifdef PUSH_ROUNDING
2443 /* Do it with several push insns if that doesn't take lots of insns
2444 and if there is no difficulty with push insns that skip bytes
2445 on the stack for alignment purposes. */
2446 if (args_addr == 0
2447 && GET_CODE (size) == CONST_INT
2448 && skip == 0
2449 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2450 < MOVE_RATIO)
2451 /* Here we avoid the case of a structure whose weak alignment
2452 forces many pushes of a small amount of data,
2453 and such small pushes do rounding that causes trouble. */
2454 && ((! SLOW_UNALIGNED_ACCESS)
2455 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2456 || PUSH_ROUNDING (align) == align)
2457 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2458 {
2459 /* Push padding now if padding above and stack grows down,
2460 or if padding below and stack grows up.
2461 But if space already allocated, this has already been done. */
2462 if (extra && args_addr == 0
2463 && where_pad != none && where_pad != stack_direction)
2464 anti_adjust_stack (GEN_INT (extra));
2465
2466 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2467 INTVAL (size) - used, align);
2468 }
2469 else
2470 #endif /* PUSH_ROUNDING */
2471 {
2472 /* Otherwise make space on the stack and copy the data
2473 to the address of that space. */
2474
2475 /* Deduct words put into registers from the size we must copy. */
2476 if (partial != 0)
2477 {
2478 if (GET_CODE (size) == CONST_INT)
2479 size = GEN_INT (INTVAL (size) - used);
2480 else
2481 size = expand_binop (GET_MODE (size), sub_optab, size,
2482 GEN_INT (used), NULL_RTX, 0,
2483 OPTAB_LIB_WIDEN);
2484 }
2485
2486 /* Get the address of the stack space.
2487 In this case, we do not deal with EXTRA separately.
2488 A single stack adjust will do. */
2489 if (! args_addr)
2490 {
2491 temp = push_block (size, extra, where_pad == downward);
2492 extra = 0;
2493 }
2494 else if (GET_CODE (args_so_far) == CONST_INT)
2495 temp = memory_address (BLKmode,
2496 plus_constant (args_addr,
2497 skip + INTVAL (args_so_far)));
2498 else
2499 temp = memory_address (BLKmode,
2500 plus_constant (gen_rtx (PLUS, Pmode,
2501 args_addr, args_so_far),
2502 skip));
2503
2504 /* TEMP is the address of the block. Copy the data there. */
2505 if (GET_CODE (size) == CONST_INT
2506 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2507 < MOVE_RATIO))
2508 {
2509 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2510 INTVAL (size), align);
2511 goto ret;
2512 }
2513 /* Try the most limited insn first, because there's no point
2514 including more than one in the machine description unless
2515 the more limited one has some advantage. */
2516 #ifdef HAVE_movstrqi
2517 if (HAVE_movstrqi
2518 && GET_CODE (size) == CONST_INT
2519 && ((unsigned) INTVAL (size)
2520 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2521 {
2522 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2523 xinner, size, GEN_INT (align));
2524 if (pat != 0)
2525 {
2526 emit_insn (pat);
2527 goto ret;
2528 }
2529 }
2530 #endif
2531 #ifdef HAVE_movstrhi
2532 if (HAVE_movstrhi
2533 && GET_CODE (size) == CONST_INT
2534 && ((unsigned) INTVAL (size)
2535 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2536 {
2537 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2538 xinner, size, GEN_INT (align));
2539 if (pat != 0)
2540 {
2541 emit_insn (pat);
2542 goto ret;
2543 }
2544 }
2545 #endif
2546 #ifdef HAVE_movstrsi
2547 if (HAVE_movstrsi)
2548 {
2549 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2550 xinner, size, GEN_INT (align));
2551 if (pat != 0)
2552 {
2553 emit_insn (pat);
2554 goto ret;
2555 }
2556 }
2557 #endif
2558 #ifdef HAVE_movstrdi
2559 if (HAVE_movstrdi)
2560 {
2561 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2562 xinner, size, GEN_INT (align));
2563 if (pat != 0)
2564 {
2565 emit_insn (pat);
2566 goto ret;
2567 }
2568 }
2569 #endif
2570
2571 #ifndef ACCUMULATE_OUTGOING_ARGS
2572 /* If the source is referenced relative to the stack pointer,
2573 copy it to another register to stabilize it. We do not need
2574 to do this if we know that we won't be changing sp. */
2575
2576 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2577 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2578 temp = copy_to_reg (temp);
2579 #endif
2580
2581 /* Make inhibit_defer_pop nonzero around the library call
2582 to force it to pop the bcopy-arguments right away. */
2583 NO_DEFER_POP;
2584 #ifdef TARGET_MEM_FUNCTIONS
2585 emit_library_call (memcpy_libfunc, 0,
2586 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2587 convert_to_mode (TYPE_MODE (sizetype),
2588 size, TREE_UNSIGNED (sizetype)),
2589 TYPE_MODE (sizetype));
2590 #else
2591 emit_library_call (bcopy_libfunc, 0,
2592 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2593 convert_to_mode (TYPE_MODE (integer_type_node),
2594 size,
2595 TREE_UNSIGNED (integer_type_node)),
2596 TYPE_MODE (integer_type_node));
2597 #endif
2598 OK_DEFER_POP;
2599 }
2600 }
2601 else if (partial > 0)
2602 {
2603 /* Scalar partly in registers. */
2604
2605 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2606 int i;
2607 int not_stack;
2608 /* # words of start of argument
2609 that we must make space for but need not store. */
2610 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2611 int args_offset = INTVAL (args_so_far);
2612 int skip;
2613
2614 /* Push padding now if padding above and stack grows down,
2615 or if padding below and stack grows up.
2616 But if space already allocated, this has already been done. */
2617 if (extra && args_addr == 0
2618 && where_pad != none && where_pad != stack_direction)
2619 anti_adjust_stack (GEN_INT (extra));
2620
2621 /* If we make space by pushing it, we might as well push
2622 the real data. Otherwise, we can leave OFFSET nonzero
2623 and leave the space uninitialized. */
2624 if (args_addr == 0)
2625 offset = 0;
2626
2627 /* Now NOT_STACK gets the number of words that we don't need to
2628 allocate on the stack. */
2629 not_stack = partial - offset;
2630
2631 /* If the partial register-part of the arg counts in its stack size,
2632 skip the part of stack space corresponding to the registers.
2633 Otherwise, start copying to the beginning of the stack space,
2634 by setting SKIP to 0. */
2635 #ifndef REG_PARM_STACK_SPACE
2636 skip = 0;
2637 #else
2638 skip = not_stack;
2639 #endif
2640
2641 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2642 x = validize_mem (force_const_mem (mode, x));
2643
2644 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2645 SUBREGs of such registers are not allowed. */
2646 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2647 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2648 x = copy_to_reg (x);
2649
2650 /* Loop over all the words allocated on the stack for this arg. */
2651 /* We can do it by words, because any scalar bigger than a word
2652 has a size a multiple of a word. */
2653 #ifndef PUSH_ARGS_REVERSED
2654 for (i = not_stack; i < size; i++)
2655 #else
2656 for (i = size - 1; i >= not_stack; i--)
2657 #endif
2658 if (i >= not_stack + offset)
2659 emit_push_insn (operand_subword_force (x, i, mode),
2660 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2661 0, args_addr,
2662 GEN_INT (args_offset + ((i - not_stack + skip)
2663 * UNITS_PER_WORD)));
2664 }
2665 else
2666 {
2667 rtx addr;
2668
2669 /* Push padding now if padding above and stack grows down,
2670 or if padding below and stack grows up.
2671 But if space already allocated, this has already been done. */
2672 if (extra && args_addr == 0
2673 && where_pad != none && where_pad != stack_direction)
2674 anti_adjust_stack (GEN_INT (extra));
2675
2676 #ifdef PUSH_ROUNDING
2677 if (args_addr == 0)
2678 addr = gen_push_operand ();
2679 else
2680 #endif
2681 if (GET_CODE (args_so_far) == CONST_INT)
2682 addr
2683 = memory_address (mode,
2684 plus_constant (args_addr, INTVAL (args_so_far)));
2685 else
2686 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2687 args_so_far));
2688
2689 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2690 }
2691
2692 ret:
2693 /* If part should go in registers, copy that part
2694 into the appropriate registers. Do this now, at the end,
2695 since mem-to-mem copies above may do function calls. */
2696 if (partial > 0 && reg != 0)
2697 {
2698 /* Handle calls that pass values in multiple non-contiguous locations.
2699 The Irix 6 ABI has examples of this. */
2700 if (GET_CODE (reg) == PARALLEL)
2701 emit_group_load (reg, x);
2702 else
2703 move_block_to_reg (REGNO (reg), x, partial, mode);
2704 }
2705
2706 if (extra && args_addr == 0 && where_pad == stack_direction)
2707 anti_adjust_stack (GEN_INT (extra));
2708 }
2709 \f
2710 /* Expand an assignment that stores the value of FROM into TO.
2711 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2712 (This may contain a QUEUED rtx;
2713 if the value is constant, this rtx is a constant.)
2714 Otherwise, the returned value is NULL_RTX.
2715
2716 SUGGEST_REG is no longer actually used.
2717 It used to mean, copy the value through a register
2718 and return that register, if that is possible.
2719 We now use WANT_VALUE to decide whether to do this. */
2720
2721 rtx
2722 expand_assignment (to, from, want_value, suggest_reg)
2723 tree to, from;
2724 int want_value;
2725 int suggest_reg;
2726 {
2727 register rtx to_rtx = 0;
2728 rtx result;
2729
2730 /* Don't crash if the lhs of the assignment was erroneous. */
2731
2732 if (TREE_CODE (to) == ERROR_MARK)
2733 {
2734 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2735 return want_value ? result : NULL_RTX;
2736 }
2737
2738 if (output_bytecode)
2739 {
2740 tree dest_innermost;
2741
2742 bc_expand_expr (from);
2743 bc_emit_instruction (duplicate);
2744
2745 dest_innermost = bc_expand_address (to);
2746
2747 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2748 take care of it here. */
2749
2750 bc_store_memory (TREE_TYPE (to), dest_innermost);
2751 return NULL;
2752 }
2753
2754 /* Assignment of a structure component needs special treatment
2755 if the structure component's rtx is not simply a MEM.
2756 Assignment of an array element at a constant index, and assignment of
2757 an array element in an unaligned packed structure field, have the same
2758 problem. */
2759
2760 if (TREE_CODE (to) == COMPONENT_REF
2761 || TREE_CODE (to) == BIT_FIELD_REF
2762 || (TREE_CODE (to) == ARRAY_REF
2763 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2764 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2765 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2766 {
2767 enum machine_mode mode1;
2768 int bitsize;
2769 int bitpos;
2770 tree offset;
2771 int unsignedp;
2772 int volatilep = 0;
2773 tree tem;
2774 int alignment;
2775
2776 push_temp_slots ();
2777 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2778 &mode1, &unsignedp, &volatilep);
2779
2780 /* If we are going to use store_bit_field and extract_bit_field,
2781 make sure to_rtx will be safe for multiple use. */
2782
2783 if (mode1 == VOIDmode && want_value)
2784 tem = stabilize_reference (tem);
2785
2786 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2787 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2788 if (offset != 0)
2789 {
2790 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2791
2792 if (GET_CODE (to_rtx) != MEM)
2793 abort ();
2794 to_rtx = change_address (to_rtx, VOIDmode,
2795 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2796 force_reg (ptr_mode, offset_rtx)));
2797 /* If we have a variable offset, the known alignment
2798 is only that of the innermost structure containing the field.
2799 (Actually, we could sometimes do better by using the
2800 align of an element of the innermost array, but no need.) */
2801 if (TREE_CODE (to) == COMPONENT_REF
2802 || TREE_CODE (to) == BIT_FIELD_REF)
2803 alignment
2804 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2805 }
2806 if (volatilep)
2807 {
2808 if (GET_CODE (to_rtx) == MEM)
2809 {
2810 /* When the offset is zero, to_rtx is the address of the
2811 structure we are storing into, and hence may be shared.
2812 We must make a new MEM before setting the volatile bit. */
2813 if (offset == 0)
2814 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2815 MEM_VOLATILE_P (to_rtx) = 1;
2816 }
2817 #if 0 /* This was turned off because, when a field is volatile
2818 in an object which is not volatile, the object may be in a register,
2819 and then we would abort over here. */
2820 else
2821 abort ();
2822 #endif
2823 }
2824
2825 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2826 (want_value
2827 /* Spurious cast makes HPUX compiler happy. */
2828 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2829 : VOIDmode),
2830 unsignedp,
2831 /* Required alignment of containing datum. */
2832 alignment,
2833 int_size_in_bytes (TREE_TYPE (tem)));
2834 preserve_temp_slots (result);
2835 free_temp_slots ();
2836 pop_temp_slots ();
2837
2838 /* If the value is meaningful, convert RESULT to the proper mode.
2839 Otherwise, return nothing. */
2840 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2841 TYPE_MODE (TREE_TYPE (from)),
2842 result,
2843 TREE_UNSIGNED (TREE_TYPE (to)))
2844 : NULL_RTX);
2845 }
2846
2847 /* If the rhs is a function call and its value is not an aggregate,
2848 call the function before we start to compute the lhs.
2849 This is needed for correct code for cases such as
2850 val = setjmp (buf) on machines where reference to val
2851 requires loading up part of an address in a separate insn.
2852
2853 Don't do this if TO is a VAR_DECL whose DECL_RTL is a REG, since it might be
2854 a promoted variable where the zero- or sign-extension needs to be done.
2855 Handling this in the normal way is safe because no computation is done
2856 before the call. */
2857 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2858 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2859 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2860 {
2861 rtx value;
2862
2863 push_temp_slots ();
2864 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2865 if (to_rtx == 0)
2866 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2867
2868 /* Handle calls that return values in multiple non-contiguous locations.
2869 The Irix 6 ABI has examples of this. */
2870 if (GET_CODE (to_rtx) == PARALLEL)
2871 emit_group_load (to_rtx, value);
2872 else if (GET_MODE (to_rtx) == BLKmode)
2873 emit_block_move (to_rtx, value, expr_size (from),
2874 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2875 else
2876 emit_move_insn (to_rtx, value);
2877 preserve_temp_slots (to_rtx);
2878 free_temp_slots ();
2879 pop_temp_slots ();
2880 return want_value ? to_rtx : NULL_RTX;
2881 }
2882
2883 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2884 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2885
2886 if (to_rtx == 0)
2887 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2888
2889 /* Don't move directly into a return register. */
2890 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2891 {
2892 rtx temp;
2893
2894 push_temp_slots ();
2895 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2896 emit_move_insn (to_rtx, temp);
2897 preserve_temp_slots (to_rtx);
2898 free_temp_slots ();
2899 pop_temp_slots ();
2900 return want_value ? to_rtx : NULL_RTX;
2901 }
2902
2903 /* In case we are returning the contents of an object which overlaps
2904 the place the value is being stored, use a safe function when copying
2905 a value through a pointer into a structure value return block. */
2906 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2907 && current_function_returns_struct
2908 && !current_function_returns_pcc_struct)
2909 {
2910 rtx from_rtx, size;
2911
2912 push_temp_slots ();
2913 size = expr_size (from);
2914 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2915
2916 #ifdef TARGET_MEM_FUNCTIONS
2917 emit_library_call (memcpy_libfunc, 0,
2918 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2919 XEXP (from_rtx, 0), Pmode,
2920 convert_to_mode (TYPE_MODE (sizetype),
2921 size, TREE_UNSIGNED (sizetype)),
2922 TYPE_MODE (sizetype));
2923 #else
2924 emit_library_call (bcopy_libfunc, 0,
2925 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2926 XEXP (to_rtx, 0), Pmode,
2927 convert_to_mode (TYPE_MODE (integer_type_node),
2928 size, TREE_UNSIGNED (integer_type_node)),
2929 TYPE_MODE (integer_type_node));
2930 #endif
2931
2932 preserve_temp_slots (to_rtx);
2933 free_temp_slots ();
2934 pop_temp_slots ();
2935 return want_value ? to_rtx : NULL_RTX;
2936 }
2937
2938 /* Compute FROM and store the value in the rtx we got. */
2939
2940 push_temp_slots ();
2941 result = store_expr (from, to_rtx, want_value);
2942 preserve_temp_slots (result);
2943 free_temp_slots ();
2944 pop_temp_slots ();
2945 return want_value ? result : NULL_RTX;
2946 }
2947
2948 /* Generate code for computing expression EXP,
2949 and storing the value into TARGET.
2950 TARGET may contain a QUEUED rtx.
2951
2952 If WANT_VALUE is nonzero, return a copy of the value
2953 not in TARGET, so that we can be sure to use the proper
2954 value in a containing expression even if TARGET has something
2955 else stored in it. If possible, we copy the value through a pseudo
2956 and return that pseudo. Or, if the value is constant, we try to
2957 return the constant. In some cases, we return a pseudo
2958 copied *from* TARGET.
2959
2960 If the mode is BLKmode then we may return TARGET itself.
2961 It turns out that in BLKmode it doesn't cause a problem,
2962 because C has no operators that could combine two different
2963 assignments into the same BLKmode object with different values
2964 with no sequence point. Will other languages need this to
2965 be more thorough?
2966
2967 If WANT_VALUE is 0, we return NULL, to make sure
2968 to catch quickly any cases where the caller uses the value
2969 and fails to set WANT_VALUE. */
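
/* Two sketches of the calling convention (operands are placeholders):

     store_expr (exp, target, 0);         ...just store, returns NULL_RTX
     temp = store_expr (exp, target, 1);  ...also yield a usable copy

   With WANT_VALUE nonzero the caller may use TEMP even if TARGET is
   clobbered afterward.  */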
2970
2971 rtx
2972 store_expr (exp, target, want_value)
2973 register tree exp;
2974 register rtx target;
2975 int want_value;
2976 {
2977 register rtx temp;
2978 int dont_return_target = 0;
2979
2980 if (TREE_CODE (exp) == COMPOUND_EXPR)
2981 {
2982 /* Perform first part of compound expression, then assign from second
2983 part. */
2984 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2985 emit_queue ();
2986 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2987 }
2988 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2989 {
2990 /* For conditional expression, get safe form of the target. Then
2991 test the condition, doing the appropriate assignment on either
2992 side. This avoids the creation of unnecessary temporaries.
2993 For non-BLKmode, it is more efficient not to do this. */
2994
2995 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2996 rtx flag = NULL_RTX;
2997 tree left_cleanups = NULL_TREE;
2998 tree right_cleanups = NULL_TREE;
2999 tree old_cleanups = cleanups_this_call;
3000
3001 /* Used to save a pointer to the place to put the setting of
3002 the flag that indicates if this side of the conditional was
3003 taken. We backpatch the code, if we find out later that we
3004 have any conditional cleanups that need to be performed. */
3005 rtx dest_right_flag = NULL_RTX;
3006 rtx dest_left_flag = NULL_RTX;
3007
3008 emit_queue ();
3009 target = protect_from_queue (target, 1);
3010
3011 do_pending_stack_adjust ();
3012 NO_DEFER_POP;
3013 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3014 store_expr (TREE_OPERAND (exp, 1), target, 0);
3015 dest_left_flag = get_last_insn ();
3016 /* Handle conditional cleanups, if any. */
3017 left_cleanups = defer_cleanups_to (old_cleanups);
3018 emit_queue ();
3019 emit_jump_insn (gen_jump (lab2));
3020 emit_barrier ();
3021 emit_label (lab1);
3022 store_expr (TREE_OPERAND (exp, 2), target, 0);
3023 dest_right_flag = get_last_insn ();
3024 /* Handle conditional cleanups, if any. */
3025 right_cleanups = defer_cleanups_to (old_cleanups);
3026 emit_queue ();
3027 emit_label (lab2);
3028 OK_DEFER_POP;
3029
3030 /* Add back in any conditional cleanups. */
3031 if (left_cleanups || right_cleanups)
3032 {
3033 tree new_cleanups;
3034 tree cond;
3035 rtx last;
3036
3037 /* Now that we know that a flag is needed, go back and add in the
3038 setting of the flag. */
3039
3040 flag = gen_reg_rtx (word_mode);
3041
3042 /* Do the left side flag. */
3043 last = get_last_insn ();
3044 /* Flag left cleanups as needed. */
3045 emit_move_insn (flag, const1_rtx);
3046 /* ??? deprecated, use sequences instead. */
3047 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
3048
3049 /* Do the right side flag. */
3050 last = get_last_insn ();
3051 /* Flag right cleanups as needed. */
3052 emit_move_insn (flag, const0_rtx);
3053 /* ??? deprecated, use sequences instead. */
3054 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
3055
3056 /* All cleanups must be on the function_obstack. */
3057 push_obstacks_nochange ();
3058 resume_temporary_allocation ();
3059
3060 /* Convert FLAG, which is an rtx, into a tree. */
3061 cond = make_node (RTL_EXPR);
3062 TREE_TYPE (cond) = integer_type_node;
3063 RTL_EXPR_RTL (cond) = flag;
3064 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
3065 cond = save_expr (cond);
3066
3067 if (! left_cleanups)
3068 left_cleanups = integer_zero_node;
3069 if (! right_cleanups)
3070 right_cleanups = integer_zero_node;
3071 new_cleanups = build (COND_EXPR, void_type_node,
3072 truthvalue_conversion (cond),
3073 left_cleanups, right_cleanups);
3074 new_cleanups = fold (new_cleanups);
3075
3076 pop_obstacks ();
3077
3078 /* Now add in the conditionalized cleanups. */
3079 cleanups_this_call
3080 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3081 expand_eh_region_start ();
3082 }
3083 return want_value ? target : NULL_RTX;
3084 }
3085 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3086 && GET_MODE (target) != BLKmode)
3087 /* If target is in memory and caller wants value in a register instead,
3088 arrange that. Pass TARGET as target for expand_expr so that,
3089 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3090 We know expand_expr will not use the target in that case.
3091 Don't do this if TARGET is volatile because we are supposed
3092 to write it and then read it. */
3093 {
3094 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3095 GET_MODE (target), 0);
3096 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3097 temp = copy_to_reg (temp);
3098 dont_return_target = 1;
3099 }
3100 else if (queued_subexp_p (target))
3101 /* If target contains a postincrement, let's not risk
3102 using it as the place to generate the rhs. */
3103 {
3104 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3105 {
3106 /* Expand EXP into a new pseudo. */
3107 temp = gen_reg_rtx (GET_MODE (target));
3108 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3109 }
3110 else
3111 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3112
3113 /* If target is volatile, ANSI requires accessing the value
3114 *from* the target, if it is accessed. So make that happen.
3115 In no case return the target itself. */
3116 if (! MEM_VOLATILE_P (target) && want_value)
3117 dont_return_target = 1;
3118 }
3119 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3120 /* If this is a scalar in a register that is stored in a wider mode
3121 than the declared mode, compute the result into its declared mode
3122 and then convert to the wider mode. Our value is the computed
3123 expression. */
3124 {
3125 /* If we don't want a value, we can do the conversion inside EXP,
3126 which will often result in some optimizations. Do the conversion
3127 in two steps: first change the signedness, if needed, then
3128 the extend. */
3129 if (! want_value)
3130 {
3131 if (TREE_UNSIGNED (TREE_TYPE (exp))
3132 != SUBREG_PROMOTED_UNSIGNED_P (target))
3133 exp
3134 = convert
3135 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3136 TREE_TYPE (exp)),
3137 exp);
3138
3139 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3140 SUBREG_PROMOTED_UNSIGNED_P (target)),
3141 exp);
3142 }
3143
3144 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3145
3146 /* If TEMP is a volatile MEM and we want a result value, make
3147 the access now so it gets done only once. Likewise if
3148 it contains TARGET. */
3149 if (GET_CODE (temp) == MEM && want_value
3150 && (MEM_VOLATILE_P (temp)
3151 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3152 temp = copy_to_reg (temp);
3153
3154 /* If TEMP is a VOIDmode constant, use convert_modes to make
3155 sure that we properly convert it. */
3156 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3157 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3158 TYPE_MODE (TREE_TYPE (exp)), temp,
3159 SUBREG_PROMOTED_UNSIGNED_P (target));
3160
3161 convert_move (SUBREG_REG (target), temp,
3162 SUBREG_PROMOTED_UNSIGNED_P (target));
3163 return want_value ? temp : NULL_RTX;
3164 }
3165 else
3166 {
3167 temp = expand_expr (exp, target, GET_MODE (target), 0);
3168 /* Return TARGET if it's a specified hardware register.
3169 If TARGET is a volatile mem ref, either return TARGET
3170 or return a reg copied *from* TARGET; ANSI requires this.
3171
3172 Otherwise, if TEMP is not TARGET, return TEMP
3173 if it is constant (for efficiency),
3174 or if we really want the correct value. */
3175 if (!(target && GET_CODE (target) == REG
3176 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3177 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3178 && temp != target
3179 && (CONSTANT_P (temp) || want_value))
3180 dont_return_target = 1;
3181 }
3182
3183 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3184 the same as that of TARGET, adjust the constant. This is needed, for
3185 example, in case it is a CONST_DOUBLE and we want only a word-sized
3186 value. */
3187 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3188 && TREE_CODE (exp) != ERROR_MARK
3189 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3190 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3191 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3192
3193 /* If value was not generated in the target, store it there.
3194 Convert the value to TARGET's type first if necessary. */
3195
3196 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3197 {
3198 target = protect_from_queue (target, 1);
3199 if (GET_MODE (temp) != GET_MODE (target)
3200 && GET_MODE (temp) != VOIDmode)
3201 {
3202 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3203 if (dont_return_target)
3204 {
3205 /* In this case, we will return TEMP,
3206 so make sure it has the proper mode.
3207 But don't forget to store the value into TARGET. */
3208 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3209 emit_move_insn (target, temp);
3210 }
3211 else
3212 convert_move (target, temp, unsignedp);
3213 }
3214
3215 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3216 {
3217 /* Handle copying a string constant into an array.
3218 The string constant may be shorter than the array.
3219 So copy just the string's actual length, and clear the rest. */
3220 rtx size;
3221 rtx addr;
3222
3223 /* Get the size of the data type of the string,
3224 which is actually the size of the target. */
3225 size = expr_size (exp);
3226 if (GET_CODE (size) == CONST_INT
3227 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3228 emit_block_move (target, temp, size,
3229 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3230 else
3231 {
3232 /* Compute the size of the data to copy from the string. */
3233 tree copy_size
3234 = size_binop (MIN_EXPR,
3235 make_tree (sizetype, size),
3236 convert (sizetype,
3237 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3238 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3239 VOIDmode, 0);
3240 rtx label = 0;
3241
3242 /* Copy that much. */
3243 emit_block_move (target, temp, copy_size_rtx,
3244 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3245
3246 /* Figure out how much is left in TARGET that we have to clear.
3247 Do all calculations in ptr_mode. */
3248
3249 addr = XEXP (target, 0);
3250 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3251
3252 if (GET_CODE (copy_size_rtx) == CONST_INT)
3253 {
3254 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3255 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3256 }
3257 else
3258 {
3259 addr = force_reg (ptr_mode, addr);
3260 addr = expand_binop (ptr_mode, add_optab, addr,
3261 copy_size_rtx, NULL_RTX, 0,
3262 OPTAB_LIB_WIDEN);
3263
3264 size = expand_binop (ptr_mode, sub_optab, size,
3265 copy_size_rtx, NULL_RTX, 0,
3266 OPTAB_LIB_WIDEN);
3267
3268 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3269 GET_MODE (size), 0, 0);
3270 label = gen_label_rtx ();
3271 emit_jump_insn (gen_blt (label));
3272 }
3273
3274 if (size != const0_rtx)
3275 {
3276 #ifdef TARGET_MEM_FUNCTIONS
3277 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3278 addr, Pmode,
3279 const0_rtx, TYPE_MODE (integer_type_node),
3280 convert_to_mode (TYPE_MODE (sizetype),
3281 size,
3282 TREE_UNSIGNED (sizetype)),
3283 TYPE_MODE (sizetype));
3284 #else
3285 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3286 addr, Pmode,
3287 convert_to_mode (TYPE_MODE (integer_type_node),
3288 size,
3289 TREE_UNSIGNED (integer_type_node)),
3290 TYPE_MODE (integer_type_node));
3291 #endif
3292 }
3293
3294 if (label)
3295 emit_label (label);
3296 }
3297 }
3298 /* Handle calls that return values in multiple non-contiguous locations.
3299 The Irix 6 ABI has examples of this. */
3300 else if (GET_CODE (target) == PARALLEL)
3301 emit_group_load (target, temp);
3302 else if (GET_MODE (temp) == BLKmode)
3303 emit_block_move (target, temp, expr_size (exp),
3304 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3305 else
3306 emit_move_insn (target, temp);
3307 }
3308
3309 /* If we don't want a value, return NULL_RTX. */
3310 if (! want_value)
3311 return NULL_RTX;
3312
3313 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3314 ??? The latter test doesn't seem to make sense. */
3315 else if (dont_return_target && GET_CODE (temp) != MEM)
3316 return temp;
3317
3318 /* Return TARGET itself if it is a hard register. */
3319 else if (want_value && GET_MODE (target) != BLKmode
3320 && ! (GET_CODE (target) == REG
3321 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3322 return copy_to_reg (target);
3323
3324 else
3325 return target;
3326 }
3327 \f
3328 /* Return 1 if EXP just contains zeros. */
3329
3330 static int
3331 is_zeros_p (exp)
3332 tree exp;
3333 {
3334 tree elt;
3335
3336 switch (TREE_CODE (exp))
3337 {
3338 case CONVERT_EXPR:
3339 case NOP_EXPR:
3340 case NON_LVALUE_EXPR:
3341 return is_zeros_p (TREE_OPERAND (exp, 0));
3342
3343 case INTEGER_CST:
3344 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3345
3346 case COMPLEX_CST:
3347 return
3348 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3349
3350 case REAL_CST:
3351 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3352
3353 case CONSTRUCTOR:
3354 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3355 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3356 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3357 if (! is_zeros_p (TREE_VALUE (elt)))
3358 return 0;
3359
3360 return 1;
3361 }
3362
3363 return 0;
3364 }
3365
3366 /* Return 1 if EXP contains mostly (3/4) zeros. */
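/* The test used below is 4 * zeros >= 3 * elts; e.g. a constructor
   with 3 zero elements out of 4 qualifies (12 >= 12), while one with
   2 out of 4 does not (8 < 12).  */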
3367
3368 static int
3369 mostly_zeros_p (exp)
3370 tree exp;
3371 {
3372 if (TREE_CODE (exp) == CONSTRUCTOR)
3373 {
3374 int elts = 0, zeros = 0;
3375 tree elt = CONSTRUCTOR_ELTS (exp);
3376 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3377 {
3378 /* If there are no ranges of true bits, it is all zero. */
3379 return elt == NULL_TREE;
3380 }
3381 for (; elt; elt = TREE_CHAIN (elt))
3382 {
3383 /* We do not handle the case where the index is a RANGE_EXPR,
3384 so the statistic will be somewhat inaccurate.
3385 We do make a more accurate count in store_constructor itself,
3386 and since this function is only used for nested array elements,
3387 this should be close enough. */
3388 if (mostly_zeros_p (TREE_VALUE (elt)))
3389 zeros++;
3390 elts++;
3391 }
3392
3393 return 4 * zeros >= 3 * elts;
3394 }
3395
3396 return is_zeros_p (exp);
3397 }
3398 \f
3399 /* Helper function for store_constructor.
3400 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3401 TYPE is the type of the CONSTRUCTOR, not the element type.
3402 CLEARED is as for store_constructor.
3403
3404 This provides a recursive shortcut back to store_constructor when it isn't
3405 necessary to go through store_field. This is so that we can pass through
3406 the cleared field to let store_constructor know that we may not have to
3407 clear a substructure if the outer structure has already been cleared. */
3408
3409 static void
3410 store_constructor_field (target, bitsize, bitpos,
3411 mode, exp, type, cleared)
3412 rtx target;
3413 int bitsize, bitpos;
3414 enum machine_mode mode;
3415 tree exp, type;
3416 int cleared;
3417 {
3418 if (TREE_CODE (exp) == CONSTRUCTOR
3419 && bitpos % BITS_PER_UNIT == 0
3420 /* If we have a non-zero bitpos for a register target, then we just
3421 let store_field do the bitfield handling. This is unlikely to
3422 generate unnecessary clear instructions anyway. */
3423 && (bitpos == 0 || GET_CODE (target) == MEM))
3424 {
3425 if (bitpos != 0)
3426 target = change_address (target, VOIDmode,
3427 plus_constant (XEXP (target, 0),
3428 bitpos / BITS_PER_UNIT));
3429 store_constructor (exp, target, cleared);
3430 }
3431 else
3432 store_field (target, bitsize, bitpos, mode, exp,
3433 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3434 int_size_in_bytes (type));
3435 }
3436
3437 /* Store the value of constructor EXP into the rtx TARGET.
3438 TARGET is either a REG or a MEM.
3439 CLEARED is true if TARGET is known to have been zero'd. */
3440
3441 static void
3442 store_constructor (exp, target, cleared)
3443 tree exp;
3444 rtx target;
3445 int cleared;
3446 {
3447 tree type = TREE_TYPE (exp);
3448
3449 /* We know our target cannot conflict, since safe_from_p has been called. */
3450 #if 0
3451 /* Don't try copying piece by piece into a hard register
3452 since that is vulnerable to being clobbered by EXP.
3453 Instead, construct in a pseudo register and then copy it all. */
3454 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3455 {
3456 rtx temp = gen_reg_rtx (GET_MODE (target));
3457 store_constructor (exp, temp, 0);
3458 emit_move_insn (target, temp);
3459 return;
3460 }
3461 #endif
3462
3463 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3464 || TREE_CODE (type) == QUAL_UNION_TYPE)
3465 {
3466 register tree elt;
3467
3468 /* Inform later passes that the whole union value is dead. */
3469 if (TREE_CODE (type) == UNION_TYPE
3470 || TREE_CODE (type) == QUAL_UNION_TYPE)
3471 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3472
3473 /* If we are building a static constructor into a register,
3474 set the initial value as zero so we can fold the value into
3475 a constant. But if more than one register is involved,
3476 this probably loses. */
3477 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3478 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3479 {
3480 if (! cleared)
3481 emit_move_insn (target, const0_rtx);
3482
3483 cleared = 1;
3484 }
3485
3486 /* If the constructor has fewer fields than the structure
3487 or if we are initializing the structure to mostly zeros,
3488 clear the whole structure first. */
3489 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3490 != list_length (TYPE_FIELDS (type)))
3491 || mostly_zeros_p (exp))
3492 {
3493 if (! cleared)
3494 clear_storage (target, expr_size (exp),
3495 TYPE_ALIGN (type) / BITS_PER_UNIT);
3496
3497 cleared = 1;
3498 }
3499 else
3500 /* Inform later passes that the old value is dead. */
3501 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3502
3503 /* Store each element of the constructor into
3504 the corresponding field of TARGET. */
3505
3506 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3507 {
3508 register tree field = TREE_PURPOSE (elt);
3509 register enum machine_mode mode;
3510 int bitsize;
3511 int bitpos = 0;
3512 int unsignedp;
3513 tree pos, constant = 0, offset = 0;
3514 rtx to_rtx = target;
3515
3516 /* Just ignore missing fields.
3517 We cleared the whole structure, above,
3518 if any fields are missing. */
3519 if (field == 0)
3520 continue;
3521
3522 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3523 continue;
3524
3525 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3526 unsignedp = TREE_UNSIGNED (field);
3527 mode = DECL_MODE (field);
3528 if (DECL_BIT_FIELD (field))
3529 mode = VOIDmode;
3530
3531 pos = DECL_FIELD_BITPOS (field);
3532 if (TREE_CODE (pos) == INTEGER_CST)
3533 constant = pos;
3534 else if (TREE_CODE (pos) == PLUS_EXPR
3535 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3536 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3537 else
3538 offset = pos;
3539
3540 if (constant)
3541 bitpos = TREE_INT_CST_LOW (constant);
3542
3543 if (offset)
3544 {
3545 rtx offset_rtx;
3546
3547 if (contains_placeholder_p (offset))
3548 offset = build (WITH_RECORD_EXPR, sizetype,
3549 offset, exp);
3550
3551 offset = size_binop (FLOOR_DIV_EXPR, offset,
3552 size_int (BITS_PER_UNIT));
3553
3554 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3555 if (GET_CODE (to_rtx) != MEM)
3556 abort ();
3557
3558 to_rtx
3559 = change_address (to_rtx, VOIDmode,
3560 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3561 force_reg (ptr_mode, offset_rtx)));
3562 }
3563 if (TREE_READONLY (field))
3564 {
3565 if (GET_CODE (to_rtx) == MEM)
3566 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3567 XEXP (to_rtx, 0));
3568 RTX_UNCHANGING_P (to_rtx) = 1;
3569 }
3570
3571 store_constructor_field (to_rtx, bitsize, bitpos,
3572 mode, TREE_VALUE (elt), type, cleared);
3573 }
3574 }
3575 else if (TREE_CODE (type) == ARRAY_TYPE)
3576 {
3577 register tree elt;
3578 register int i;
3579 int need_to_clear;
3580 tree domain = TYPE_DOMAIN (type);
3581 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3582 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3583 tree elttype = TREE_TYPE (type);
3584
3585 /* If the constructor has fewer elements than the array,
3586 clear the whole array first. Similarly if this is a
3587 static constructor of a non-BLKmode object. */
3588 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3589 need_to_clear = 1;
3590 else
3591 {
3592 HOST_WIDE_INT count = 0, zero_count = 0;
3593 need_to_clear = 0;
3594 /* This loop is a more accurate version of the loop in
3595 mostly_zeros_p (it handles RANGE_EXPR in an index).
3596 It is also needed to check for missing elements. */
3597 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3598 elt != NULL_TREE;
3599 elt = TREE_CHAIN (elt), i++)
3600 {
3601 tree index = TREE_PURPOSE (elt);
3602 HOST_WIDE_INT this_node_count;
3603 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3604 {
3605 tree lo_index = TREE_OPERAND (index, 0);
3606 tree hi_index = TREE_OPERAND (index, 1);
3607 if (TREE_CODE (lo_index) != INTEGER_CST
3608 || TREE_CODE (hi_index) != INTEGER_CST)
3609 {
3610 need_to_clear = 1;
3611 break;
3612 }
3613 this_node_count = TREE_INT_CST_LOW (hi_index)
3614 - TREE_INT_CST_LOW (lo_index) + 1;
3615 }
3616 else
3617 this_node_count = 1;
3618 count += this_node_count;
3619 if (mostly_zeros_p (TREE_VALUE (elt)))
3620 zero_count += this_node_count;
3621 }
3622 /* Clear the entire array first if there are any missing elements,
3623 or if the incidence of zero elements is >= 75%. */
3624 if (count < maxelt - minelt + 1
3625 || 4 * zero_count >= 3 * count)
3626 need_to_clear = 1;
3627 }
3628 if (need_to_clear)
3629 {
3630 if (! cleared)
3631 clear_storage (target, expr_size (exp),
3632 TYPE_ALIGN (type) / BITS_PER_UNIT);
3633 cleared = 1;
3634 }
3635 else
3636 /* Inform later passes that the old value is dead. */
3637 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3638
3639 /* Store each element of the constructor into
3640 the corresponding element of TARGET, determined
3641 by counting the elements. */
3642 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3643 elt;
3644 elt = TREE_CHAIN (elt), i++)
3645 {
3646 register enum machine_mode mode;
3647 int bitsize;
3648 int bitpos;
3649 int unsignedp;
3650 tree value = TREE_VALUE (elt);
3651 tree index = TREE_PURPOSE (elt);
3652 rtx xtarget = target;
3653
3654 if (cleared && is_zeros_p (value))
3655 continue;
3656
3657 mode = TYPE_MODE (elttype);
3658 bitsize = GET_MODE_BITSIZE (mode);
3659 unsignedp = TREE_UNSIGNED (elttype);
3660
3661 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3662 {
3663 tree lo_index = TREE_OPERAND (index, 0);
3664 tree hi_index = TREE_OPERAND (index, 1);
3665 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3666 struct nesting *loop;
3667 HOST_WIDE_INT lo, hi, count;
3668 tree position;
3669
3670 /* If the range is constant and "small", unroll the loop. */
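/* Illustrative numbers (assumed, not from the original source): with
   4-byte elements, a constant range of 10 elements covers
   10 * 4 * 8 == 320 bits, exactly the 40 * 8 limit tested below, so
   the stores are unrolled; an 11th element would fall through to the
   runtime loop in the else branch.  */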
3671 if (TREE_CODE (lo_index) == INTEGER_CST
3672 && TREE_CODE (hi_index) == INTEGER_CST
3673 && (lo = TREE_INT_CST_LOW (lo_index),
3674 hi = TREE_INT_CST_LOW (hi_index),
3675 count = hi - lo + 1,
3676 (GET_CODE (target) != MEM
3677 || count <= 2
3678 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3679 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3680 <= 40 * 8))))
3681 {
3682 lo -= minelt; hi -= minelt;
3683 for (; lo <= hi; lo++)
3684 {
3685 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3686 store_constructor_field (target, bitsize, bitpos,
3687 mode, value, type, cleared);
3688 }
3689 }
3690 else
3691 {
3692 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3693 loop_top = gen_label_rtx ();
3694 loop_end = gen_label_rtx ();
3695
3696 unsignedp = TREE_UNSIGNED (domain);
3697
3698 index = build_decl (VAR_DECL, NULL_TREE, domain);
3699
3700 DECL_RTL (index) = index_r
3701 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3702 &unsignedp, 0));
3703
3704 if (TREE_CODE (value) == SAVE_EXPR
3705 && SAVE_EXPR_RTL (value) == 0)
3706 {
3707 /* Make sure value gets expanded once before the
3708 loop. */
3709 expand_expr (value, const0_rtx, VOIDmode, 0);
3710 emit_queue ();
3711 }
3712 store_expr (lo_index, index_r, 0);
3713 loop = expand_start_loop (0);
3714
3715 /* Assign value to element index. */
3716 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3717 size_int (BITS_PER_UNIT));
3718 position = size_binop (MULT_EXPR,
3719 size_binop (MINUS_EXPR, index,
3720 TYPE_MIN_VALUE (domain)),
3721 position);
3722 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3723 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3724 xtarget = change_address (target, mode, addr);
3725 if (TREE_CODE (value) == CONSTRUCTOR)
3726 store_constructor (value, xtarget, cleared);
3727 else
3728 store_expr (value, xtarget, 0);
3729
3730 expand_exit_loop_if_false (loop,
3731 build (LT_EXPR, integer_type_node,
3732 index, hi_index));
3733
3734 expand_increment (build (PREINCREMENT_EXPR,
3735 TREE_TYPE (index),
3736 index, integer_one_node), 0, 0);
3737 expand_end_loop ();
3738 emit_label (loop_end);
3739
3740 /* Needed by stupid register allocation, to extend the
3741 lifetime of pseudo-regs used by target past the end
3742 of the loop. */
3743 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3744 }
3745 }
3746 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3747 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3748 {
3749 rtx pos_rtx, addr;
3750 tree position;
3751
3752 if (index == 0)
3753 index = size_int (i);
3754
3755 if (minelt)
3756 index = size_binop (MINUS_EXPR, index,
3757 TYPE_MIN_VALUE (domain));
3758 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3759 size_int (BITS_PER_UNIT));
3760 position = size_binop (MULT_EXPR, index, position);
3761 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3762 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3763 xtarget = change_address (target, mode, addr);
3764 store_expr (value, xtarget, 0);
3765 }
3766 else
3767 {
3768 if (index != 0)
3769 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3770 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3771 else
3772 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3773 store_constructor_field (target, bitsize, bitpos,
3774 mode, value, type, cleared);
3775 }
3776 }
3777 }
3778 /* Set constructor assignments. */
3779 else if (TREE_CODE (type) == SET_TYPE)
3780 {
3781 tree elt = CONSTRUCTOR_ELTS (exp);
3782 rtx xtarget = XEXP (target, 0);
3783 int set_word_size = TYPE_ALIGN (type);
3784 int nbytes = int_size_in_bytes (type), nbits;
3785 tree domain = TYPE_DOMAIN (type);
3786 tree domain_min, domain_max, bitlength;
3787
3788 /* The default implementation strategy is to extract the constant
3789 parts of the constructor, use that to initialize the target,
3790 and then "or" in whatever non-constant ranges we need in addition.
3791
3792 If a large set is all zero or all ones, it is
3793 probably better to set it using memset (if available) or bzero.
3794 Also, if a large set has just a single range, it may also be
3795 better to first clear the set (using bzero/memset), and
3796 then set the bits we want. */
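/* Illustrative constructor (Pascal-style notation assumed; SET_TYPE is
   produced by front ends such as Pascal or CHILL, not C): for a set
   like [1, 3 .. 10] the constant bits 1 and 3 .. 10 are packed into
   words by the code below, while a variable range [lo .. hi] is or'ed
   in afterwards through the __setbits library call further down.  */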
3797
3798 /* Check for all zeros. */
3799 if (elt == NULL_TREE)
3800 {
3801 if (!cleared)
3802 clear_storage (target, expr_size (exp),
3803 TYPE_ALIGN (type) / BITS_PER_UNIT);
3804 return;
3805 }
3806
3807 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3808 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3809 bitlength = size_binop (PLUS_EXPR,
3810 size_binop (MINUS_EXPR, domain_max, domain_min),
3811 size_one_node);
3812
3813 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3814 abort ();
3815 nbits = TREE_INT_CST_LOW (bitlength);
3816
3817 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3818 are "complicated" (more than one range), initialize (the
3819 constant parts) by copying from a constant. */
3820 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3821 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3822 {
3823 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3824 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3825 char *bit_buffer = (char *) alloca (nbits);
3826 HOST_WIDE_INT word = 0;
3827 int bit_pos = 0;
3828 int ibit = 0;
3829 int offset = 0; /* In bytes from beginning of set. */
3830 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
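/* Worked example of the packing loop below (assumed parameters): with
   set_word_size == 8 and only bits 0 and 2 set, the first word becomes
   1<<0 | 1<<2 == 0x05 on a little-endian machine, or
   1<<7 | 1<<5 == 0xa0 when BYTES_BIG_ENDIAN.  */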
3831 for (;;)
3832 {
3833 if (bit_buffer[ibit])
3834 {
3835 if (BYTES_BIG_ENDIAN)
3836 word |= (1 << (set_word_size - 1 - bit_pos));
3837 else
3838 word |= 1 << bit_pos;
3839 }
3840 bit_pos++; ibit++;
3841 if (bit_pos >= set_word_size || ibit == nbits)
3842 {
3843 if (word != 0 || ! cleared)
3844 {
3845 rtx datum = GEN_INT (word);
3846 rtx to_rtx;
3847 /* The assumption here is that it is safe to use
3848 XEXP if the set is multi-word, but not if
3849 it's single-word. */
3850 if (GET_CODE (target) == MEM)
3851 {
3852 to_rtx = plus_constant (XEXP (target, 0), offset);
3853 to_rtx = change_address (target, mode, to_rtx);
3854 }
3855 else if (offset == 0)
3856 to_rtx = target;
3857 else
3858 abort ();
3859 emit_move_insn (to_rtx, datum);
3860 }
3861 if (ibit == nbits)
3862 break;
3863 word = 0;
3864 bit_pos = 0;
3865 offset += set_word_size / BITS_PER_UNIT;
3866 }
3867 }
3868 }
3869 else if (!cleared)
3870 {
3871 /* Don't bother clearing storage if the set is all ones. */
3872 if (TREE_CHAIN (elt) != NULL_TREE
3873 || (TREE_PURPOSE (elt) == NULL_TREE
3874 ? nbits != 1
3875 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3876 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3877 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3878 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3879 != nbits))))
3880 clear_storage (target, expr_size (exp),
3881 TYPE_ALIGN (type) / BITS_PER_UNIT);
3882 }
3883
3884 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3885 {
3886 /* start of range of element or NULL */
3887 tree startbit = TREE_PURPOSE (elt);
3888 /* end of range of element, or element value */
3889 tree endbit = TREE_VALUE (elt);
3890 HOST_WIDE_INT startb, endb;
3891 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3892
3893 bitlength_rtx = expand_expr (bitlength,
3894 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3895
3896 /* Handle a non-range tuple element like [ expr ]. */
3897 if (startbit == NULL_TREE)
3898 {
3899 startbit = save_expr (endbit);
3900 endbit = startbit;
3901 }
3902 startbit = convert (sizetype, startbit);
3903 endbit = convert (sizetype, endbit);
3904 if (! integer_zerop (domain_min))
3905 {
3906 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3907 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3908 }
3909 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3910 EXPAND_CONST_ADDRESS);
3911 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3912 EXPAND_CONST_ADDRESS);
3913
3914 if (REG_P (target))
3915 {
3916 targetx = assign_stack_temp (GET_MODE (target),
3917 GET_MODE_SIZE (GET_MODE (target)),
3918 0);
3919 emit_move_insn (targetx, target);
3920 }
3921 else if (GET_CODE (target) == MEM)
3922 targetx = target;
3923 else
3924 abort ();
3925
3926 #ifdef TARGET_MEM_FUNCTIONS
3927 /* Optimization: If startbit and endbit are
3928 constants divisible by BITS_PER_UNIT,
3929 call memset instead. */
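/* Worked example (assumed values): with BITS_PER_UNIT == 8, a range
   with startbit == 8 and endbit == 23 gives startb == 8 and
   endb == 24, both divisible by 8, so memset fills
   (24 - 8) / 8 == 2 bytes starting at byte offset 1 instead of
   calling __setbits.  */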
3930 if (TREE_CODE (startbit) == INTEGER_CST
3931 && TREE_CODE (endbit) == INTEGER_CST
3932 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3933 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3934 {
3935 emit_library_call (memset_libfunc, 0,
3936 VOIDmode, 3,
3937 plus_constant (XEXP (targetx, 0),
3938 startb / BITS_PER_UNIT),
3939 Pmode,
3940 constm1_rtx, TYPE_MODE (integer_type_node),
3941 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3942 TYPE_MODE (sizetype));
3943 }
3944 else
3945 #endif
3946 {
3947 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3948 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3949 bitlength_rtx, TYPE_MODE (sizetype),
3950 startbit_rtx, TYPE_MODE (sizetype),
3951 endbit_rtx, TYPE_MODE (sizetype));
3952 }
3953 if (REG_P (target))
3954 emit_move_insn (target, targetx);
3955 }
3956 }
3957
3958 else
3959 abort ();
3960 }
3961
3962 /* Store the value of EXP (an expression tree)
3963 into a subfield of TARGET which has mode MODE and occupies
3964 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3965 If MODE is VOIDmode, it means that we are storing into a bit-field.
3966
3967 If VALUE_MODE is VOIDmode, return nothing in particular.
3968 UNSIGNEDP is not used in this case.
3969
3970 Otherwise, return an rtx for the value stored. This rtx
3971 has mode VALUE_MODE if that is convenient to do.
3972 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3973
3974 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3975 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
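/* Hypothetical call (illustrative only; STRUCT_ALIGN and STRUCT_SIZE
   are made-up names): storing EXP into an 8-bit field that starts
   16 bits into a structure might look like
   store_field (to_rtx, 8, 16, QImode, exp, VOIDmode, 0,
                struct_align, struct_size);
   with VALUE_MODE == VOIDmode because no result rtx is wanted.  */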
3976
3977 static rtx
3978 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3979 unsignedp, align, total_size)
3980 rtx target;
3981 int bitsize, bitpos;
3982 enum machine_mode mode;
3983 tree exp;
3984 enum machine_mode value_mode;
3985 int unsignedp;
3986 int align;
3987 int total_size;
3988 {
3989 HOST_WIDE_INT width_mask = 0;
3990
3991 if (bitsize < HOST_BITS_PER_WIDE_INT)
3992 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
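/* E.g., bitsize == 5 gives width_mask == 0x1f, the mask of the
   field's low-order bits; the mask stays 0 for full-width fields
   because the shift above would overflow.  (Illustrative note.)  */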
3993
3994 /* If we are storing into an unaligned field of an aligned union that is
3995 in a register, we may have the mode of TARGET being an integer mode but
3996 MODE == BLKmode. In that case, get an aligned object whose size and
3997 alignment are the same as TARGET and store TARGET into it (we can avoid
3998 the store if the field being stored is the entire width of TARGET). Then
3999 call ourselves recursively to store the field into a BLKmode version of
4000 that object. Finally, load from the object into TARGET. This is not
4001 very efficient in general, but should only be slightly more expensive
4002 than the otherwise-required unaligned accesses. Perhaps this can be
4003 cleaned up later. */
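/* In outline (illustrative, mirroring the code below): copy TARGET
   into a stack temp OBJECT of the same mode, store the field through
   a BLKmode view of that temp, then copy OBJECT back into TARGET.  */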
4004
4005 if (mode == BLKmode
4006 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4007 {
4008 rtx object = assign_stack_temp (GET_MODE (target),
4009 GET_MODE_SIZE (GET_MODE (target)), 0);
4010 rtx blk_object = copy_rtx (object);
4011
4012 MEM_IN_STRUCT_P (object) = 1;
4013 MEM_IN_STRUCT_P (blk_object) = 1;
4014 PUT_MODE (blk_object, BLKmode);
4015
4016 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4017 emit_move_insn (object, target);
4018
4019 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4020 align, total_size);
4021
4022 /* Even though we aren't returning target, we need to
4023 give it the updated value. */
4024 emit_move_insn (target, object);
4025
4026 return blk_object;
4027 }
4028
4029 /* If the structure is in a register or if the component
4030 is a bit field, we cannot use addressing to access it.
4031 Use bit-field techniques or SUBREG to store in it. */
4032
4033 if (mode == VOIDmode
4034 || (mode != BLKmode && ! direct_store[(int) mode])
4035 || GET_CODE (target) == REG
4036 || GET_CODE (target) == SUBREG
4037 /* If the field isn't aligned enough to store as an ordinary memref,
4038 store it as a bit field. */
4039 || (SLOW_UNALIGNED_ACCESS
4040 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4041 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4042 {
4043 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4044
4045 /* If BITSIZE is narrower than the size of the type of EXP
4046 we will be narrowing TEMP. Normally, what's wanted are the
4047 low-order bits. However, if EXP's type is a record and this is a
4048 big-endian machine, we want the upper BITSIZE bits. */
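/* Worked example (assumed widths): storing an 8-bit record field
   from a 32-bit TEMP on a big-endian machine shifts TEMP right by
   32 - 8 == 24 so the wanted upper byte lands in the low-order bits
   that store_bit_field uses.  */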
4049 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4050 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4051 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4052 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4053 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4054 - bitsize),
4055 temp, 1);
4056
4057 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4058 MODE. */
4059 if (mode != VOIDmode && mode != BLKmode
4060 && mode != TYPE_MODE (TREE_TYPE (exp)))
4061 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4062
4063 /* If the modes of TARGET and TEMP are both BLKmode, both
4064 must be in memory and BITPOS must be aligned on a byte
4065 boundary. If so, we simply do a block copy. */
4066 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4067 {
4068 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4069 || bitpos % BITS_PER_UNIT != 0)
4070 abort ();
4071
4072 target = change_address (target, VOIDmode,
4073 plus_constant (XEXP (target, 0),
4074 bitpos / BITS_PER_UNIT));
4075
4076 emit_block_move (target, temp,
4077 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4078 / BITS_PER_UNIT),
4079 1);
4080
4081 return value_mode == VOIDmode ? const0_rtx : target;
4082 }
4083
4084 /* Store the value in the bitfield. */
4085 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4086 if (value_mode != VOIDmode)
4087 {
4088 /* The caller wants an rtx for the value. */
4089 /* If possible, avoid refetching from the bitfield itself. */
4090 if (width_mask != 0
4091 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4092 {
4093 tree count;
4094 enum machine_mode tmode;
4095
4096 if (unsignedp)
4097 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4098 tmode = GET_MODE (temp);
4099 if (tmode == VOIDmode)
4100 tmode = value_mode;
4101 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4102 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4103 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4104 }
4105 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4106 NULL_RTX, value_mode, 0, align,
4107 total_size);
4108 }
4109 return const0_rtx;
4110 }
4111 else
4112 {
4113 rtx addr = XEXP (target, 0);
4114 rtx to_rtx;
4115
4116 /* If a value is wanted, it must be the lhs;
4117 so make the address stable for multiple use. */
4118
4119 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4120 && ! CONSTANT_ADDRESS_P (addr)
4121 /* A frame-pointer reference is already stable. */
4122 && ! (GET_CODE (addr) == PLUS
4123 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4124 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4125 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4126 addr = copy_to_reg (addr);
4127
4128 /* Now build a reference to just the desired component. */
4129
4130 to_rtx = change_address (target, mode,
4131 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
4132 MEM_IN_STRUCT_P (to_rtx) = 1;
4133
4134 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4135 }
4136 }
4137 \f
4138 /* Return true if any object containing the innermost array is an unaligned
4139 packed structure field. */
4140
4141 static int
4142 get_inner_unaligned_p (exp)
4143 tree exp;
4144 {
4145 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4146
4147 while (1)
4148 {
4149 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4150 {
4151 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4152 < needed_alignment)
4153 return 1;
4154 }
4155 else if (TREE_CODE (exp) != ARRAY_REF
4156 && TREE_CODE (exp) != NON_LVALUE_EXPR
4157 && ! ((TREE_CODE (exp) == NOP_EXPR
4158 || TREE_CODE (exp) == CONVERT_EXPR)
4159 && (TYPE_MODE (TREE_TYPE (exp))
4160 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4161 break;
4162
4163 exp = TREE_OPERAND (exp, 0);
4164 }
4165
4166 return 0;
4167 }
4168
4169 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4170 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4171 ARRAY_REFs and find the ultimate containing object, which we return.
4172
4173 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4174 bit position, and *PUNSIGNEDP to the signedness of the field.
4175 If the position of the field is variable, we store a tree
4176 giving the variable offset (in units) in *POFFSET.
4177 This offset is in addition to the bit position.
4178 If the position is not variable, we store 0 in *POFFSET.
4179
4180 If any of the extraction expressions is volatile,
4181 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4182
4183 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4184 is a mode that can be used to access the field. In that case, *PBITSIZE
4185 is redundant.
4186
4187 If the field describes a variable-sized object, *PMODE is set to
4188 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4189 this case, but the address of the object can be found. */
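/* Illustrative call (hypothetical layout, not from the original
   source): for s.f, where F is a 3-bit bit-field placed 17 bits into
   S, this returns the tree for S with *PBITSIZE == 3, *PBITPOS == 17,
   *PMODE == VOIDmode and *POFFSET == 0; a variable array subscript in
   the chain would instead contribute to *POFFSET.  */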
4190
4191 tree
4192 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4193 punsignedp, pvolatilep)
4194 tree exp;
4195 int *pbitsize;
4196 int *pbitpos;
4197 tree *poffset;
4198 enum machine_mode *pmode;
4199 int *punsignedp;
4200 int *pvolatilep;
4201 {
4202 tree orig_exp = exp;
4203 tree size_tree = 0;
4204 enum machine_mode mode = VOIDmode;
4205 tree offset = integer_zero_node;
4206
4207 if (TREE_CODE (exp) == COMPONENT_REF)
4208 {
4209 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4210 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4211 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4212 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4213 }
4214 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4215 {
4216 size_tree = TREE_OPERAND (exp, 1);
4217 *punsignedp = TREE_UNSIGNED (exp);
4218 }
4219 else
4220 {
4221 mode = TYPE_MODE (TREE_TYPE (exp));
4222 *pbitsize = GET_MODE_BITSIZE (mode);
4223 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4224 }
4225
4226 if (size_tree)
4227 {
4228 if (TREE_CODE (size_tree) != INTEGER_CST)
4229 mode = BLKmode, *pbitsize = -1;
4230 else
4231 *pbitsize = TREE_INT_CST_LOW (size_tree);
4232 }
4233
4234 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4235 and find the ultimate containing object. */
4236
4237 *pbitpos = 0;
4238
4239 while (1)
4240 {
4241 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4242 {
4243 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4244 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4245 : TREE_OPERAND (exp, 2));
4246 tree constant = integer_zero_node, var = pos;
4247
4248 /* If this field hasn't been filled in yet, don't go
4249 past it. This should only happen when folding expressions
4250 made during type construction. */
4251 if (pos == 0)
4252 break;
4253
4254 /* Assume here that the offset is a multiple of a unit.
4255 If not, there should be an explicitly added constant. */
4256 if (TREE_CODE (pos) == PLUS_EXPR
4257 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4258 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4259 else if (TREE_CODE (pos) == INTEGER_CST)
4260 constant = pos, var = integer_zero_node;
4261
4262 *pbitpos += TREE_INT_CST_LOW (constant);
4263 offset = size_binop (PLUS_EXPR, offset,
4264 size_binop (EXACT_DIV_EXPR, var,
4265 size_int (BITS_PER_UNIT)));
4266 }
4267
4268 else if (TREE_CODE (exp) == ARRAY_REF)
4269 {
4270 /* This code is based on the code in case ARRAY_REF in expand_expr
4271 below. We assume here that the size of an array element is
4272 always an integral multiple of BITS_PER_UNIT. */
4273
4274 tree index = TREE_OPERAND (exp, 1);
4275 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4276 tree low_bound
4277 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4278 tree index_type = TREE_TYPE (index);
4279
4280 if (! integer_zerop (low_bound))
4281 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4282
4283 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4284 {
4285 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4286 index);
4287 index_type = TREE_TYPE (index);
4288 }
4289
4290 index = fold (build (MULT_EXPR, index_type, index,
4291 TYPE_SIZE (TREE_TYPE (exp))));
4292
4293 if (TREE_CODE (index) == INTEGER_CST
4294 && TREE_INT_CST_HIGH (index) == 0)
4295 *pbitpos += TREE_INT_CST_LOW (index);
4296 else
4297 offset = size_binop (PLUS_EXPR, offset,
4298 size_binop (FLOOR_DIV_EXPR, index,
4299 size_int (BITS_PER_UNIT)));
4300 }
4301 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4302 && ! ((TREE_CODE (exp) == NOP_EXPR
4303 || TREE_CODE (exp) == CONVERT_EXPR)
4304 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4305 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4306 != UNION_TYPE))
4307 && (TYPE_MODE (TREE_TYPE (exp))
4308 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4309 break;
4310
4311 /* If any reference in the chain is volatile, the effect is volatile. */
4312 if (TREE_THIS_VOLATILE (exp))
4313 *pvolatilep = 1;
4314 exp = TREE_OPERAND (exp, 0);
4315 }
4316
4317 if (integer_zerop (offset))
4318 offset = 0;
4319
4320 if (offset != 0 && contains_placeholder_p (offset))
4321 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4322
4323 *pmode = mode;
4324 *poffset = offset;
4325 return exp;
4326 }
4327 \f
4328 /* Given an rtx VALUE that may contain additions and multiplications,
4329 return an equivalent value that just refers to a register or memory.
4330 This is done by generating instructions to perform the arithmetic
4331 and returning a pseudo-register containing the value.
4332
4333 The returned value may be a REG, SUBREG, MEM or constant. */
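/* Illustrative input and output (hypothetical pseudo numbers): given
   VALUE == (plus:SI (reg:SI 60) (const_int 4)) this emits an add insn
   and returns the pseudo holding the sum; a (mult ...) operand goes
   through expand_mult the same way.  */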
4334
4335 rtx
4336 force_operand (value, target)
4337 rtx value, target;
4338 {
4339 register optab binoptab = 0;
4340 /* Use a temporary to force order of execution of calls to
4341 `force_operand'. */
4342 rtx tmp;
4343 register rtx op2;
4344 /* Use subtarget as the target for operand 0 of a binary operation. */
4345 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4346
4347 if (GET_CODE (value) == PLUS)
4348 binoptab = add_optab;
4349 else if (GET_CODE (value) == MINUS)
4350 binoptab = sub_optab;
4351 else if (GET_CODE (value) == MULT)
4352 {
4353 op2 = XEXP (value, 1);
4354 if (!CONSTANT_P (op2)
4355 && !(GET_CODE (op2) == REG && op2 != subtarget))
4356 subtarget = 0;
4357 tmp = force_operand (XEXP (value, 0), subtarget);
4358 return expand_mult (GET_MODE (value), tmp,
4359 force_operand (op2, NULL_RTX),
4360 target, 0);
4361 }
4362
4363 if (binoptab)
4364 {
4365 op2 = XEXP (value, 1);
4366 if (!CONSTANT_P (op2)
4367 && !(GET_CODE (op2) == REG && op2 != subtarget))
4368 subtarget = 0;
4369 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4370 {
4371 binoptab = add_optab;
4372 op2 = negate_rtx (GET_MODE (value), op2);
4373 }
4374
4375 /* Check for an addition with OP2 a constant integer and our first
4376 operand a PLUS of a virtual register and something else. In that
4377 case, we want to emit the sum of the virtual register and the
4378 constant first and then add the other value. This allows virtual
4379 register instantiation to simply modify the constant rather than
4380 creating another one around this addition. */
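/* Worked example (hypothetical operands): for
   (plus (plus virtual-stack-vars (reg 61)) (const_int 4))
   we first emit virtual-stack-vars + 4, which instantiation can fold
   into a single frame-pointer offset, and only then add (reg 61).  */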
4381 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4382 && GET_CODE (XEXP (value, 0)) == PLUS
4383 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4384 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4385 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4386 {
4387 rtx temp = expand_binop (GET_MODE (value), binoptab,
4388 XEXP (XEXP (value, 0), 0), op2,
4389 subtarget, 0, OPTAB_LIB_WIDEN);
4390 return expand_binop (GET_MODE (value), binoptab, temp,
4391 force_operand (XEXP (XEXP (value, 0), 1), 0),
4392 target, 0, OPTAB_LIB_WIDEN);
4393 }
4394
4395 tmp = force_operand (XEXP (value, 0), subtarget);
4396 return expand_binop (GET_MODE (value), binoptab, tmp,
4397 force_operand (op2, NULL_RTX),
4398 target, 0, OPTAB_LIB_WIDEN);
4399 /* We give UNSIGNEDP = 0 to expand_binop
4400 because the only operations we are expanding here are signed ones. */
4401 }
4402 return value;
4403 }
4404 \f
4405 /* Subroutine of expand_expr:
4406 save the non-copied parts (LIST) of an expr (LHS), and return a list
4407 which can restore these values to their previous values,
4408 should something modify their storage. */
4409
4410 static tree
4411 save_noncopied_parts (lhs, list)
4412 tree lhs;
4413 tree list;
4414 {
4415 tree tail;
4416 tree parts = 0;
4417
4418 for (tail = list; tail; tail = TREE_CHAIN (tail))
4419 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4420 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4421 else
4422 {
4423 tree part = TREE_VALUE (tail);
4424 tree part_type = TREE_TYPE (part);
4425 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4426 rtx target = assign_temp (part_type, 0, 1, 1);
4427 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4428 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4429 parts = tree_cons (to_be_saved,
4430 build (RTL_EXPR, part_type, NULL_TREE,
4431 (tree) target),
4432 parts);
4433 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4434 }
4435 return parts;
4436 }
4437
4438 /* Subroutine of expand_expr:
4439 record the non-copied parts (LIST) of an expr (LHS), and return a list
4440 which specifies the initial values of these parts. */
4441
4442 static tree
4443 init_noncopied_parts (lhs, list)
4444 tree lhs;
4445 tree list;
4446 {
4447 tree tail;
4448 tree parts = 0;
4449
4450 for (tail = list; tail; tail = TREE_CHAIN (tail))
4451 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4452 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4453 else
4454 {
4455 tree part = TREE_VALUE (tail);
4456 tree part_type = TREE_TYPE (part);
4457 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4458 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4459 }
4460 return parts;
4461 }
4462
4463 /* Subroutine of expand_expr: return nonzero iff there is no way that
4464 EXP can reference X, which is being modified. */
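/* E.g., when expanding x = f () + 1 with a MEM as the target for X,
   safe_from_p returns 0: the CALL_EXPR case below assumes the call
   clobbers all of memory.  (Illustrative note.)  */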
4465
4466 static int
4467 safe_from_p (x, exp)
4468 rtx x;
4469 tree exp;
4470 {
4471 rtx exp_rtl = 0;
4472 int i, nops;
4473
4474 if (x == 0
4475 /* If EXP has varying size, we MUST use a target since we currently
4476 have no way of allocating temporaries of variable size
4477 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4478 So we assume here that something at a higher level has prevented a
4479 clash. This is somewhat bogus, but the best we can do. Only
4480 do this when X is BLKmode. */
4481 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4482 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4483 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4484 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4485 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4486 != INTEGER_CST)
4487 && GET_MODE (x) == BLKmode))
4488 return 1;
4489
4490 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4491 find the underlying pseudo. */
4492 if (GET_CODE (x) == SUBREG)
4493 {
4494 x = SUBREG_REG (x);
4495 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4496 return 0;
4497 }
4498
4499 /* If X is a location in the outgoing argument area, it is always safe. */
4500 if (GET_CODE (x) == MEM
4501 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4502 || (GET_CODE (XEXP (x, 0)) == PLUS
4503 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4504 return 1;
4505
4506 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4507 {
4508 case 'd':
4509 exp_rtl = DECL_RTL (exp);
4510 break;
4511
4512 case 'c':
4513 return 1;
4514
4515 case 'x':
4516 if (TREE_CODE (exp) == TREE_LIST)
4517 return ((TREE_VALUE (exp) == 0
4518 || safe_from_p (x, TREE_VALUE (exp)))
4519 && (TREE_CHAIN (exp) == 0
4520 || safe_from_p (x, TREE_CHAIN (exp))));
4521 else
4522 return 0;
4523
4524 case '1':
4525 return safe_from_p (x, TREE_OPERAND (exp, 0));
4526
4527 case '2':
4528 case '<':
4529 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4530 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4531
4532 case 'e':
4533 case 'r':
4534 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4535 the expression. If it is set, we conflict iff we are that rtx or
4536 both are in memory. Otherwise, we check all operands of the
4537 expression recursively. */
4538
4539 switch (TREE_CODE (exp))
4540 {
4541 case ADDR_EXPR:
4542 return (staticp (TREE_OPERAND (exp, 0))
4543 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4544
4545 case INDIRECT_REF:
4546 if (GET_CODE (x) == MEM)
4547 return 0;
4548 break;
4549
4550 case CALL_EXPR:
4551 exp_rtl = CALL_EXPR_RTL (exp);
4552 if (exp_rtl == 0)
4553 {
4554 /* Assume that the call will clobber all hard registers and
4555 all of memory. */
4556 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4557 || GET_CODE (x) == MEM)
4558 return 0;
4559 }
4560
4561 break;
4562
4563 case RTL_EXPR:
4564 /* If a sequence exists, we would have to scan every instruction
4565 in the sequence to see if it was safe. This is probably not
4566 worthwhile. */
4567 if (RTL_EXPR_SEQUENCE (exp))
4568 return 0;
4569
4570 exp_rtl = RTL_EXPR_RTL (exp);
4571 break;
4572
4573 case WITH_CLEANUP_EXPR:
4574 exp_rtl = RTL_EXPR_RTL (exp);
4575 break;
4576
4577 case CLEANUP_POINT_EXPR:
4578 return safe_from_p (x, TREE_OPERAND (exp, 0));
4579
4580 case SAVE_EXPR:
4581 exp_rtl = SAVE_EXPR_RTL (exp);
4582 break;
4583
4584 case BIND_EXPR:
4585 /* The only operand we look at is operand 1. The rest aren't
4586 part of the expression. */
4587 return safe_from_p (x, TREE_OPERAND (exp, 1));
4588
4589 case METHOD_CALL_EXPR:
4590 /* This takes a rtx argument, but shouldn't appear here. */
4591 abort ();
4592 }
4593
4594 /* If we have an rtx, we do not need to scan our operands. */
4595 if (exp_rtl)
4596 break;
4597
4598 nops = tree_code_length[(int) TREE_CODE (exp)];
4599 for (i = 0; i < nops; i++)
4600 if (TREE_OPERAND (exp, i) != 0
4601 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4602 return 0;
4603 }
4604
4605 /* If we have an rtl, find any enclosed object. Then see if we conflict
4606 with it. */
4607 if (exp_rtl)
4608 {
4609 if (GET_CODE (exp_rtl) == SUBREG)
4610 {
4611 exp_rtl = SUBREG_REG (exp_rtl);
4612 if (GET_CODE (exp_rtl) == REG
4613 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4614 return 0;
4615 }
4616
4617 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4618 are memory and EXP is not readonly. */
4619 return ! (rtx_equal_p (x, exp_rtl)
4620 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4621 && ! TREE_READONLY (exp)));
4622 }
4623
4624 /* If we reach here, it is safe. */
4625 return 1;
4626 }
4627
4628 /* Subroutine of expand_expr: return nonzero iff EXP is an
4629 expression whose type is statically determinable. */
4630
4631 static int
4632 fixed_type_p (exp)
4633 tree exp;
4634 {
4635 if (TREE_CODE (exp) == PARM_DECL
4636 || TREE_CODE (exp) == VAR_DECL
4637 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4638 || TREE_CODE (exp) == COMPONENT_REF
4639 || TREE_CODE (exp) == ARRAY_REF)
4640 return 1;
4641 return 0;
4642 }
4643
4644 /* Subroutine of expand_expr: return rtx if EXP is a
4645 variable or parameter; else return 0. */
4646
4647 static rtx
4648 var_rtx (exp)
4649 tree exp;
4650 {
4651 STRIP_NOPS (exp);
4652 switch (TREE_CODE (exp))
4653 {
4654 case PARM_DECL:
4655 case VAR_DECL:
4656 return DECL_RTL (exp);
4657 default:
4658 return 0;
4659 }
4660 }
4661 \f
4662 /* expand_expr: generate code for computing expression EXP.
4663 An rtx for the computed value is returned. The value is never null.
4664 In the case of a void EXP, const0_rtx is returned.
4665
4666 The value may be stored in TARGET if TARGET is nonzero.
4667 TARGET is just a suggestion; callers must assume that
4668 the rtx returned may not be the same as TARGET.
4669
4670 If TARGET is CONST0_RTX, it means that the value will be ignored.
4671
4672 If TMODE is not VOIDmode, it suggests generating the
4673 result in mode TMODE. But this is done only when convenient.
4674 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4675 TMODE is just a suggestion; callers must assume that
4676 the rtx returned may not have mode TMODE.
4677
4678 Note that TARGET may have neither TMODE nor MODE. In that case, it
4679 probably will not be used.
4680
4681 If MODIFIER is EXPAND_SUM then when EXP is an addition
4682 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4683 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4684 products as above, or REG or MEM, or constant.
4685 Ordinarily in such cases we would output mul or add instructions
4686 and then return a pseudo reg containing the sum.
4687
4688 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4689 it also marks a label as absolutely required (it can't be dead).
4690 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4691 This is used for outputting expressions used in initializers.
4692
4693 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4694 with a constant address even if that address is not normally legitimate.
4695 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
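/* Illustrative EXPAND_SUM result (hypothetical pseudos): expanding
   a + b * 4 may simply return
   (plus (reg a) (mult (reg b) (const_int 4)))
   so that a caller such as memory_address can fold the sum into an
   addressing mode instead of forcing it into a register.  */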
4696
4697 rtx
4698 expand_expr (exp, target, tmode, modifier)
4699 register tree exp;
4700 rtx target;
4701 enum machine_mode tmode;
4702 enum expand_modifier modifier;
4703 {
4704 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4705 This is static so it will be accessible to our recursive callees. */
4706 static tree placeholder_list = 0;
4707 register rtx op0, op1, temp;
4708 tree type = TREE_TYPE (exp);
4709 int unsignedp = TREE_UNSIGNED (type);
4710 register enum machine_mode mode = TYPE_MODE (type);
4711 register enum tree_code code = TREE_CODE (exp);
4712 optab this_optab;
4713 /* Use subtarget as the target for operand 0 of a binary operation. */
4714 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4715 rtx original_target = target;
4716 /* Maybe defer this until sure not doing bytecode? */
4717 int ignore = (target == const0_rtx
4718 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4719 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4720 || code == COND_EXPR)
4721 && TREE_CODE (type) == VOID_TYPE));
4722 tree context;
4723
4724
4725 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4726 {
4727 bc_expand_expr (exp);
4728 return NULL;
4729 }
4730
4731 /* Don't use hard regs as subtargets, because the combiner
4732 can only handle pseudo regs. */
4733 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4734 subtarget = 0;
4735 /* Avoid subtargets inside loops,
4736 since they hide some invariant expressions. */
4737 if (preserve_subexpressions_p ())
4738 subtarget = 0;
4739
4740 /* If we are going to ignore this result, we need only do something
4741 if there is a side-effect somewhere in the expression. If there
4742 is, short-circuit the most common cases here. Note that we must
4743 not call expand_expr with anything but const0_rtx in case this
4744 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4745
4746 if (ignore)
4747 {
4748 if (! TREE_SIDE_EFFECTS (exp))
4749 return const0_rtx;
4750
4751 /* Ensure we reference a volatile object even if value is ignored. */
4752 if (TREE_THIS_VOLATILE (exp)
4753 && TREE_CODE (exp) != FUNCTION_DECL
4754 && mode != VOIDmode && mode != BLKmode)
4755 {
4756 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4757 if (GET_CODE (temp) == MEM)
4758 temp = copy_to_reg (temp);
4759 return const0_rtx;
4760 }
4761
4762 if (TREE_CODE_CLASS (code) == '1')
4763 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4764 VOIDmode, modifier);
4765 else if (TREE_CODE_CLASS (code) == '2'
4766 || TREE_CODE_CLASS (code) == '<')
4767 {
4768 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4769 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4770 return const0_rtx;
4771 }
4772 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4773 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4774 /* If the second operand has no side effects, just evaluate
4775 the first. */
4776 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4777 VOIDmode, modifier);
4778
4779 target = 0;
4780 }
4781
4782 /* If we will do cse, generate all results into pseudo registers
4783 since 1) that allows cse to find more things
4784 and 2) otherwise cse could produce an insn the machine
4785 cannot support. */
4786
4787 if (! cse_not_expected && mode != BLKmode && target
4788 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4789 target = subtarget;
4790
4791 switch (code)
4792 {
4793 case LABEL_DECL:
4794 {
4795 tree function = decl_function_context (exp);
4796 /* Handle using a label in a containing function. */
4797 if (function != current_function_decl && function != 0)
4798 {
4799 struct function *p = find_function_data (function);
4800 /* Allocate in the memory associated with the function
4801 that the label is in. */
4802 push_obstacks (p->function_obstack,
4803 p->function_maybepermanent_obstack);
4804
4805 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4806 label_rtx (exp), p->forced_labels);
4807 pop_obstacks ();
4808 }
4809 else if (modifier == EXPAND_INITIALIZER)
4810 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4811 label_rtx (exp), forced_labels);
4812 temp = gen_rtx (MEM, FUNCTION_MODE,
4813 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4814 if (function != current_function_decl && function != 0)
4815 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4816 return temp;
4817 }
4818
4819 case PARM_DECL:
4820 if (DECL_RTL (exp) == 0)
4821 {
4822 error_with_decl (exp, "prior parameter's size depends on `%s'");
4823 return CONST0_RTX (mode);
4824 }
4825
4826 /* ... fall through ... */
4827
4828 case VAR_DECL:
4829 /* If a static var's type was incomplete when the decl was written,
4830 but the type is complete now, lay out the decl now. */
4831 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4832 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4833 {
4834 push_obstacks_nochange ();
4835 end_temporary_allocation ();
4836 layout_decl (exp, 0);
4837 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4838 pop_obstacks ();
4839 }
4840
4841 /* ... fall through ... */
4842
4843 case FUNCTION_DECL:
4844 case RESULT_DECL:
4845 if (DECL_RTL (exp) == 0)
4846 abort ();
4847
4848 /* Ensure the variable is marked as used even if it doesn't go through
4849 a parser. If it hasn't been used yet, write out an external
4850 definition. */
4851 if (! TREE_USED (exp))
4852 {
4853 assemble_external (exp);
4854 TREE_USED (exp) = 1;
4855 }
4856
4857 /* Show we haven't gotten RTL for this yet. */
4858 temp = 0;
4859
4860 /* Handle variables inherited from containing functions. */
4861 context = decl_function_context (exp);
4862
4863 /* We treat inline_function_decl as an alias for the current function
4864 because that is the inline function whose vars, types, etc.
4865 are being merged into the current function.
4866 See expand_inline_function. */
4867
4868 if (context != 0 && context != current_function_decl
4869 && context != inline_function_decl
4870 /* If var is static, we don't need a static chain to access it. */
4871 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4872 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4873 {
4874 rtx addr;
4875
4876 /* Mark as non-local and addressable. */
4877 DECL_NONLOCAL (exp) = 1;
4878 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4879 abort ();
4880 mark_addressable (exp);
4881 if (GET_CODE (DECL_RTL (exp)) != MEM)
4882 abort ();
4883 addr = XEXP (DECL_RTL (exp), 0);
4884 if (GET_CODE (addr) == MEM)
4885 addr = gen_rtx (MEM, Pmode,
4886 fix_lexical_addr (XEXP (addr, 0), exp));
4887 else
4888 addr = fix_lexical_addr (addr, exp);
4889 temp = change_address (DECL_RTL (exp), mode, addr);
4890 }
4891
4892 /* This is the case of an array whose size is to be determined
4893 from its initializer, while the initializer is still being parsed.
4894 See expand_decl. */
4895
4896 else if (GET_CODE (DECL_RTL (exp)) == MEM
4897 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4898 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4899 XEXP (DECL_RTL (exp), 0));
4900
4901 /* If DECL_RTL is memory, we are in the normal case and either
4902 the address is not valid or it is not a register and -fforce-addr
4903 is specified, get the address into a register. */
4904
4905 else if (GET_CODE (DECL_RTL (exp)) == MEM
4906 && modifier != EXPAND_CONST_ADDRESS
4907 && modifier != EXPAND_SUM
4908 && modifier != EXPAND_INITIALIZER
4909 && (! memory_address_p (DECL_MODE (exp),
4910 XEXP (DECL_RTL (exp), 0))
4911 || (flag_force_addr
4912 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4913 temp = change_address (DECL_RTL (exp), VOIDmode,
4914 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4915
4916 /* If we got something, return it. But first, set the alignment
4917 if the address is a register. */
4918 if (temp != 0)
4919 {
4920 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4921 mark_reg_pointer (XEXP (temp, 0),
4922 DECL_ALIGN (exp) / BITS_PER_UNIT);
4923
4924 return temp;
4925 }
4926
4927 /* If the mode of DECL_RTL does not match that of the decl, it
4928 must be a promoted value. We return a SUBREG of the wanted mode,
4929 but mark it so that we know that it was already extended. */
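/* E.g., on a target whose PROMOTE_MODE widens QImode to SImode,
   DECL_RTL is (reg:SI N) while MODE is QImode, so we return
   (subreg:QI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set.
   (Illustrative, assumed target behavior.)  */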
4930
4931 if (GET_CODE (DECL_RTL (exp)) == REG
4932 && GET_MODE (DECL_RTL (exp)) != mode)
4933 {
4934 /* Get the signedness used for this variable. Ensure we get the
4935 same mode we got when the variable was declared. */
4936 if (GET_MODE (DECL_RTL (exp))
4937 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4938 abort ();
4939
4940 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4941 SUBREG_PROMOTED_VAR_P (temp) = 1;
4942 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4943 return temp;
4944 }
4945
4946 return DECL_RTL (exp);
4947
4948 case INTEGER_CST:
4949 return immed_double_const (TREE_INT_CST_LOW (exp),
4950 TREE_INT_CST_HIGH (exp),
4951 mode);
4952
4953 case CONST_DECL:
4954 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4955
4956 case REAL_CST:
4957 /* If optimized, generate immediate CONST_DOUBLE
4958 which will be turned into memory by reload if necessary.
4959
4960 We used to force a register so that loop.c could see it. But
4961 this does not allow gen_* patterns to perform optimizations with
4962 the constants. It also produces two insns in cases like "x = 1.0;".
4963 On most machines, floating-point constants are not permitted in
4964 many insns, so we'd end up copying it to a register in any case.
4965
4966 Now, we do the copying in expand_binop, if appropriate. */
4967 return immed_real_const (exp);
4968
4969 case COMPLEX_CST:
4970 case STRING_CST:
4971 if (! TREE_CST_RTL (exp))
4972 output_constant_def (exp);
4973
4974 /* TREE_CST_RTL probably contains a constant address.
4975 On RISC machines where a constant address isn't valid,
4976 make some insns to get that address into a register. */
4977 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4978 && modifier != EXPAND_CONST_ADDRESS
4979 && modifier != EXPAND_INITIALIZER
4980 && modifier != EXPAND_SUM
4981 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4982 || (flag_force_addr
4983 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4984 return change_address (TREE_CST_RTL (exp), VOIDmode,
4985 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4986 return TREE_CST_RTL (exp);
4987
4988 case SAVE_EXPR:
4989 context = decl_function_context (exp);
4990
4991 /* We treat inline_function_decl as an alias for the current function
4992 because that is the inline function whose vars, types, etc.
4993 are being merged into the current function.
4994 See expand_inline_function. */
4995 if (context == current_function_decl || context == inline_function_decl)
4996 context = 0;
4997
4998 /* If this is non-local, handle it. */
4999 if (context)
5000 {
5001 temp = SAVE_EXPR_RTL (exp);
5002 if (temp && GET_CODE (temp) == REG)
5003 {
5004 put_var_into_stack (exp);
5005 temp = SAVE_EXPR_RTL (exp);
5006 }
5007 if (temp == 0 || GET_CODE (temp) != MEM)
5008 abort ();
5009 return change_address (temp, mode,
5010 fix_lexical_addr (XEXP (temp, 0), exp));
5011 }
5012 if (SAVE_EXPR_RTL (exp) == 0)
5013 {
5014 if (mode == VOIDmode)
5015 temp = const0_rtx;
5016 else
5017 temp = assign_temp (type, 0, 0, 0);
5018
5019 SAVE_EXPR_RTL (exp) = temp;
5020 if (!optimize && GET_CODE (temp) == REG)
5021 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5022 save_expr_regs);
5023
5024 /* If the mode of TEMP does not match that of the expression, it
5025 must be a promoted value. We pass store_expr a SUBREG of the
5026 wanted mode but mark it so that we know that it was already
5027 extended. Note that `unsignedp' was modified above in
5028 this case. */
5029
5030 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5031 {
5032 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5033 SUBREG_PROMOTED_VAR_P (temp) = 1;
5034 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5035 }
5036
5037 if (temp == const0_rtx)
5038 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5039 else
5040 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5041 }
5042
5043 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5044 must be a promoted value. We return a SUBREG of the wanted mode,
5045 but mark it so that we know that it was already extended. */
5046
5047 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5048 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5049 {
5050 /* Compute the signedness and make the proper SUBREG. */
5051 promote_mode (type, mode, &unsignedp, 0);
5052 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5053 SUBREG_PROMOTED_VAR_P (temp) = 1;
5054 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5055 return temp;
5056 }
5057
5058 return SAVE_EXPR_RTL (exp);
5059
5060 case UNSAVE_EXPR:
5061 {
5062 rtx temp;
5063 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5064 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5065 return temp;
5066 }
5067
5068 case PLACEHOLDER_EXPR:
5069 /* If there is an object at the head of the placeholder list,
5070 see if some object in its references is of type TYPE. For
5071 further information, see tree.def. */
5072 if (placeholder_list)
5073 {
5074 tree object;
5075 tree old_list = placeholder_list;
5076
5077 for (object = TREE_PURPOSE (placeholder_list);
5078 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5079 != TYPE_MAIN_VARIANT (type))
5080 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
5081 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
5082 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
5083 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
5084 object = TREE_OPERAND (object, 0))
5085 ;
5086
5087 if (object != 0
5088 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5089 == TYPE_MAIN_VARIANT (type)))
5090 {
5091 /* Expand this object skipping the list entries before
5092 it was found in case it is also a PLACEHOLDER_EXPR.
5093 In that case, we want to translate it using subsequent
5094 entries. */
5095 placeholder_list = TREE_CHAIN (placeholder_list);
5096 temp = expand_expr (object, original_target, tmode, modifier);
5097 placeholder_list = old_list;
5098 return temp;
5099 }
5100 }
5101
5102 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5103 abort ();
5104
5105 case WITH_RECORD_EXPR:
5106 /* Put the object on the placeholder list, expand our first operand,
5107 and pop the list. */
5108 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5109 placeholder_list);
5110 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5111 tmode, modifier);
5112 placeholder_list = TREE_CHAIN (placeholder_list);
5113 return target;
5114
5115 case EXIT_EXPR:
5116 expand_exit_loop_if_false (NULL_PTR,
5117 invert_truthvalue (TREE_OPERAND (exp, 0)));
5118 return const0_rtx;
5119
5120 case LOOP_EXPR:
5121 push_temp_slots ();
5122 expand_start_loop (1);
5123 expand_expr_stmt (TREE_OPERAND (exp, 0));
5124 expand_end_loop ();
5125 pop_temp_slots ();
5126
5127 return const0_rtx;
5128
5129 case BIND_EXPR:
5130 {
5131 tree vars = TREE_OPERAND (exp, 0);
5132 int vars_need_expansion = 0;
5133
5134 /* Need to open a binding contour here because
5135 if there are any cleanups they must be contained here. */
5136 expand_start_bindings (0);
5137
5138 /* Mark the corresponding BLOCK for output in its proper place. */
5139 if (TREE_OPERAND (exp, 2) != 0
5140 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5141 insert_block (TREE_OPERAND (exp, 2));
5142
5143 /* If VARS have not yet been expanded, expand them now. */
5144 while (vars)
5145 {
5146 if (DECL_RTL (vars) == 0)
5147 {
5148 vars_need_expansion = 1;
5149 expand_decl (vars);
5150 }
5151 expand_decl_init (vars);
5152 vars = TREE_CHAIN (vars);
5153 }
5154
5155 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5156
5157 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5158
5159 return temp;
5160 }
5161
5162 case RTL_EXPR:
5163 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5164 abort ();
5165 emit_insns (RTL_EXPR_SEQUENCE (exp));
5166 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5167 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5168 free_temps_for_rtl_expr (exp);
5169 return RTL_EXPR_RTL (exp);
5170
5171 case CONSTRUCTOR:
5172 /* If we don't need the result, just ensure we evaluate any
5173 subexpressions. */
5174 if (ignore)
5175 {
5176 tree elt;
5177 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5178 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5179 return const0_rtx;
5180 }
5181
5182 /* All elts simple constants => refer to a constant in memory. But
5183 if this is a non-BLKmode mode, let it store a field at a time
5184 since that should make a CONST_INT or CONST_DOUBLE when we
5185 fold. Likewise, if we have a target we can use, it is best to
5186 store directly into the target unless the type is large enough
5187 that memcpy will be used. If we are making an initializer and
5188 all operands are constant, put it in memory as well. */
5189 else if ((TREE_STATIC (exp)
5190 && ((mode == BLKmode
5191 && ! (target != 0 && safe_from_p (target, exp)))
5192 || TREE_ADDRESSABLE (exp)
5193 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5194 && (move_by_pieces_ninsns
5195 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5196 TYPE_ALIGN (type) / BITS_PER_UNIT)
5197 > MOVE_RATIO)
5198 && ! mostly_zeros_p (exp))))
5199 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5200 {
5201 rtx constructor = output_constant_def (exp);
5202 if (modifier != EXPAND_CONST_ADDRESS
5203 && modifier != EXPAND_INITIALIZER
5204 && modifier != EXPAND_SUM
5205 && (! memory_address_p (GET_MODE (constructor),
5206 XEXP (constructor, 0))
5207 || (flag_force_addr
5208 && GET_CODE (XEXP (constructor, 0)) != REG)))
5209 constructor = change_address (constructor, VOIDmode,
5210 XEXP (constructor, 0));
5211 return constructor;
5212 }
5213
5214 else
5215 {
5216 if (target == 0 || ! safe_from_p (target, exp))
5217 {
5218 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5219 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5220 else
5221 target = assign_temp (type, 0, 1, 1);
5222 }
5223
5224 if (TREE_READONLY (exp))
5225 {
5226 if (GET_CODE (target) == MEM)
5227 target = change_address (target, GET_MODE (target),
5228 XEXP (target, 0));
5229 RTX_UNCHANGING_P (target) = 1;
5230 }
5231
5232 store_constructor (exp, target, 0);
5233 return target;
5234 }
5235
5236 case INDIRECT_REF:
5237 {
5238 tree exp1 = TREE_OPERAND (exp, 0);
5239 tree exp2;
5240
5241 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5242 op0 = memory_address (mode, op0);
5243
5244 temp = gen_rtx (MEM, mode, op0);
5245 /* If address was computed by addition,
5246 mark this as an element of an aggregate. */
5247 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5248 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5249 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5250 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5251 || (TREE_CODE (exp1) == ADDR_EXPR
5252 && (exp2 = TREE_OPERAND (exp1, 0))
5253 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5254 MEM_IN_STRUCT_P (temp) = 1;
5255 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5256
5257 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5258 here, because, in C and C++, the fact that a location is accessed
5259 through a pointer to const does not mean that the value there can
5260 never change. Languages where it can never change should
5261 also set TREE_STATIC. */
5262 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5263 return temp;
5264 }
5265
5266 case ARRAY_REF:
5267 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5268 abort ();
5269
5270 {
5271 tree array = TREE_OPERAND (exp, 0);
5272 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5273 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5274 tree index = TREE_OPERAND (exp, 1);
5275 tree index_type = TREE_TYPE (index);
5276 int i;
5277
5278 if (TREE_CODE (low_bound) != INTEGER_CST
5279 && contains_placeholder_p (low_bound))
5280 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5281
5282 /* Optimize the special-case of a zero lower bound.
5283
5284 We convert the low_bound to sizetype to avoid some problems
5285 with constant folding. (E.g. suppose the lower bound is 1,
5286 and its mode is QI. Without the conversion, (ARRAY
5287 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5288 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5289
5290 But sizetype isn't quite right either (especially if
5291 the lowbound is negative). FIXME */
5292
5293 if (! integer_zerop (low_bound))
5294 index = fold (build (MINUS_EXPR, index_type, index,
5295 convert (sizetype, low_bound)));
5296
5297 if ((TREE_CODE (index) != INTEGER_CST
5298 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5299 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5300 {
5301 /* Nonconstant array index or nonconstant element size, and
5302 not an array in an unaligned (packed) structure field.
5303 Generate the tree for *(&array+index) and expand that,
5304 except do it in a language-independent way
5305 and don't complain about non-lvalue arrays.
5306 `mark_addressable' should already have been called
5307 for any array for which this case will be reached. */
5308
5309 /* Don't forget the const or volatile flag from the array
5310 element. */
5311 tree variant_type = build_type_variant (type,
5312 TREE_READONLY (exp),
5313 TREE_THIS_VOLATILE (exp));
5314 tree array_adr = build1 (ADDR_EXPR,
5315 build_pointer_type (variant_type), array);
5316 tree elt;
5317 tree size = size_in_bytes (type);
5318
5319 /* Convert the integer argument to a type the same size as sizetype
5320 so the multiply won't overflow spuriously. */
5321 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5322 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5323 index);
5324
5325 if (TREE_CODE (size) != INTEGER_CST
5326 && contains_placeholder_p (size))
5327 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5328
5329 /* Don't think the address has side effects
5330 just because the array does.
5331 (In some cases the address might have side effects,
5332 and we fail to record that fact here. However, it should not
5333 matter, since expand_expr should not care.) */
5334 TREE_SIDE_EFFECTS (array_adr) = 0;
5335
5336 elt
5337 = build1
5338 (INDIRECT_REF, type,
5339 fold (build (PLUS_EXPR,
5340 TYPE_POINTER_TO (variant_type),
5341 array_adr,
5342 fold
5343 (build1
5344 (NOP_EXPR,
5345 TYPE_POINTER_TO (variant_type),
5346 fold (build (MULT_EXPR, TREE_TYPE (index),
5347 index,
5348 convert (TREE_TYPE (index),
5349 size)))))))));
5350
5351 /* Volatility, etc., of new expression is same as old
5352 expression. */
5353 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5354 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5355 TREE_READONLY (elt) = TREE_READONLY (exp);
5356
5357 return expand_expr (elt, target, tmode, modifier);
5358 }
5359
5360 /* Fold an expression like: "foo"[2].
5361 This is not done in fold so it won't happen inside &.
5362 Don't fold if this is for wide characters since it's too
5363 difficult to do correctly and this is a very rare case. */
5364
5365 if (TREE_CODE (array) == STRING_CST
5366 && TREE_CODE (index) == INTEGER_CST
5367 && !TREE_INT_CST_HIGH (index)
5368 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5369 && GET_MODE_CLASS (mode) == MODE_INT
5370 && GET_MODE_SIZE (mode) == 1)
5371 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5372
5373 /* If this is a constant index into a constant array,
5374 just get the value from the array. Handle both the cases when
5375 we have an explicit constructor and when our operand is a variable
5376 that was declared const. */
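/* For instance (editorial example): given `static const int tbl[] =
{10, 20, 30};', a use of tbl[2] is expanded below, when optimizing,
to the constant 30 with no memory reference. */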
5377
5378 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5379 {
5380 if (TREE_CODE (index) == INTEGER_CST
5381 && TREE_INT_CST_HIGH (index) == 0)
5382 {
5383 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5384
5385 i = TREE_INT_CST_LOW (index);
5386 while (elem && i--)
5387 elem = TREE_CHAIN (elem);
5388 if (elem)
5389 return expand_expr (fold (TREE_VALUE (elem)), target,
5390 tmode, modifier);
5391 }
5392 }
5393
5394 else if (optimize >= 1
5395 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5396 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5397 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5398 {
5399 if (TREE_CODE (index) == INTEGER_CST
5400 && TREE_INT_CST_HIGH (index) == 0)
5401 {
5402 tree init = DECL_INITIAL (array);
5403
5404 i = TREE_INT_CST_LOW (index);
5405 if (TREE_CODE (init) == CONSTRUCTOR)
5406 {
5407 tree elem = CONSTRUCTOR_ELTS (init);
5408
5409 while (elem
5410 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5411 elem = TREE_CHAIN (elem);
5412 if (elem)
5413 return expand_expr (fold (TREE_VALUE (elem)), target,
5414 tmode, modifier);
5415 }
5416 else if (TREE_CODE (init) == STRING_CST
5417 && i < TREE_STRING_LENGTH (init))
5418 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5419 }
5420 }
5421 }
5422
5423 /* Treat array-ref with constant index as a component-ref. */
5424
5425 case COMPONENT_REF:
5426 case BIT_FIELD_REF:
5427 /* If the operand is a CONSTRUCTOR, we can just extract the
5428 appropriate field if it is present. Don't do this if we have
5429 already written the data since we want to refer to that copy
5430 and varasm.c assumes that's what we'll do. */
5431 if (code != ARRAY_REF
5432 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5433 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5434 {
5435 tree elt;
5436
5437 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5438 elt = TREE_CHAIN (elt))
5439 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5440 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5441 }
5442
5443 {
5444 enum machine_mode mode1;
5445 int bitsize;
5446 int bitpos;
5447 tree offset;
5448 int volatilep = 0;
5449 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5450 &mode1, &unsignedp, &volatilep);
5451 int alignment;
5452
5453 /* If we got back the original object, something is wrong. Perhaps
5454 we are evaluating an expression too early. In any event, don't
5455 infinitely recurse. */
5456 if (tem == exp)
5457 abort ();
5458
5459 /* If TEM's type is a union of variable size, pass TARGET to the inner
5460 computation, since it will need a temporary and TARGET is known
5461 to suffice. This occurs in unchecked conversion in Ada. */
5462
5463 op0 = expand_expr (tem,
5464 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5465 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5466 != INTEGER_CST)
5467 ? target : NULL_RTX),
5468 VOIDmode,
5469 modifier == EXPAND_INITIALIZER ? modifier : 0);
5470
5471 /* If this is a constant, put it into a register if it is a
5472 legitimate constant and memory if it isn't. */
5473 if (CONSTANT_P (op0))
5474 {
5475 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5476 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5477 op0 = force_reg (mode, op0);
5478 else
5479 op0 = validize_mem (force_const_mem (mode, op0));
5480 }
5481
5482 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
5483 if (offset != 0)
5484 {
5485 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5486
5487 if (GET_CODE (op0) != MEM)
5488 abort ();
5489 op0 = change_address (op0, VOIDmode,
5490 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5491 force_reg (ptr_mode, offset_rtx)));
5492 /* If we have a variable offset, the known alignment
5493 is only that of the innermost structure containing the field.
5494 (Actually, we could sometimes do better by using the
5495 size of an element of the innermost array, but no need.) */
5496 if (TREE_CODE (exp) == COMPONENT_REF
5497 || TREE_CODE (exp) == BIT_FIELD_REF)
5498 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5499 / BITS_PER_UNIT);
5500 }
5501
5502 /* Don't forget about volatility even if this is a bitfield. */
5503 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5504 {
5505 op0 = copy_rtx (op0);
5506 MEM_VOLATILE_P (op0) = 1;
5507 }
5508
5509 /* In cases where an aligned union has an unaligned object
5510 as a field, we might be extracting a BLKmode value from
5511 an integer-mode (e.g., SImode) object. Handle this case
5512 by doing the extract into an object as wide as the field
5513 (which we know to be the width of a basic mode), then
5514 storing into memory, and changing the mode to BLKmode.
5515 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5516 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5517 if (mode1 == VOIDmode
5518 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5519 || (modifier != EXPAND_CONST_ADDRESS
5520 && modifier != EXPAND_INITIALIZER
5521 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
5522 /* If the field isn't aligned enough to fetch as a memref,
5523 fetch it as a bit field. */
5524 || (SLOW_UNALIGNED_ACCESS
5525 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5526 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5527 {
5528 enum machine_mode ext_mode = mode;
5529
5530 if (ext_mode == BLKmode)
5531 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5532
5533 if (ext_mode == BLKmode)
5534 {
5535 /* In this case, BITPOS must start at a byte boundary and
5536 TARGET, if specified, must be a MEM. */
5537 if (GET_CODE (op0) != MEM
5538 || (target != 0 && GET_CODE (target) != MEM)
5539 || bitpos % BITS_PER_UNIT != 0)
5540 abort ();
5541
5542 op0 = change_address (op0, VOIDmode,
5543 plus_constant (XEXP (op0, 0),
5544 bitpos / BITS_PER_UNIT));
5545 if (target == 0)
5546 target = assign_temp (type, 0, 1, 1);
5547
5548 emit_block_move (target, op0,
5549 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5550 / BITS_PER_UNIT),
5551 1);
5552
5553 return target;
5554 }
5555
5556 op0 = validize_mem (op0);
5557
5558 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5559 mark_reg_pointer (XEXP (op0, 0), alignment);
5560
5561 op0 = extract_bit_field (op0, bitsize, bitpos,
5562 unsignedp, target, ext_mode, ext_mode,
5563 alignment,
5564 int_size_in_bytes (TREE_TYPE (tem)));
5565
5566 /* If the result is a record type and BITSIZE is narrower than
5567 the mode of OP0, an integral mode, and this is a big endian
5568 machine, we must put the field into the high-order bits. */
5569 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5570 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5571 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5572 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5573 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5574 - bitsize),
5575 op0, 1);
5576
5577 if (mode == BLKmode)
5578 {
5579 rtx new = assign_stack_temp (ext_mode,
5580 bitsize / BITS_PER_UNIT, 0);
5581
5582 emit_move_insn (new, op0);
5583 op0 = copy_rtx (new);
5584 PUT_MODE (op0, BLKmode);
5585 MEM_IN_STRUCT_P (op0) = 1;
5586 }
5587
5588 return op0;
5589 }
5590
5591 /* If the result is BLKmode, use that to access the object
5592 now as well. */
5593 if (mode == BLKmode)
5594 mode1 = BLKmode;
5595
5596 /* Get a reference to just this component. */
5597 if (modifier == EXPAND_CONST_ADDRESS
5598 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5599 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5600 (bitpos / BITS_PER_UNIT)));
5601 else
5602 op0 = change_address (op0, mode1,
5603 plus_constant (XEXP (op0, 0),
5604 (bitpos / BITS_PER_UNIT)));
5605 if (GET_CODE (XEXP (op0, 0)) == REG)
5606 mark_reg_pointer (XEXP (op0, 0), alignment);
5607
5608 MEM_IN_STRUCT_P (op0) = 1;
5609 MEM_VOLATILE_P (op0) |= volatilep;
5610 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5611 return op0;
5612 if (target == 0)
5613 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5614 convert_move (target, op0, unsignedp);
5615 return target;
5616 }
5617
5618 /* Intended for a reference to a buffer of a file-object in Pascal.
5619 But it's not certain that a special tree code will really be
5620 necessary for these. INDIRECT_REF might work for them. */
5621 case BUFFER_REF:
5622 abort ();
5623
5624 case IN_EXPR:
5625 {
5626 /* Pascal set IN expression.
5627
5628 Algorithm:
5629 rlo = set_low - (set_low%bits_per_word);
5630 the_word = set [ (index - rlo)/bits_per_word ];
5631 bit_index = index % bits_per_word;
5632 bitmask = 1 << bit_index;
5633 return !!(the_word & bitmask); */
5634
5635 tree set = TREE_OPERAND (exp, 0);
5636 tree index = TREE_OPERAND (exp, 1);
5637 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5638 tree set_type = TREE_TYPE (set);
5639 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5640 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5641 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5642 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5643 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5644 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5645 rtx setaddr = XEXP (setval, 0);
5646 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5647 rtx rlow;
5648 rtx diff, quo, rem, addr, bit, result;
5649
5650 preexpand_calls (exp);
5651
5652 /* If the domain is empty, the answer is no. Likewise if the index
5653 is constant and out of bounds. */
5654 if (((TREE_CODE (set_high_bound) == INTEGER_CST
5655 && TREE_CODE (set_low_bound) == INTEGER_CST
5656 && tree_int_cst_lt (set_high_bound, set_low_bound))
5657 || (TREE_CODE (index) == INTEGER_CST
5658 && TREE_CODE (set_low_bound) == INTEGER_CST
5659 && tree_int_cst_lt (index, set_low_bound))
5660 || (TREE_CODE (set_high_bound) == INTEGER_CST
5661 && TREE_CODE (index) == INTEGER_CST
5662 && tree_int_cst_lt (set_high_bound, index))))
5663 return const0_rtx;
5664
5665 if (target == 0)
5666 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5667
5668 /* If we get here, we have to generate the code for both cases
5669 (in range and out of range). */
5670
5671 op0 = gen_label_rtx ();
5672 op1 = gen_label_rtx ();
5673
5674 if (! (GET_CODE (index_val) == CONST_INT
5675 && GET_CODE (lo_r) == CONST_INT))
5676 {
5677 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5678 GET_MODE (index_val), iunsignedp, 0);
5679 emit_jump_insn (gen_blt (op1));
5680 }
5681
5682 if (! (GET_CODE (index_val) == CONST_INT
5683 && GET_CODE (hi_r) == CONST_INT))
5684 {
5685 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5686 GET_MODE (index_val), iunsignedp, 0);
5687 emit_jump_insn (gen_bgt (op1));
5688 }
5689
5690 /* Calculate the element number of bit zero in the first word
5691 of the set. */
5692 if (GET_CODE (lo_r) == CONST_INT)
5693 rlow = GEN_INT (INTVAL (lo_r)
5694 & ~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1));
5695 else
5696 rlow = expand_binop (index_mode, and_optab, lo_r,
5697 GEN_INT (~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1)),
5698 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5699
5700 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5701 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5702
5703 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5704 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5705 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5706 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5707
5708 addr = memory_address (byte_mode,
5709 expand_binop (index_mode, add_optab, diff,
5710 setaddr, NULL_RTX, iunsignedp,
5711 OPTAB_LIB_WIDEN));
5712
5713 /* Extract the bit we want to examine. */
5714 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5715 gen_rtx (MEM, byte_mode, addr),
5716 make_tree (TREE_TYPE (index), rem),
5717 NULL_RTX, 1);
5718 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5719 GET_MODE (target) == byte_mode ? target : 0,
5720 1, OPTAB_LIB_WIDEN);
5721
5722 if (result != target)
5723 convert_move (target, result, 1);
5724
5725 /* Output the code to handle the out-of-range case. */
5726 emit_jump (op0);
5727 emit_label (op1);
5728 emit_move_insn (target, const0_rtx);
5729 emit_label (op0);
5730 return target;
5731 }
5732
5733 case WITH_CLEANUP_EXPR:
5734 if (RTL_EXPR_RTL (exp) == 0)
5735 {
5736 RTL_EXPR_RTL (exp)
5737 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5738 cleanups_this_call
5739 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5740 /* That's it for this cleanup. */
5741 TREE_OPERAND (exp, 2) = 0;
5742 expand_eh_region_start ();
5743 }
5744 return RTL_EXPR_RTL (exp);
5745
5746 case CLEANUP_POINT_EXPR:
5747 {
5748 extern int temp_slot_level;
5749 tree old_cleanups = cleanups_this_call;
5750 int old_temp_level = target_temp_slot_level;
5751 push_temp_slots ();
5752 target_temp_slot_level = temp_slot_level;
5753 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5754 /* If we're going to use this value, load it up now. */
5755 if (! ignore)
5756 op0 = force_not_mem (op0);
5757 expand_cleanups_to (old_cleanups);
5758 preserve_temp_slots (op0);
5759 free_temp_slots ();
5760 pop_temp_slots ();
5761 target_temp_slot_level = old_temp_level;
5762 }
5763 return op0;
5764
5765 case CALL_EXPR:
5766 /* Check for a built-in function. */
5767 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5768 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5769 == FUNCTION_DECL)
5770 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5771 return expand_builtin (exp, target, subtarget, tmode, ignore);
5772
5773 /* If this call was expanded already by preexpand_calls,
5774 just return the result we got. */
5775 if (CALL_EXPR_RTL (exp) != 0)
5776 return CALL_EXPR_RTL (exp);
5777
5778 return expand_call (exp, target, ignore);
5779
5780 case NON_LVALUE_EXPR:
5781 case NOP_EXPR:
5782 case CONVERT_EXPR:
5783 case REFERENCE_EXPR:
5784 if (TREE_CODE (type) == UNION_TYPE)
5785 {
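/* A conversion to a union type stores the operand at the beginning
of the union, e.g. the GNU C extension `(union u) x' -- all union
members start at offset 0, so storing X there initializes the
member of X's type. */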
5786 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5787 if (target == 0)
5788 {
5789 if (mode != BLKmode)
5790 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5791 else
5792 target = assign_temp (type, 0, 1, 1);
5793 }
5794
5795 if (GET_CODE (target) == MEM)
5796 /* Store data into beginning of memory target. */
5797 store_expr (TREE_OPERAND (exp, 0),
5798 change_address (target, TYPE_MODE (valtype), 0), 0);
5799
5800 else if (GET_CODE (target) == REG)
5801 /* Store this field into a union of the proper type. */
5802 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5803 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5804 VOIDmode, 0, 1,
5805 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5806 else
5807 abort ();
5808
5809 /* Return the entire union. */
5810 return target;
5811 }
5812
5813 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5814 {
5815 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5816 modifier);
5817
5818 /* If the signedness of the conversion differs and OP0 is
5819 a promoted SUBREG, clear that indication since we now
5820 have to do the proper extension. */
5821 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5822 && GET_CODE (op0) == SUBREG)
5823 SUBREG_PROMOTED_VAR_P (op0) = 0;
5824
5825 return op0;
5826 }
5827
5828 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5829 if (GET_MODE (op0) == mode)
5830 return op0;
5831
5832 /* If OP0 is a constant, just convert it into the proper mode. */
5833 if (CONSTANT_P (op0))
5834 return
5835 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5836 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5837
5838 if (modifier == EXPAND_INITIALIZER)
5839 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5840
5841 if (target == 0)
5842 return
5843 convert_to_mode (mode, op0,
5844 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5845 else
5846 convert_move (target, op0,
5847 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5848 return target;
5849
5850 case PLUS_EXPR:
5851 /* We come here from MINUS_EXPR when the second operand is a
5852 constant. */
5853 plus_expr:
5854 this_optab = add_optab;
5855
5856 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5857 something else, make sure we add the register to the constant and
5858 then to the other thing. This case can occur during strength
5859 reduction and doing it this way will produce better code if the
5860 frame pointer or argument pointer is eliminated.
5861
5862 fold-const.c will ensure that the constant is always in the inner
5863 PLUS_EXPR, so the only case we need to do anything about is if
5864 sp, ap, or fp is our second argument, in which case we must swap
5865 the innermost first argument and our second argument. */
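/* For example, (X + C) + FP is rewritten here as (FP + C) + X, so
that when FP is later eliminated its replacement offset folds
into C. */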
5866
5867 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5868 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5869 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5870 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5871 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5872 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5873 {
5874 tree t = TREE_OPERAND (exp, 1);
5875
5876 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5877 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5878 }
5879
5880 /* If the result is to be ptr_mode and we are adding an integer to
5881 something, we might be forming a constant. So try to use
5882 plus_constant. If it produces a sum and we can't accept it,
5883 use force_operand. This allows P = &ARR[const] to generate
5884 efficient code on machines where a SYMBOL_REF is not a valid
5885 address.
5886
5887 If this is an EXPAND_SUM call, always return the sum. */
5888 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5889 || mode == ptr_mode)
5890 {
5891 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5892 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5893 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5894 {
5895 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5896 EXPAND_SUM);
5897 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5898 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5899 op1 = force_operand (op1, target);
5900 return op1;
5901 }
5902
5903 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5904 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5905 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5906 {
5907 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5908 EXPAND_SUM);
5909 if (! CONSTANT_P (op0))
5910 {
5911 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5912 VOIDmode, modifier);
5913 /* Don't go to both_summands if modifier
5914 says it's not right to return a PLUS. */
5915 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5916 goto binop2;
5917 goto both_summands;
5918 }
5919 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5920 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5921 op0 = force_operand (op0, target);
5922 return op0;
5923 }
5924 }
5925
5926 /* No sense saving up arithmetic to be done
5927 if it's all in the wrong mode to form part of an address.
5928 And force_operand won't know whether to sign-extend or
5929 zero-extend. */
5930 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5931 || mode != ptr_mode)
5932 goto binop;
5933
5934 preexpand_calls (exp);
5935 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5936 subtarget = 0;
5937
5938 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5939 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5940
5941 both_summands:
5942 /* Make sure any term that's a sum with a constant comes last. */
5943 if (GET_CODE (op0) == PLUS
5944 && CONSTANT_P (XEXP (op0, 1)))
5945 {
5946 temp = op0;
5947 op0 = op1;
5948 op1 = temp;
5949 }
5950 /* If adding to a sum including a constant,
5951 associate it to put the constant outside. */
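/* E.g. OP0 + (A + 4) is reassociated below to (OP0 + A) + 4, and any
constant hiding inside OP0 is pulled out and folded into the 4. */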
5952 if (GET_CODE (op1) == PLUS
5953 && CONSTANT_P (XEXP (op1, 1)))
5954 {
5955 rtx constant_term = const0_rtx;
5956
5957 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5958 if (temp != 0)
5959 op0 = temp;
5960 /* Ensure that MULT comes first if there is one. */
5961 else if (GET_CODE (op0) == MULT)
5962 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5963 else
5964 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5965
5966 /* Let's also eliminate constants from op0 if possible. */
5967 op0 = eliminate_constant_term (op0, &constant_term);
5968
5969 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5970 their sum should be a constant. Form it into OP1, since the
5971 result we want will then be OP0 + OP1. */
5972
5973 temp = simplify_binary_operation (PLUS, mode, constant_term,
5974 XEXP (op1, 1));
5975 if (temp != 0)
5976 op1 = temp;
5977 else
5978 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5979 }
5980
5981 /* Put a constant term last and put a multiplication first. */
5982 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5983 temp = op1, op1 = op0, op0 = temp;
5984
5985 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5986 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5987
5988 case MINUS_EXPR:
5989 /* For initializers, we are allowed to return a MINUS of two
5990 symbolic constants. Here we handle all cases when both operands
5991 are constant. */
5994 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5995 && really_constant_p (TREE_OPERAND (exp, 0))
5996 && really_constant_p (TREE_OPERAND (exp, 1)))
5997 {
5998 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5999 VOIDmode, modifier);
6000 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6001 VOIDmode, modifier);
6002
6003 /* If the last operand is a CONST_INT, use plus_constant of
6004 the negated constant. Else make the MINUS. */
6005 if (GET_CODE (op1) == CONST_INT)
6006 return plus_constant (op0, - INTVAL (op1));
6007 else
6008 return gen_rtx (MINUS, mode, op0, op1);
6009 }
6010 /* Convert A - const to A + (-const). */
6011 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6012 {
6013 tree negated = fold (build1 (NEGATE_EXPR, type,
6014 TREE_OPERAND (exp, 1)));
6015
6016 /* Deal with the case where we can't negate the constant
6017 in TYPE. */
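/* E.g. for unsigned X, X - 3 cannot negate 3 in the unsigned type,
so it is rewritten as (unsigned) ((signed) X + -3). */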
6018 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6019 {
6020 tree newtype = signed_type (type);
6021 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6022 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6023 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6024
6025 if (! TREE_OVERFLOW (newneg))
6026 return expand_expr (convert (type,
6027 build (PLUS_EXPR, newtype,
6028 newop0, newneg)),
6029 target, tmode, modifier);
6030 }
6031 else
6032 {
6033 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6034 goto plus_expr;
6035 }
6036 }
6037 this_optab = sub_optab;
6038 goto binop;
6039
6040 case MULT_EXPR:
6041 preexpand_calls (exp);
6042 /* If first operand is constant, swap them.
6043 Thus the following special case checks need only
6044 check the second operand. */
6045 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6046 {
6047 register tree t1 = TREE_OPERAND (exp, 0);
6048 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6049 TREE_OPERAND (exp, 1) = t1;
6050 }
6051
6052 /* Attempt to return something suitable for generating an
6053 indexed address, for machines that support that. */
6054
6055 if (modifier == EXPAND_SUM && mode == ptr_mode
6056 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6057 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6058 {
6059 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6060
6061 /* Apply distributive law if OP0 is x+c. */
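/* E.g. (X + 4) * 12 becomes X*12 + 48, preserving the sum form that
an EXPAND_SUM caller wants for address arithmetic. */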
6062 if (GET_CODE (op0) == PLUS
6063 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6064 return gen_rtx (PLUS, mode,
6065 gen_rtx (MULT, mode, XEXP (op0, 0),
6066 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6067 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6068 * INTVAL (XEXP (op0, 1))));
6069
6070 if (GET_CODE (op0) != REG)
6071 op0 = force_operand (op0, NULL_RTX);
6072 if (GET_CODE (op0) != REG)
6073 op0 = copy_to_mode_reg (mode, op0);
6074
6075 return gen_rtx (MULT, mode, op0,
6076 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6077 }
6078
6079 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6080 subtarget = 0;
6081
6082 /* Check for multiplying things that have been extended
6083 from a narrower type. If this machine supports multiplying
6084 in that narrower type with a result in the desired type,
6085 do it that way, and avoid the explicit type-conversion. */
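/* For example, a product of two HImode values widened to SImode can
use a single widening multiply (the mulhisi3 pattern, on targets
that provide it) instead of two extensions and an SImode multiply. */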
6086 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6087 && TREE_CODE (type) == INTEGER_TYPE
6088 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6089 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6090 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6091 && int_fits_type_p (TREE_OPERAND (exp, 1),
6092 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6093 /* Don't use a widening multiply if a shift will do. */
6094 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6095 > HOST_BITS_PER_WIDE_INT)
6096 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6097 ||
6098 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6099 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6100 ==
6101 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6102 /* If both operands are extended, they must either both
6103 be zero-extended or both be sign-extended. */
6104 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6105 ==
6106 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6107 {
6108 enum machine_mode innermode
6109 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6110 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6111 ? smul_widen_optab : umul_widen_optab);
6112 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6113 ? umul_widen_optab : smul_widen_optab);
6114 if (mode == GET_MODE_WIDER_MODE (innermode))
6115 {
6116 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6117 {
6118 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6119 NULL_RTX, VOIDmode, 0);
6120 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6121 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6122 VOIDmode, 0);
6123 else
6124 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6125 NULL_RTX, VOIDmode, 0);
6126 goto binop2;
6127 }
6128 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6129 && innermode == word_mode)
6130 {
6131 rtx htem;
6132 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6133 NULL_RTX, VOIDmode, 0);
6134 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6135 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6136 VOIDmode, 0);
6137 else
6138 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6139 NULL_RTX, VOIDmode, 0);
6140 temp = expand_binop (mode, other_optab, op0, op1, target,
6141 unsignedp, OPTAB_LIB_WIDEN);
6142 htem = expand_mult_highpart_adjust (innermode,
6143 gen_highpart (innermode, temp),
6144 op0, op1,
6145 gen_highpart (innermode, temp),
6146 unsignedp);
6147 emit_move_insn (gen_highpart (innermode, temp), htem);
6148 return temp;
6149 }
6150 }
6151 }
6152 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6153 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6154 return expand_mult (mode, op0, op1, target, unsignedp);
6155
6156 case TRUNC_DIV_EXPR:
6157 case FLOOR_DIV_EXPR:
6158 case CEIL_DIV_EXPR:
6159 case ROUND_DIV_EXPR:
6160 case EXACT_DIV_EXPR:
6161 preexpand_calls (exp);
6162 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6163 subtarget = 0;
6164 /* Possible optimization: compute the dividend with EXPAND_SUM;
6165 then, if the divisor is constant, we can optimize the case
6166 where some terms of the dividend have coefficients divisible by it. */
6167 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6168 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6169 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6170
6171 case RDIV_EXPR:
6172 this_optab = flodiv_optab;
6173 goto binop;
6174
6175 case TRUNC_MOD_EXPR:
6176 case FLOOR_MOD_EXPR:
6177 case CEIL_MOD_EXPR:
6178 case ROUND_MOD_EXPR:
6179 preexpand_calls (exp);
6180 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6181 subtarget = 0;
6182 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6183 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6184 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6185
6186 case FIX_ROUND_EXPR:
6187 case FIX_FLOOR_EXPR:
6188 case FIX_CEIL_EXPR:
6189 abort (); /* Not used for C. */
6190
6191 case FIX_TRUNC_EXPR:
6192 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6193 if (target == 0)
6194 target = gen_reg_rtx (mode);
6195 expand_fix (target, op0, unsignedp);
6196 return target;
6197
6198 case FLOAT_EXPR:
6199 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6200 if (target == 0)
6201 target = gen_reg_rtx (mode);
6202 /* expand_float can't figure out what to do if FROM has VOIDmode.
6203 So give it the correct mode. With -O, cse will optimize this. */
6204 if (GET_MODE (op0) == VOIDmode)
6205 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6206 op0);
6207 expand_float (target, op0,
6208 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6209 return target;
6210
6211 case NEGATE_EXPR:
6212 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6213 temp = expand_unop (mode, neg_optab, op0, target, 0);
6214 if (temp == 0)
6215 abort ();
6216 return temp;
6217
6218 case ABS_EXPR:
6219 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6220
6221 /* Handle complex values specially. */
6222 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6223 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6224 return expand_complex_abs (mode, op0, target, unsignedp);
6225
6226 /* Unsigned abs is simply the operand. Testing here means we don't
6227 risk generating incorrect code below. */
6228 if (TREE_UNSIGNED (type))
6229 return op0;
6230
6231 return expand_abs (mode, op0, target, unsignedp,
6232 safe_from_p (target, TREE_OPERAND (exp, 0)));
6233
6234 case MAX_EXPR:
6235 case MIN_EXPR:
6236 target = original_target;
6237 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6238 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6239 || GET_MODE (target) != mode
6240 || (GET_CODE (target) == REG
6241 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6242 target = gen_reg_rtx (mode);
6243 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6244 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6245
6246 /* First try to do it with a special MIN or MAX instruction.
6247 If that does not win, use a conditional jump to select the proper
6248 value. */
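/* Schematically, the fallback for MAX (A, B) emitted below is
target = A;
if (target >= B) goto lab;
target = B;
lab:
with the unsigned comparison used when the operands are unsigned. */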
6249 this_optab = (TREE_UNSIGNED (type)
6250 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6251 : (code == MIN_EXPR ? smin_optab : smax_optab));
6252
6253 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6254 OPTAB_WIDEN);
6255 if (temp != 0)
6256 return temp;
6257
6258 /* At this point, a MEM target is no longer useful; we will get better
6259 code without it. */
6260
6261 if (GET_CODE (target) == MEM)
6262 target = gen_reg_rtx (mode);
6263
6264 if (target != op0)
6265 emit_move_insn (target, op0);
6266
6267 op0 = gen_label_rtx ();
6268
6269 /* If this mode is an integer too wide to compare properly,
6270 compare word by word. Rely on cse to optimize constant cases. */
6271 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6272 {
6273 if (code == MAX_EXPR)
6274 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6275 target, op1, NULL_RTX, op0);
6276 else
6277 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6278 op1, target, NULL_RTX, op0);
6279 emit_move_insn (target, op1);
6280 }
6281 else
6282 {
6283 if (code == MAX_EXPR)
6284 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6285 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6286 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6287 else
6288 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6289 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6290 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6291 if (temp == const0_rtx)
6292 emit_move_insn (target, op1);
6293 else if (temp != const_true_rtx)
6294 {
6295 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6296 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6297 else
6298 abort ();
6299 emit_move_insn (target, op1);
6300 }
6301 }
6302 emit_label (op0);
6303 return target;
6304
6305 case BIT_NOT_EXPR:
6306 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6307 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6308 if (temp == 0)
6309 abort ();
6310 return temp;
6311
6312 case FFS_EXPR:
6313 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6314 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6315 if (temp == 0)
6316 abort ();
6317 return temp;
6318
6319 /* ??? Can optimize bitwise operations with one arg constant.
6320 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6321 and (a bitwise1 b) bitwise2 b (etc)
6322 but that is probably not worthwhile. */
6323
6324 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6325 boolean values when we want in all cases to compute both of them. In
6326 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6327 as actual zero-or-1 values and then bitwise anding. In cases where
6328 there cannot be any side effects, better code would be made by
6329 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6330 how to recognize those cases. */
6331
6332 case TRUTH_AND_EXPR:
6333 case BIT_AND_EXPR:
6334 this_optab = and_optab;
6335 goto binop;
6336
6337 case TRUTH_OR_EXPR:
6338 case BIT_IOR_EXPR:
6339 this_optab = ior_optab;
6340 goto binop;
6341
6342 case TRUTH_XOR_EXPR:
6343 case BIT_XOR_EXPR:
6344 this_optab = xor_optab;
6345 goto binop;
6346
6347 case LSHIFT_EXPR:
6348 case RSHIFT_EXPR:
6349 case LROTATE_EXPR:
6350 case RROTATE_EXPR:
6351 preexpand_calls (exp);
6352 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6353 subtarget = 0;
6354 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6355 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6356 unsignedp);
6357
6358 /* Could determine the answer when only additive constants differ. Also,
6359 the addition of one can be handled by changing the condition. */
6360 case LT_EXPR:
6361 case LE_EXPR:
6362 case GT_EXPR:
6363 case GE_EXPR:
6364 case EQ_EXPR:
6365 case NE_EXPR:
6366 preexpand_calls (exp);
6367 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6368 if (temp != 0)
6369 return temp;
6370
6371 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
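/* That is, emit
temp = foo;
if (temp == 0) goto lab;
temp = 1;
lab: */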
6372 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6373 && original_target
6374 && GET_CODE (original_target) == REG
6375 && (GET_MODE (original_target)
6376 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6377 {
6378 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6379 VOIDmode, 0);
6380
6381 if (temp != original_target)
6382 temp = copy_to_reg (temp);
6383
6384 op1 = gen_label_rtx ();
6385 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6386 GET_MODE (temp), unsignedp, 0);
6387 emit_jump_insn (gen_beq (op1));
6388 emit_move_insn (temp, const1_rtx);
6389 emit_label (op1);
6390 return temp;
6391 }
6392
6393 /* If no set-flag instruction, must generate a conditional
6394 store into a temporary variable. Drop through
6395 and handle this like && and ||. */
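/* The conditional store emitted below is simply
target = 0;
if (! exp) goto lab;
target = 1;
lab: */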
6396
6397 case TRUTH_ANDIF_EXPR:
6398 case TRUTH_ORIF_EXPR:
6399 if (! ignore
6400 && (target == 0 || ! safe_from_p (target, exp)
6401 /* Make sure we don't have a hard reg (such as function's return
6402 value) live across basic blocks, if not optimizing. */
6403 || (!optimize && GET_CODE (target) == REG
6404 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6405 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6406
6407 if (target)
6408 emit_clr_insn (target);
6409
6410 op1 = gen_label_rtx ();
6411 jumpifnot (exp, op1);
6412
6413 if (target)
6414 emit_0_to_1_insn (target);
6415
6416 emit_label (op1);
6417 return ignore ? const0_rtx : target;
6418
6419 case TRUTH_NOT_EXPR:
6420 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6421 /* The parser is careful to generate TRUTH_NOT_EXPR
6422 only with operands that are always zero or one. */
6423 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6424 target, 1, OPTAB_LIB_WIDEN);
6425 if (temp == 0)
6426 abort ();
6427 return temp;
6428
6429 case COMPOUND_EXPR:
6430 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6431 emit_queue ();
6432 return expand_expr (TREE_OPERAND (exp, 1),
6433 (ignore ? const0_rtx : target),
6434 VOIDmode, 0);
6435
6436 case COND_EXPR:
6437 {
6438 rtx flag = NULL_RTX;
6439 tree left_cleanups = NULL_TREE;
6440 tree right_cleanups = NULL_TREE;
6441
6442 /* Used to save a pointer to the place to put the setting of
6443 the flag that indicates if this side of the conditional was
6444 taken. We backpatch the code if we find out later that we
6445 have any conditional cleanups that need to be performed. */
6446 rtx dest_right_flag = NULL_RTX;
6447 rtx dest_left_flag = NULL_RTX;
6448
6449 /* Note that COND_EXPRs whose type is a structure or union
6450 are required to be constructed to contain assignments of
6451 a temporary variable, so that we can evaluate them here
6452 for side effect only. If type is void, we must do likewise. */
6453
6454 /* If an arm of the branch requires a cleanup,
6455 only that cleanup is performed. */
6456
6457 tree singleton = 0;
6458 tree binary_op = 0, unary_op = 0;
6459 tree old_cleanups = cleanups_this_call;
6460
6461 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6462 convert it to our mode, if necessary. */
6463 if (integer_onep (TREE_OPERAND (exp, 1))
6464 && integer_zerop (TREE_OPERAND (exp, 2))
6465 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6466 {
6467 if (ignore)
6468 {
6469 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6470 modifier);
6471 return const0_rtx;
6472 }
6473
6474 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6475 if (GET_MODE (op0) == mode)
6476 return op0;
6477
6478 if (target == 0)
6479 target = gen_reg_rtx (mode);
6480 convert_move (target, op0, unsignedp);
6481 return target;
6482 }
6483
6484 /* Check for X ? A + B : A. If we have this, we can copy
6485 A to the output and conditionally add B. Similarly for unary
6486 operations. Don't do this if X has side-effects because
6487 those side effects might affect A or B and the "?" operation is
6488 a sequence point in ANSI. (We test for side effects later.) */
6489
6490 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6491 && operand_equal_p (TREE_OPERAND (exp, 2),
6492 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6493 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6494 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6495 && operand_equal_p (TREE_OPERAND (exp, 1),
6496 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6497 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6498 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6499 && operand_equal_p (TREE_OPERAND (exp, 2),
6500 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6501 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6502 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6503 && operand_equal_p (TREE_OPERAND (exp, 1),
6504 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6505 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6506
6507 /* If we are not to produce a result, we have no target. Otherwise,
6508 if a target was specified use it; it will not be used as an
6509 intermediate target unless it is safe. If no target, use a
6510 temporary. */
6511
6512 if (ignore)
6513 temp = 0;
6514 else if (original_target
6515 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6516 || (singleton && GET_CODE (original_target) == REG
6517 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6518 && original_target == var_rtx (singleton)))
6519 && GET_MODE (original_target) == mode
6520 && ! (GET_CODE (original_target) == MEM
6521 && MEM_VOLATILE_P (original_target)))
6522 temp = original_target;
6523 else if (TREE_ADDRESSABLE (type))
6524 abort ();
6525 else
6526 temp = assign_temp (type, 0, 0, 1);
6527
6528 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6529 operation, do this as A + (X != 0). Similarly for other simple
6530 binary operators. */
6531 if (temp && singleton && binary_op
6532 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6533 && (TREE_CODE (binary_op) == PLUS_EXPR
6534 || TREE_CODE (binary_op) == MINUS_EXPR
6535 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6536 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6537 && integer_onep (TREE_OPERAND (binary_op, 1))
6538 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6539 {
6540 rtx result;
6541 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6542 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6543 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6544 : xor_optab);
6545
6546 /* If we had X ? A : A + 1, do this as A + (X == 0).
6547
6548 We have to invert the truth value here and then put it
6549 back later if do_store_flag fails. We cannot simply copy
6550 TREE_OPERAND (exp, 0) to another variable and modify that
6551 because invert_truthvalue can modify the tree pointed to
6552 by its argument. */
6553 if (singleton == TREE_OPERAND (exp, 1))
6554 TREE_OPERAND (exp, 0)
6555 = invert_truthvalue (TREE_OPERAND (exp, 0));
6556
6557 result = do_store_flag (TREE_OPERAND (exp, 0),
6558 (safe_from_p (temp, singleton)
6559 ? temp : NULL_RTX),
6560 mode, BRANCH_COST <= 1);
6561
6562 if (result)
6563 {
6564 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6565 return expand_binop (mode, boptab, op1, result, temp,
6566 unsignedp, OPTAB_LIB_WIDEN);
6567 }
6568 else if (singleton == TREE_OPERAND (exp, 1))
6569 TREE_OPERAND (exp, 0)
6570 = invert_truthvalue (TREE_OPERAND (exp, 0));
6571 }
6572
6573 do_pending_stack_adjust ();
6574 NO_DEFER_POP;
6575 op0 = gen_label_rtx ();
6576
6577 flag = gen_reg_rtx (word_mode);
6578 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6579 {
6580 if (temp != 0)
6581 {
6582 /* If the target conflicts with the other operand of the
6583 binary op, we can't use it. Also, we can't use the target
6584 if it is a hard register, because evaluating the condition
6585 might clobber it. */
6586 if ((binary_op
6587 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6588 || (GET_CODE (temp) == REG
6589 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6590 temp = gen_reg_rtx (mode);
6591 store_expr (singleton, temp, 0);
6592 }
6593 else
6594 expand_expr (singleton,
6595 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6596 dest_left_flag = get_last_insn ();
6597 if (singleton == TREE_OPERAND (exp, 1))
6598 jumpif (TREE_OPERAND (exp, 0), op0);
6599 else
6600 jumpifnot (TREE_OPERAND (exp, 0), op0);
6601
6602 /* Allows cleanups up to here. */
6603 old_cleanups = cleanups_this_call;
6604 if (binary_op && temp == 0)
6605 /* Just touch the other operand. */
6606 expand_expr (TREE_OPERAND (binary_op, 1),
6607 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6608 else if (binary_op)
6609 store_expr (build (TREE_CODE (binary_op), type,
6610 make_tree (type, temp),
6611 TREE_OPERAND (binary_op, 1)),
6612 temp, 0);
6613 else
6614 store_expr (build1 (TREE_CODE (unary_op), type,
6615 make_tree (type, temp)),
6616 temp, 0);
6617 op1 = op0;
6618 dest_right_flag = get_last_insn ();
6619 }
6620 #if 0
6621 /* This is now done in jump.c and is better done there because it
6622 produces shorter register lifetimes. */
6623
6624 /* Check for both possibilities either constants or variables
6625 in registers (but not the same as the target!). If so, can
6626 save branches by assigning one, branching, and assigning the
6627 other. */
6628 else if (temp && GET_MODE (temp) != BLKmode
6629 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6630 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6631 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6632 && DECL_RTL (TREE_OPERAND (exp, 1))
6633 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6634 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6635 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6636 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6637 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6638 && DECL_RTL (TREE_OPERAND (exp, 2))
6639 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6640 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6641 {
6642 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6643 temp = gen_reg_rtx (mode);
6644 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6645 dest_left_flag = get_last_insn ();
6646 jumpifnot (TREE_OPERAND (exp, 0), op0);
6647
6648 /* Allows cleanups up to here. */
6649 old_cleanups = cleanups_this_call;
6650 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6651 op1 = op0;
6652 dest_right_flag = get_last_insn ();
6653 }
6654 #endif
6655 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6656 comparison operator. If we have one of these cases, set the
6657 output to A, branch on A (cse will merge these two references),
6658 then set the output to FOO. */
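/* E.g. `X != 0 ? X : Y' is emitted as
temp = X;
if (temp != 0) goto lab;
temp = Y;
lab: */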
6659 else if (temp
6660 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6661 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6662 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6663 TREE_OPERAND (exp, 1), 0)
6664 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6665 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6666 {
6667 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6668 temp = gen_reg_rtx (mode);
6669 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6670 dest_left_flag = get_last_insn ();
6671 jumpif (TREE_OPERAND (exp, 0), op0);
6672
6673 /* Allows cleanups up to here. */
6674 old_cleanups = cleanups_this_call;
6675 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6676 op1 = op0;
6677 dest_right_flag = get_last_insn ();
6678 }
6679 else if (temp
6680 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6681 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6682 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6683 TREE_OPERAND (exp, 2), 0)
6684 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6685 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6686 {
6687 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6688 temp = gen_reg_rtx (mode);
6689 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6690 dest_left_flag = get_last_insn ();
6691 jumpifnot (TREE_OPERAND (exp, 0), op0);
6692
6693 /* Allows cleanups up to here. */
6694 old_cleanups = cleanups_this_call;
6695 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6696 op1 = op0;
6697 dest_right_flag = get_last_insn ();
6698 }
6699 else
6700 {
6701 op1 = gen_label_rtx ();
6702 jumpifnot (TREE_OPERAND (exp, 0), op0);
6703
6704 /* Allows cleanups up to here. */
6705 old_cleanups = cleanups_this_call;
6706 if (temp != 0)
6707 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6708 else
6709 expand_expr (TREE_OPERAND (exp, 1),
6710 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6711 dest_left_flag = get_last_insn ();
6712
6713 /* Handle conditional cleanups, if any. */
6714 left_cleanups = defer_cleanups_to (old_cleanups);
6715
6716 emit_queue ();
6717 emit_jump_insn (gen_jump (op1));
6718 emit_barrier ();
6719 emit_label (op0);
6720 if (temp != 0)
6721 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6722 else
6723 expand_expr (TREE_OPERAND (exp, 2),
6724 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6725 dest_right_flag = get_last_insn ();
6726 }
6727
6728 /* Handle conditional cleanups, if any. */
6729 right_cleanups = defer_cleanups_to (old_cleanups);
6730
6731 emit_queue ();
6732 emit_label (op1);
6733 OK_DEFER_POP;
6734
6735 /* Add back in any conditional cleanups. */
6736 if (left_cleanups || right_cleanups)
6737 {
6738 tree new_cleanups;
6739 tree cond;
6740 rtx last;
6741
6742 /* Now that we know that a flag is needed, go back and add in the
6743 setting of the flag. */
6744
6745 /* Do the left side flag. */
6746 last = get_last_insn ();
6747 /* Flag left cleanups as needed. */
6748 emit_move_insn (flag, const1_rtx);
6749 /* ??? Deprecated; use sequences instead. */
6750 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6751
6752 /* Do the right side flag. */
6753 last = get_last_insn ();
6754 /* Flag right cleanups as needed. */
6755 emit_move_insn (flag, const0_rtx);
6756 /* ??? Deprecated; use sequences instead. */
6757 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6758
6759 /* All cleanups must be on the function_obstack. */
6760 push_obstacks_nochange ();
6761 resume_temporary_allocation ();
6762
6763 /* Convert FLAG, which is an rtx, into a tree. */
6764 cond = make_node (RTL_EXPR);
6765 TREE_TYPE (cond) = integer_type_node;
6766 RTL_EXPR_RTL (cond) = flag;
6767 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6768 cond = save_expr (cond);
6769
6770 if (! left_cleanups)
6771 left_cleanups = integer_zero_node;
6772 if (! right_cleanups)
6773 right_cleanups = integer_zero_node;
6774 new_cleanups = build (COND_EXPR, void_type_node,
6775 truthvalue_conversion (cond),
6776 left_cleanups, right_cleanups);
6777 new_cleanups = fold (new_cleanups);
6778
6779 pop_obstacks ();
6780
6781 /* Now add in the conditionalized cleanups. */
6782 cleanups_this_call
6783 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6784 expand_eh_region_start ();
6785 }
6786 return temp;
6787 }
6788
6789 case TARGET_EXPR:
6790 {
6791 /* Something needs to be initialized, but we didn't know
6792 where that thing was when building the tree. For example,
6793 it could be the return value of a function, or a parameter
6794 to a function which is laid out on the stack, or a temporary
6795 variable which must be passed by reference.
6796
6797 We guarantee that the expression will either be constructed
6798 or copied into our original target. */
6799
6800 tree slot = TREE_OPERAND (exp, 0);
6801 tree cleanups = NULL_TREE;
6802 tree exp1;
6803 rtx temp;
6804
6805 if (TREE_CODE (slot) != VAR_DECL)
6806 abort ();
6807
6808 if (! ignore)
6809 target = original_target;
6810
6811 if (target == 0)
6812 {
6813 if (DECL_RTL (slot) != 0)
6814 {
6815 target = DECL_RTL (slot);
6816 /* If we have already expanded the slot, don't do
6817 it again. (mrs) */
6818 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6819 return target;
6820 }
6821 else
6822 {
6823 target = assign_temp (type, 2, 1, 1);
6824 /* All temp slots at this level must not conflict. */
6825 preserve_temp_slots (target);
6826 DECL_RTL (slot) = target;
6827
6828 /* Since SLOT is not known to the called function
6829 to belong to its stack frame, we must build an explicit
6830 cleanup. This case occurs when we must build up a reference
6831 to pass the reference as an argument. In this case,
6832 it is very likely that such a reference need not be
6833 built here. */
6834
6835 if (TREE_OPERAND (exp, 2) == 0)
6836 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6837 cleanups = TREE_OPERAND (exp, 2);
6838 }
6839 }
6840 else
6841 {
6842 /* This case does occur when expanding a parameter which
6843 needs to be constructed on the stack. The target
6844 is the actual stack address that we want to initialize.
6845 The function we call will perform the cleanup in this case. */
6846
6847 /* If we have already assigned it space, use that space,
6848 not the target that we were passed in, as our target
6849 parameter is only a hint. */
6850 if (DECL_RTL (slot) != 0)
6851 {
6852 target = DECL_RTL (slot);
6853 /* If we have already expanded the slot, don't do
6854 it again. (mrs) */
6855 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6856 return target;
6857 }
6858
6859 DECL_RTL (slot) = target;
6860 }
6861
6862 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6863 /* Mark it as expanded. */
6864 TREE_OPERAND (exp, 1) = NULL_TREE;
6865
6866 store_expr (exp1, target, 0);
6867
6868 if (cleanups)
6869 {
6870 cleanups_this_call = tree_cons (NULL_TREE,
6871 cleanups,
6872 cleanups_this_call);
6873 expand_eh_region_start ();
6874 }
6875
6876 return target;
6877 }
6878
6879 case INIT_EXPR:
6880 {
6881 tree lhs = TREE_OPERAND (exp, 0);
6882 tree rhs = TREE_OPERAND (exp, 1);
6883 tree noncopied_parts = 0;
6884 tree lhs_type = TREE_TYPE (lhs);
6885
6886 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6887 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6888 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6889 TYPE_NONCOPIED_PARTS (lhs_type));
6890 while (noncopied_parts != 0)
6891 {
6892 expand_assignment (TREE_VALUE (noncopied_parts),
6893 TREE_PURPOSE (noncopied_parts), 0, 0);
6894 noncopied_parts = TREE_CHAIN (noncopied_parts);
6895 }
6896 return temp;
6897 }
6898
6899 case MODIFY_EXPR:
6900 {
6901 /* If lhs is complex, expand calls in rhs before computing it.
6902 That's so we don't compute a pointer and save it over a call.
6903 If lhs is simple, compute it first so we can give it as a
6904 target if the rhs is just a call. This avoids an extra temp and copy,
6905 and prevents a partial subsumption that makes bad code.
6906 Actually we could treat component_ref's of vars like vars. */
6907
6908 tree lhs = TREE_OPERAND (exp, 0);
6909 tree rhs = TREE_OPERAND (exp, 1);
6910 tree noncopied_parts = 0;
6911 tree lhs_type = TREE_TYPE (lhs);
6912
6913 temp = 0;
6914
6915 if (TREE_CODE (lhs) != VAR_DECL
6916 && TREE_CODE (lhs) != RESULT_DECL
6917 && TREE_CODE (lhs) != PARM_DECL)
6918 preexpand_calls (exp);
6919
6920 /* Check for |= or &= of a bitfield of size one into another bitfield
6921 of size one. In this case (unless we need the result of the
6922 assignment) we can do this more efficiently with a
6923 test followed by an assignment, if necessary.
6924
6925 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6926 things change so we do, this code should be enhanced to
6927 support it. */
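/* E.g. for one-bit fields a and b, `s.a |= s.b' is emitted as
`if (s.b) s.a = 1;' and `s.a &= s.b' as `if (! s.b) s.a = 0;',
avoiding a read-modify-write of s.a. */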
6928 if (ignore
6929 && TREE_CODE (lhs) == COMPONENT_REF
6930 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6931 || TREE_CODE (rhs) == BIT_AND_EXPR)
6932 && TREE_OPERAND (rhs, 0) == lhs
6933 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6934 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6935 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6936 {
6937 rtx label = gen_label_rtx ();
6938
6939 do_jump (TREE_OPERAND (rhs, 1),
6940 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6941 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6942 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6943 (TREE_CODE (rhs) == BIT_IOR_EXPR
6944 ? integer_one_node
6945 : integer_zero_node)),
6946 0, 0);
6947 do_pending_stack_adjust ();
6948 emit_label (label);
6949 return const0_rtx;
6950 }
6951
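/* [Editor's note -- illustrative sketch, not part of expr.c: the
   one-bit-bitfield special case above fires, when the result is
   unused, for source like the following; the struct and names are
   hypothetical.] */
#if 0
struct flags { unsigned a : 1, b : 1; };

void
set (struct flags *f)
{
  f->a |= f->b;   /* expanded as: if (f->b) f->a = 1; */
}
#endif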
6952 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6953 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6954 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6955 TYPE_NONCOPIED_PARTS (lhs_type));
6956
6957 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6958 while (noncopied_parts != 0)
6959 {
6960 expand_assignment (TREE_PURPOSE (noncopied_parts),
6961 TREE_VALUE (noncopied_parts), 0, 0);
6962 noncopied_parts = TREE_CHAIN (noncopied_parts);
6963 }
6964 return temp;
6965 }
6966
6967 case PREINCREMENT_EXPR:
6968 case PREDECREMENT_EXPR:
6969 return expand_increment (exp, 0, ignore);
6970
6971 case POSTINCREMENT_EXPR:
6972 case POSTDECREMENT_EXPR:
6973 /* Faster to treat as pre-increment if result is not used. */
6974 return expand_increment (exp, ! ignore, ignore);
6975
6976 case ADDR_EXPR:
6977 /* If nonzero, TEMP will be set to the address of something that might
6978 be a MEM corresponding to a stack slot. */
6979 temp = 0;
6980
6981 /* Are we taking the address of a nested function? */
6982 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6983 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
6984 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
6985 {
6986 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6987 op0 = force_operand (op0, target);
6988 }
6989 /* If we are taking the address of something erroneous, just
6990 return a zero. */
6991 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6992 return const0_rtx;
6993 else
6994 {
6995 /* We make sure to pass const0_rtx down if we came in with
6996 ignore set, to avoid doing the cleanups twice. */
6997 op0 = expand_expr (TREE_OPERAND (exp, 0),
6998 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6999 (modifier == EXPAND_INITIALIZER
7000 ? modifier : EXPAND_CONST_ADDRESS));
7001
7002 /* If we are going to ignore the result, OP0 will have been set
7003 to const0_rtx, so just return it. Don't get confused and
7004 think we are taking the address of the constant. */
7005 if (ignore)
7006 return op0;
7007
7008 op0 = protect_from_queue (op0, 0);
7009
7010 /* We would like the object in memory. If it is a constant,
7011 we can have it be statically allocated into memory. For
7012 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7013 memory and store the value into it. */
7014
7015 if (CONSTANT_P (op0))
7016 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7017 op0);
7018 else if (GET_CODE (op0) == MEM)
7019 {
7020 mark_temp_addr_taken (op0);
7021 temp = XEXP (op0, 0);
7022 }
7023
7024 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7025 || GET_CODE (op0) == CONCAT)
7026 {
7027 /* If this object is in a register, it must not
7028 be BLKmode. */
7029 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7030 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7031
7032 mark_temp_addr_taken (memloc);
7033 emit_move_insn (memloc, op0);
7034 op0 = memloc;
7035 }
7036
7037 if (GET_CODE (op0) != MEM)
7038 abort ();
7039
7040 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7041 {
7042 temp = XEXP (op0, 0);
7043 #ifdef POINTERS_EXTEND_UNSIGNED
7044 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7045 && mode == ptr_mode)
7046 temp = convert_memory_address (ptr_mode, temp);
7047 #endif
7048 return temp;
7049 }
7050
7051 op0 = force_operand (XEXP (op0, 0), target);
7052 }
7053
7054 if (flag_force_addr && GET_CODE (op0) != REG)
7055 op0 = force_reg (Pmode, op0);
7056
7057 if (GET_CODE (op0) == REG
7058 && ! REG_USERVAR_P (op0))
7059 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7060
7061 /* If we might have had a temp slot, add an equivalent address
7062 for it. */
7063 if (temp != 0)
7064 update_temp_slot_address (temp, op0);
7065
7066 #ifdef POINTERS_EXTEND_UNSIGNED
7067 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7068 && mode == ptr_mode)
7069 op0 = convert_memory_address (ptr_mode, op0);
7070 #endif
7071
7072 return op0;
7073
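/* [Editor's note -- illustrative sketch, not part of expr.c: the
   trampoline path at the top of the ADDR_EXPR case is taken when the
   address of a nested function needing its static chain escapes, as
   in this GNU C fragment; names are hypothetical.] */
#if 0
int
outer (int x)
{
  int inner (int y) { return x + y; }  /* refers to outer's frame */
  int (*fp) (int) = inner;             /* ADDR_EXPR -> trampoline_address */
  return fp (1);
}
#endif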
7074 case ENTRY_VALUE_EXPR:
7075 abort ();
7076
7077 /* COMPLEX type for Extended Pascal & Fortran */
7078 case COMPLEX_EXPR:
7079 {
7080 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7081 rtx insns;
7082
7083 /* Get the rtx code of the operands. */
7084 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7085 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7086
7087 if (! target)
7088 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7089
7090 start_sequence ();
7091
7092 /* Move the real (op0) and imaginary (op1) parts to their location. */
7093 emit_move_insn (gen_realpart (mode, target), op0);
7094 emit_move_insn (gen_imagpart (mode, target), op1);
7095
7096 insns = get_insns ();
7097 end_sequence ();
7098
7099 /* Complex construction should appear as a single unit. */
7100 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7101 each with a separate pseudo as destination.
7102 It's not correct for flow to treat them as a unit. */
7103 if (GET_CODE (target) != CONCAT)
7104 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7105 else
7106 emit_insns (insns);
7107
7108 return target;
7109 }
7110
7111 case REALPART_EXPR:
7112 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7113 return gen_realpart (mode, op0);
7114
7115 case IMAGPART_EXPR:
7116 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7117 return gen_imagpart (mode, op0);
7118
7119 case CONJ_EXPR:
7120 {
7121 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7122 rtx imag_t;
7123 rtx insns;
7124
7125 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7126
7127 if (! target)
7128 target = gen_reg_rtx (mode);
7129
7130 start_sequence ();
7131
7132 /* Store the realpart and the negated imagpart to target. */
7133 emit_move_insn (gen_realpart (partmode, target),
7134 gen_realpart (partmode, op0));
7135
7136 imag_t = gen_imagpart (partmode, target);
7137 temp = expand_unop (partmode, neg_optab,
7138 gen_imagpart (partmode, op0), imag_t, 0);
7139 if (temp != imag_t)
7140 emit_move_insn (imag_t, temp);
7141
7142 insns = get_insns ();
7143 end_sequence ();
7144
7145 /* Conjugate should appear as a single unit.
7146 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7147 each with a separate pseudo as destination.
7148 It's not correct for flow to treat them as a unit. */
7149 if (GET_CODE (target) != CONCAT)
7150 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7151 else
7152 emit_insns (insns);
7153
7154 return target;
7155 }
7156
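/* [Editor's note -- illustrative sketch, not part of expr.c: under
   the GNU C complex extension of this era, the four complex cases
   above correspond to source like the following.] */
#if 0
void
example (__complex__ double z)
{
  double r = __real__ z;        /* REALPART_EXPR */
  double i = __imag__ z;        /* IMAGPART_EXPR */
  __complex__ double c = ~z;    /* CONJ_EXPR: real part kept, imag negated */
}
#endif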
7157 case ERROR_MARK:
7158 op0 = CONST0_RTX (tmode);
7159 if (op0 != 0)
7160 return op0;
7161 return const0_rtx;
7162
7163 default:
7164 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7165 }
7166
7167 /* Here to do an ordinary binary operator, generating an instruction
7168 from the optab already placed in `this_optab'. */
7169 binop:
7170 preexpand_calls (exp);
7171 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7172 subtarget = 0;
7173 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7174 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7175 binop2:
7176 temp = expand_binop (mode, this_optab, op0, op1, target,
7177 unsignedp, OPTAB_LIB_WIDEN);
7178 if (temp == 0)
7179 abort ();
7180 return temp;
7181 }
7182
7183
7184 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7185
7186 void
7187 bc_expand_expr (exp)
7188 tree exp;
7189 {
7190 enum tree_code code;
7191 tree type, arg0;
7192 rtx r;
7193 struct binary_operator *binoptab;
7194 struct unary_operator *unoptab;
7195 struct increment_operator *incroptab;
7196 struct bc_label *lab, *lab1;
7197 enum bytecode_opcode opcode;
7198
7199
7200 code = TREE_CODE (exp);
7201
7202 switch (code)
7203 {
7204 case PARM_DECL:
7205
7206 if (DECL_RTL (exp) == 0)
7207 {
7208 error_with_decl (exp, "prior parameter's size depends on `%s'");
7209 return;
7210 }
7211
7212 bc_load_parmaddr (DECL_RTL (exp));
7213 bc_load_memory (TREE_TYPE (exp), exp);
7214
7215 return;
7216
7217 case VAR_DECL:
7218
7219 if (DECL_RTL (exp) == 0)
7220 abort ();
7221
7222 #if 0
7223 if (BYTECODE_LABEL (DECL_RTL (exp)))
7224 bc_load_externaddr (DECL_RTL (exp));
7225 else
7226 bc_load_localaddr (DECL_RTL (exp));
7227 #endif
7228 if (TREE_PUBLIC (exp))
7229 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7230 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7231 else
7232 bc_load_localaddr (DECL_RTL (exp));
7233
7234 bc_load_memory (TREE_TYPE (exp), exp);
7235 return;
7236
7237 case INTEGER_CST:
7238
7239 #ifdef DEBUG_PRINT_CODE
7240 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7241 #endif
7242 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7243 ? SImode
7244 : TYPE_MODE (TREE_TYPE (exp)))],
7245 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7246 return;
7247
7248 case REAL_CST:
7249
7250 #if 0
7251 #ifdef DEBUG_PRINT_CODE
7252 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7253 #endif
7254 /* FIX THIS: find a better way to pass real_cst's. -bson */
7255 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7256 (double) TREE_REAL_CST (exp));
7257 #else
7258 abort ();
7259 #endif
7260
7261 return;
7262
7263 case CALL_EXPR:
7264
7265 /* We build a call description vector describing the type of
7266 the return value and of the arguments; this call vector,
7267 together with a pointer to a location for the return value
7268 and the base of the argument list, is passed to the low
7269 level machine dependent call subroutine, which is responsible
7270 for putting the arguments wherever real functions expect
7271 them, as well as getting the return value back. */
7272 {
7273 tree calldesc = 0, arg;
7274 int nargs = 0, i;
7275 rtx retval;
7276
7277 /* Push the evaluated args on the evaluation stack in reverse
7278 order. Also make an entry for each arg in the calldesc
7279 vector while we're at it. */
7280
7281 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7282
7283 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7284 {
7285 ++nargs;
7286 bc_expand_expr (TREE_VALUE (arg));
7287
7288 calldesc = tree_cons ((tree) 0,
7289 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7290 calldesc);
7291 calldesc = tree_cons ((tree) 0,
7292 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7293 calldesc);
7294 }
7295
7296 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7297
7298 /* Allocate a location for the return value and push its
7299 address on the evaluation stack. Also make an entry
7300 at the front of the calldesc for the return value type. */
7301
7302 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7303 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7304 bc_load_localaddr (retval);
7305
7306 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7307 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7308
7309 /* Prepend the argument count. */
7310 calldesc = tree_cons ((tree) 0,
7311 build_int_2 (nargs, 0),
7312 calldesc);
7313
7314 /* Push the address of the call description vector on the stack. */
7315 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7316 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7317 build_index_type (build_int_2 (nargs * 2, 0)));
7318 r = output_constant_def (calldesc);
7319 bc_load_externaddr (r);
7320
7321 /* Push the address of the function to be called. */
7322 bc_expand_expr (TREE_OPERAND (exp, 0));
7323
7324 /* Call the function, popping its address and the calldesc vector
7325 address off the evaluation stack in the process. */
7326 bc_emit_instruction (call);
7327
7328 /* Pop the arguments off the stack. */
7329 bc_adjust_stack (nargs);
7330
7331 /* Load the return value onto the stack. */
7332 bc_load_localaddr (retval);
7333 bc_load_memory (type, TREE_OPERAND (exp, 0));
7334 }
7335 return;
7336
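/* [Editor's note -- illustrative, not part of expr.c: the call
   description vector built above lays out the argument count, then a
   (runtime type code, size) pair for the return value, then one such
   pair per argument in source order.  For `double f (int, char *)' on
   a 32-bit target it would resemble the following; the T_* names and
   the sizes are assumptions for the sketch.] */
#if 0
static int calldesc_f[] = {
  2,                        /* number of arguments */
  T_DOUBLE, 8,              /* return value: type code, size */
  T_INT,    4,              /* argument 1 */
  T_PTR,    4               /* argument 2 */
};
#endif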
7337 case SAVE_EXPR:
7338
7339 if (!SAVE_EXPR_RTL (exp))
7340 {
7341 /* First time around: copy to local variable */
7342 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7343 TYPE_ALIGN (TREE_TYPE(exp)));
7344 bc_expand_expr (TREE_OPERAND (exp, 0));
7345 bc_emit_instruction (duplicate);
7346
7347 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7348 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7349 }
7350 else
7351 {
7352 /* Consecutive reference: use saved copy */
7353 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7354 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7355 }
7356 return;
7357
7358 #if 0
7359 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7360 how are they handled instead? */
7361 case LET_STMT:
7362
7363 TREE_USED (exp) = 1;
7364 bc_expand_expr (STMT_BODY (exp));
7365 return;
7366 #endif
7367
7368 case NOP_EXPR:
7369 case CONVERT_EXPR:
7370
7371 bc_expand_expr (TREE_OPERAND (exp, 0));
7372 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7373 return;
7374
7375 case MODIFY_EXPR:
7376
7377 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7378 return;
7379
7380 case ADDR_EXPR:
7381
7382 bc_expand_address (TREE_OPERAND (exp, 0));
7383 return;
7384
7385 case INDIRECT_REF:
7386
7387 bc_expand_expr (TREE_OPERAND (exp, 0));
7388 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7389 return;
7390
7391 case ARRAY_REF:
7392
7393 bc_expand_expr (bc_canonicalize_array_ref (exp));
7394 return;
7395
7396 case COMPONENT_REF:
7397
7398 bc_expand_component_address (exp);
7399
7400 /* If we have a bitfield, generate a proper load */
7401 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7402 return;
7403
7404 case COMPOUND_EXPR:
7405
7406 bc_expand_expr (TREE_OPERAND (exp, 0));
7407 bc_emit_instruction (drop);
7408 bc_expand_expr (TREE_OPERAND (exp, 1));
7409 return;
7410
7411 case COND_EXPR:
7412
7413 bc_expand_expr (TREE_OPERAND (exp, 0));
7414 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7415 lab = bc_get_bytecode_label ();
7416 bc_emit_bytecode (xjumpifnot);
7417 bc_emit_bytecode_labelref (lab);
7418
7419 #ifdef DEBUG_PRINT_CODE
7420 fputc ('\n', stderr);
7421 #endif
7422 bc_expand_expr (TREE_OPERAND (exp, 1));
7423 lab1 = bc_get_bytecode_label ();
7424 bc_emit_bytecode (jump);
7425 bc_emit_bytecode_labelref (lab1);
7426
7427 #ifdef DEBUG_PRINT_CODE
7428 fputc ('\n', stderr);
7429 #endif
7430
7431 bc_emit_bytecode_labeldef (lab);
7432 bc_expand_expr (TREE_OPERAND (exp, 2));
7433 bc_emit_bytecode_labeldef (lab1);
7434 return;
7435
7436 case TRUTH_ANDIF_EXPR:
7437
7438 opcode = xjumpifnot;
7439 goto andorif;
7440
7441 case TRUTH_ORIF_EXPR:
7442
7443 opcode = xjumpif;
7444 goto andorif;
7445
7446 case PLUS_EXPR:
7447
7448 binoptab = optab_plus_expr;
7449 goto binop;
7450
7451 case MINUS_EXPR:
7452
7453 binoptab = optab_minus_expr;
7454 goto binop;
7455
7456 case MULT_EXPR:
7457
7458 binoptab = optab_mult_expr;
7459 goto binop;
7460
7461 case TRUNC_DIV_EXPR:
7462 case FLOOR_DIV_EXPR:
7463 case CEIL_DIV_EXPR:
7464 case ROUND_DIV_EXPR:
7465 case EXACT_DIV_EXPR:
7466
7467 binoptab = optab_trunc_div_expr;
7468 goto binop;
7469
7470 case TRUNC_MOD_EXPR:
7471 case FLOOR_MOD_EXPR:
7472 case CEIL_MOD_EXPR:
7473 case ROUND_MOD_EXPR:
7474
7475 binoptab = optab_trunc_mod_expr;
7476 goto binop;
7477
7478 case FIX_ROUND_EXPR:
7479 case FIX_FLOOR_EXPR:
7480 case FIX_CEIL_EXPR:
7481 abort (); /* Not used for C. */
7482
7483 case FIX_TRUNC_EXPR:
7484 case FLOAT_EXPR:
7485 case MAX_EXPR:
7486 case MIN_EXPR:
7487 case FFS_EXPR:
7488 case LROTATE_EXPR:
7489 case RROTATE_EXPR:
7490 abort (); /* FIXME */
7491
7492 case RDIV_EXPR:
7493
7494 binoptab = optab_rdiv_expr;
7495 goto binop;
7496
7497 case BIT_AND_EXPR:
7498
7499 binoptab = optab_bit_and_expr;
7500 goto binop;
7501
7502 case BIT_IOR_EXPR:
7503
7504 binoptab = optab_bit_ior_expr;
7505 goto binop;
7506
7507 case BIT_XOR_EXPR:
7508
7509 binoptab = optab_bit_xor_expr;
7510 goto binop;
7511
7512 case LSHIFT_EXPR:
7513
7514 binoptab = optab_lshift_expr;
7515 goto binop;
7516
7517 case RSHIFT_EXPR:
7518
7519 binoptab = optab_rshift_expr;
7520 goto binop;
7521
7522 case TRUTH_AND_EXPR:
7523
7524 binoptab = optab_truth_and_expr;
7525 goto binop;
7526
7527 case TRUTH_OR_EXPR:
7528
7529 binoptab = optab_truth_or_expr;
7530 goto binop;
7531
7532 case LT_EXPR:
7533
7534 binoptab = optab_lt_expr;
7535 goto binop;
7536
7537 case LE_EXPR:
7538
7539 binoptab = optab_le_expr;
7540 goto binop;
7541
7542 case GE_EXPR:
7543
7544 binoptab = optab_ge_expr;
7545 goto binop;
7546
7547 case GT_EXPR:
7548
7549 binoptab = optab_gt_expr;
7550 goto binop;
7551
7552 case EQ_EXPR:
7553
7554 binoptab = optab_eq_expr;
7555 goto binop;
7556
7557 case NE_EXPR:
7558
7559 binoptab = optab_ne_expr;
7560 goto binop;
7561
7562 case NEGATE_EXPR:
7563
7564 unoptab = optab_negate_expr;
7565 goto unop;
7566
7567 case BIT_NOT_EXPR:
7568
7569 unoptab = optab_bit_not_expr;
7570 goto unop;
7571
7572 case TRUTH_NOT_EXPR:
7573
7574 unoptab = optab_truth_not_expr;
7575 goto unop;
7576
7577 case PREDECREMENT_EXPR:
7578
7579 incroptab = optab_predecrement_expr;
7580 goto increment;
7581
7582 case PREINCREMENT_EXPR:
7583
7584 incroptab = optab_preincrement_expr;
7585 goto increment;
7586
7587 case POSTDECREMENT_EXPR:
7588
7589 incroptab = optab_postdecrement_expr;
7590 goto increment;
7591
7592 case POSTINCREMENT_EXPR:
7593
7594 incroptab = optab_postincrement_expr;
7595 goto increment;
7596
7597 case CONSTRUCTOR:
7598
7599 bc_expand_constructor (exp);
7600 return;
7601
7602 case ERROR_MARK:
7603 case RTL_EXPR:
7604
7605 return;
7606
7607 case BIND_EXPR:
7608 {
7609 tree vars = TREE_OPERAND (exp, 0);
7610 int vars_need_expansion = 0;
7611
7612 /* Need to open a binding contour here because
7613 if there are any cleanups they must be contained here. */
7614 expand_start_bindings (0);
7615
7616 /* Mark the corresponding BLOCK for output. */
7617 if (TREE_OPERAND (exp, 2) != 0)
7618 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7619
7620 /* If VARS have not yet been expanded, expand them now. */
7621 while (vars)
7622 {
7623 if (DECL_RTL (vars) == 0)
7624 {
7625 vars_need_expansion = 1;
7626 expand_decl (vars);
7627 }
7628 expand_decl_init (vars);
7629 vars = TREE_CHAIN (vars);
7630 }
7631
7632 bc_expand_expr (TREE_OPERAND (exp, 1));
7633
7634 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7635
7636 return;
7637 }
7638 }
7639
7640 abort ();
7641
7642 binop:
7643
7644 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7645 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7646 return;
7647
7648
7649 unop:
7650
7651 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7652 return;
7653
7654
7655 andorif:
7656
7657 bc_expand_expr (TREE_OPERAND (exp, 0));
7658 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7659 lab = bc_get_bytecode_label ();
7660
7661 bc_emit_instruction (duplicate);
7662 bc_emit_bytecode (opcode);
7663 bc_emit_bytecode_labelref (lab);
7664
7665 #ifdef DEBUG_PRINT_CODE
7666 fputc ('\n', stderr);
7667 #endif
7668
7669 bc_emit_instruction (drop);
7670
7671 bc_expand_expr (TREE_OPERAND (exp, 1));
7672 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7673 bc_emit_bytecode_labeldef (lab);
7674 return;
7675
7676
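/* [Editor's note: for `a && b' the andorif code above emits, in
   bytecode pseudo-assembly (label name illustrative):

        <a>  truth-convert  duplicate  xjumpifnot L
        drop
        <b>  truth-convert
     L:

   so when the left operand decides the outcome, its duplicated value
   is left on the stack as the result; `a || b' is the same with
   xjumpif.] */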
7677 increment:
7678
7679 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7680
7681 /* Push the quantum. */
7682 bc_expand_expr (TREE_OPERAND (exp, 1));
7683
7684 /* Convert it to the lvalue's type. */
7685 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7686
7687 /* Push the address of the lvalue */
7688 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7689
7690 /* Perform actual increment */
7691 bc_expand_increment (incroptab, type);
7692 return;
7693 }
7694 \f
7695 /* Return the alignment in bits of EXP, a pointer valued expression.
7696 But don't return more than MAX_ALIGN no matter what.
7697 The alignment returned is, by default, the alignment of the thing that
7698 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7699
7700 Otherwise, look at the expression to see if we can do better, i.e., if the
7701 expression is actually pointing at an object whose alignment is tighter. */
7702
7703 static int
7704 get_pointer_alignment (exp, max_align)
7705 tree exp;
7706 unsigned max_align;
7707 {
7708 unsigned align, inner;
7709
7710 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7711 return 0;
7712
7713 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7714 align = MIN (align, max_align);
7715
7716 while (1)
7717 {
7718 switch (TREE_CODE (exp))
7719 {
7720 case NOP_EXPR:
7721 case CONVERT_EXPR:
7722 case NON_LVALUE_EXPR:
7723 exp = TREE_OPERAND (exp, 0);
7724 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7725 return align;
7726 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7727 align = MIN (inner, max_align);
7728 break;
7729
7730 case PLUS_EXPR:
7731 /* If sum of pointer + int, restrict our maximum alignment to that
7732 imposed by the integer. If not, we can't do any better than
7733 ALIGN. */
7734 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7735 return align;
7736
7737 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7738 & (max_align - 1))
7739 != 0)
7740 max_align >>= 1;
7741
7742 exp = TREE_OPERAND (exp, 0);
7743 break;
7744
7745 case ADDR_EXPR:
7746 /* See what we are pointing at and look at its alignment. */
7747 exp = TREE_OPERAND (exp, 0);
7748 if (TREE_CODE (exp) == FUNCTION_DECL)
7749 align = FUNCTION_BOUNDARY;
7750 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7751 align = DECL_ALIGN (exp);
7752 #ifdef CONSTANT_ALIGNMENT
7753 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7754 align = CONSTANT_ALIGNMENT (exp, align);
7755 #endif
7756 return MIN (align, max_align);
7757
7758 default:
7759 return align;
7760 }
7761 }
7762 }
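/* [Editor's note -- illustrative, not part of expr.c: a worked case
   of the function above, assuming `double' is 64-bit aligned and
   MAX_ALIGN is 64.] */
#if 0
double d;
char *p = (char *) &d + 2;
/* For `(char *) &d + 2', the PLUS_EXPR case narrows max_align until
   the 16-bit byte offset divides it (64 -> 32 -> 16), then the
   ADDR_EXPR case yields MIN (DECL_ALIGN (d), 16), i.e. 16 bits:
   the offset, not the decl, limits the answer.  */
#endif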
7763 \f
7764 /* Return the tree node and offset if a given argument corresponds to
7765 a string constant. */
7766
7767 static tree
7768 string_constant (arg, ptr_offset)
7769 tree arg;
7770 tree *ptr_offset;
7771 {
7772 STRIP_NOPS (arg);
7773
7774 if (TREE_CODE (arg) == ADDR_EXPR
7775 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7776 {
7777 *ptr_offset = integer_zero_node;
7778 return TREE_OPERAND (arg, 0);
7779 }
7780 else if (TREE_CODE (arg) == PLUS_EXPR)
7781 {
7782 tree arg0 = TREE_OPERAND (arg, 0);
7783 tree arg1 = TREE_OPERAND (arg, 1);
7784
7785 STRIP_NOPS (arg0);
7786 STRIP_NOPS (arg1);
7787
7788 if (TREE_CODE (arg0) == ADDR_EXPR
7789 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7790 {
7791 *ptr_offset = arg1;
7792 return TREE_OPERAND (arg0, 0);
7793 }
7794 else if (TREE_CODE (arg1) == ADDR_EXPR
7795 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7796 {
7797 *ptr_offset = arg0;
7798 return TREE_OPERAND (arg1, 0);
7799 }
7800 }
7801
7802 return 0;
7803 }
7804
7805 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7806 way, because it could contain a zero byte in the middle.
7807 TREE_STRING_LENGTH is the size of the character array, not the string.
7808
7809 Unfortunately, string_constant can't access the values of const char
7810 arrays with initializers, so neither can we do so here. */
7811
7812 static tree
7813 c_strlen (src)
7814 tree src;
7815 {
7816 tree offset_node;
7817 int offset, max;
7818 char *ptr;
7819
7820 src = string_constant (src, &offset_node);
7821 if (src == 0)
7822 return 0;
7823 max = TREE_STRING_LENGTH (src);
7824 ptr = TREE_STRING_POINTER (src);
7825 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7826 {
7827 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7828 compute the offset to the following null if we don't know where to
7829 start searching for it. */
7830 int i;
7831 for (i = 0; i < max; i++)
7832 if (ptr[i] == 0)
7833 return 0;
7834 /* We don't know the starting offset, but we do know that the string
7835 has no internal zero bytes. We can assume that the offset falls
7836 within the bounds of the string; otherwise, the programmer deserves
7837 what he gets. Subtract the offset from the length of the string,
7838 and return that. */
7839 /* This would perhaps not be valid if we were dealing with named
7840 arrays in addition to literal string constants. */
7841 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7842 }
7843
7844 /* We have a known offset into the string. Start searching there for
7845 a null character. */
7846 if (offset_node == 0)
7847 offset = 0;
7848 else
7849 {
7850 /* Did we get a long long offset? If so, punt. */
7851 if (TREE_INT_CST_HIGH (offset_node) != 0)
7852 return 0;
7853 offset = TREE_INT_CST_LOW (offset_node);
7854 }
7855 /* If the offset is known to be out of bounds, warn, and call strlen at
7856 runtime. */
7857 if (offset < 0 || offset > max)
7858 {
7859 warning ("offset outside bounds of constant string");
7860 return 0;
7861 }
7862 /* Use strlen to search for the first zero byte. Since any strings
7863 constructed with build_string will have nulls appended, we win even
7864 if we get handed something like (char[4])"abcd".
7865
7866 Since OFFSET is our starting index into the string, no further
7867 calculation is needed. */
7868 return size_int (strlen (ptr + offset));
7869 }
7870
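/* [Editor's note -- illustrative, not part of expr.c: what c_strlen
   can and cannot fold; `i' is a variable.] */
#if 0
n = strlen ("hello");        /* folded to 5 */
n = strlen ("hello" + 2);    /* constant offset: folded to 3 */
n = strlen ("he\0llo" + i);  /* internal NUL, unknown offset:
                                c_strlen returns 0; call strlen */
#endif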
7871 rtx
7872 expand_builtin_return_addr (fndecl_code, count, tem)
7873 enum built_in_function fndecl_code;
7874 int count;
7875 rtx tem;
7876 {
7877 int i;
7878
7879 /* Some machines need special handling before we can access
7880 arbitrary frames. For example, on the sparc, we must first flush
7881 all register windows to the stack. */
7882 #ifdef SETUP_FRAME_ADDRESSES
7883 SETUP_FRAME_ADDRESSES ();
7884 #endif
7885
7886 /* On the sparc, the return address is not in the frame, it is in a
7887 register. There is no way to access it off of the current frame
7888 pointer, but it can be accessed off the previous frame pointer by
7889 reading the value from the register window save area. */
7890 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7891 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7892 count--;
7893 #endif
7894
7895 /* Scan back COUNT frames to the specified frame. */
7896 for (i = 0; i < count; i++)
7897 {
7898 /* Assume the dynamic chain pointer is in the word that the
7899 frame address points to, unless otherwise specified. */
7900 #ifdef DYNAMIC_CHAIN_ADDRESS
7901 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7902 #endif
7903 tem = memory_address (Pmode, tem);
7904 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7905 }
7906
7907 /* For __builtin_frame_address, return what we've got. */
7908 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7909 return tem;
7910
7911 /* For __builtin_return_address, get the return address from that
7912 frame. */
7913 #ifdef RETURN_ADDR_RTX
7914 tem = RETURN_ADDR_RTX (count, tem);
7915 #else
7916 tem = memory_address (Pmode,
7917 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7918 tem = gen_rtx (MEM, Pmode, tem);
7919 #endif
7920 return tem;
7921 }
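/* [Editor's note -- illustrative, not part of expr.c: the user-level
   view of the function above; whether non-zero counts work depends on
   the target's frame chain.] */
#if 0
void *
whence (void)
{
  void *ra = __builtin_return_address (0);  /* caller's resume point */
  void *fp = __builtin_frame_address (1);   /* caller's frame, if walkable */
  return ra;
}
#endif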
7922 \f
7923 /* Expand an expression EXP that calls a built-in function,
7924 with result going to TARGET if that's convenient
7925 (and in mode MODE if that's convenient).
7926 SUBTARGET may be used as the target for computing one of EXP's operands.
7927 IGNORE is nonzero if the value is to be ignored. */
7928
7929 #define CALLED_AS_BUILT_IN(NODE) \
7930 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7931
7932 static rtx
7933 expand_builtin (exp, target, subtarget, mode, ignore)
7934 tree exp;
7935 rtx target;
7936 rtx subtarget;
7937 enum machine_mode mode;
7938 int ignore;
7939 {
7940 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7941 tree arglist = TREE_OPERAND (exp, 1);
7942 rtx op0;
7943 rtx lab1, insns;
7944 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7945 optab builtin_optab;
7946
7947 switch (DECL_FUNCTION_CODE (fndecl))
7948 {
7949 case BUILT_IN_ABS:
7950 case BUILT_IN_LABS:
7951 case BUILT_IN_FABS:
7952 /* build_function_call changes these into ABS_EXPR. */
7953 abort ();
7954
7955 case BUILT_IN_SIN:
7956 case BUILT_IN_COS:
7957 /* Treat these like sqrt, but only if the user asks for them. */
7958 if (! flag_fast_math)
7959 break;
7960 case BUILT_IN_FSQRT:
7961 /* If not optimizing, call the library function. */
7962 if (! optimize)
7963 break;
7964
7965 if (arglist == 0
7966 /* Arg could be wrong type if user redeclared this fcn wrong. */
7967 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7968 break;
7969
7970 /* Stabilize and compute the argument. */
7971 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7972 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7973 {
7974 exp = copy_node (exp);
7975 arglist = copy_node (arglist);
7976 TREE_OPERAND (exp, 1) = arglist;
7977 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7978 }
7979 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7980
7981 /* Make a suitable register to place result in. */
7982 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7983
7984 emit_queue ();
7985 start_sequence ();
7986
7987 switch (DECL_FUNCTION_CODE (fndecl))
7988 {
7989 case BUILT_IN_SIN:
7990 builtin_optab = sin_optab; break;
7991 case BUILT_IN_COS:
7992 builtin_optab = cos_optab; break;
7993 case BUILT_IN_FSQRT:
7994 builtin_optab = sqrt_optab; break;
7995 default:
7996 abort ();
7997 }
7998
7999 /* Compute into TARGET.
8000 Set TARGET to wherever the result comes back. */
8001 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8002 builtin_optab, op0, target, 0);
8003
8004 /* If we were unable to expand via the builtin, stop the
8005 sequence (without outputting the insns) and break, causing
8006 a call to the library function. */
8007 if (target == 0)
8008 {
8009 end_sequence ();
8010 break;
8011 }
8012
8013 /* Check the results by default. But if flag_fast_math is turned on,
8014 then assume sqrt will always be called with valid arguments. */
8015
8016 if (! flag_fast_math)
8017 {
8018 /* Don't define the builtin FP instructions
8019 if your machine is not IEEE. */
8020 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8021 abort ();
8022
8023 lab1 = gen_label_rtx ();
8024
8025 /* Test the result; if it is NaN, set errno=EDOM because
8026 the argument was not in the domain. */
8027 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8028 emit_jump_insn (gen_beq (lab1));
8029
8030 #ifdef TARGET_EDOM
8031 {
8032 #ifdef GEN_ERRNO_RTX
8033 rtx errno_rtx = GEN_ERRNO_RTX;
8034 #else
8035 rtx errno_rtx
8036 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8037 #endif
8038
8039 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8040 }
8041 #else
8042 /* We can't set errno=EDOM directly; let the library call do it.
8043 Pop the arguments right away in case the call gets deleted. */
8044 NO_DEFER_POP;
8045 expand_call (exp, target, 0);
8046 OK_DEFER_POP;
8047 #endif
8048
8049 emit_label (lab1);
8050 }
8051
8052 /* Output the entire sequence. */
8053 insns = get_insns ();
8054 end_sequence ();
8055 emit_insns (insns);
8056
8057 return target;
8058
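/* [Editor's note: the errno check above relies on NaN comparing
   unequal to itself; comparing the result with itself and branching
   on equality skips the errno code exactly when the result is a
   number.  In C terms, with hypothetical names:] */
#if 0
r = sqrt_insn (x);     /* the expanded sqrt/sin/cos instruction */
if (r == r)            /* true unless r is NaN */
  goto done;
errno = EDOM;          /* or re-issue the library call */
done:;
#endif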
8059 /* __builtin_apply_args returns block of memory allocated on
8060 the stack into which is stored the arg pointer, structure
8061 value address, static chain, and all the registers that might
8062 possibly be used in performing a function call. The code is
8063 moved to the start of the function so the incoming values are
8064 saved. */
8065 case BUILT_IN_APPLY_ARGS:
8066 /* Don't do __builtin_apply_args more than once in a function.
8067 Save the result of the first call and reuse it. */
8068 if (apply_args_value != 0)
8069 return apply_args_value;
8070 {
8071 /* When this function is called, it means that registers must be
8072 saved on entry to this function. So we migrate the
8073 call to the first insn of this function. */
8074 rtx temp;
8075 rtx seq;
8076
8077 start_sequence ();
8078 temp = expand_builtin_apply_args ();
8079 seq = get_insns ();
8080 end_sequence ();
8081
8082 apply_args_value = temp;
8083
8084 /* Put the sequence after the NOTE that starts the function.
8085 If this is inside a SEQUENCE, make the outer-level insn
8086 chain current, so the code is placed at the start of the
8087 function. */
8088 push_topmost_sequence ();
8089 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8090 pop_topmost_sequence ();
8091 return temp;
8092 }
8093
8094 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8095 FUNCTION with a copy of the parameters described by
8096 ARGUMENTS, and ARGSIZE. It returns a block of memory
8097 allocated on the stack into which is stored all the registers
8098 that might possibly be used for returning the result of a
8099 function. ARGUMENTS is the value returned by
8100 __builtin_apply_args. ARGSIZE is the number of bytes of
8101 arguments that must be copied. ??? How should this value be
8102 computed? We'll also need a safe worst case value for varargs
8103 functions. */
8104 case BUILT_IN_APPLY:
8105 if (arglist == 0
8106 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8107 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8108 || TREE_CHAIN (arglist) == 0
8109 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8110 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8111 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8112 return const0_rtx;
8113 else
8114 {
8115 int i;
8116 tree t;
8117 rtx ops[3];
8118
8119 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8120 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8121
8122 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8123 }
8124
8125 /* __builtin_return (RESULT) causes the function to return the
8126 value described by RESULT. RESULT is address of the block of
8127 memory returned by __builtin_apply. */
8128 case BUILT_IN_RETURN:
8129 if (arglist
8130 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8131 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8132 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8133 NULL_RTX, VOIDmode, 0));
8134 return const0_rtx;
8135
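/* [Editor's note -- illustrative, not part of expr.c: the three
   builtins above combine into a call-forwarding wrapper; `real_fn' is
   hypothetical and 64 is an assumed worst-case byte count of copied
   arguments (the ??? comment above notes there is no good way to
   compute it).] */
#if 0
void *
forward (void)
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) real_fn, args, 64);
  __builtin_return (result);
}
#endif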
8136 case BUILT_IN_SAVEREGS:
8137 /* Don't do __builtin_saveregs more than once in a function.
8138 Save the result of the first call and reuse it. */
8139 if (saveregs_value != 0)
8140 return saveregs_value;
8141 {
8142 /* When this function is called, it means that registers must be
8143 saved on entry to this function. So we migrate the
8144 call to the first insn of this function. */
8145 rtx temp;
8146 rtx seq;
8147
8148 /* Now really call the function. `expand_call' does not call
8149 expand_builtin, so there is no danger of infinite recursion here. */
8150 start_sequence ();
8151
8152 #ifdef EXPAND_BUILTIN_SAVEREGS
8153 /* Do whatever the machine needs done in this case. */
8154 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8155 #else
8156 /* The register where the function returns its value
8157 is likely to have something else in it, such as an argument.
8158 So preserve that register around the call. */
8159
8160 if (value_mode != VOIDmode)
8161 {
8162 rtx valreg = hard_libcall_value (value_mode);
8163 rtx saved_valreg = gen_reg_rtx (value_mode);
8164
8165 emit_move_insn (saved_valreg, valreg);
8166 temp = expand_call (exp, target, ignore);
8167 emit_move_insn (valreg, saved_valreg);
8168 }
8169 else
8170 /* Generate the call, putting the value in a pseudo. */
8171 temp = expand_call (exp, target, ignore);
8172 #endif
8173
8174 seq = get_insns ();
8175 end_sequence ();
8176
8177 saveregs_value = temp;
8178
8179 /* Put the sequence after the NOTE that starts the function.
8180 If this is inside a SEQUENCE, make the outer-level insn
8181 chain current, so the code is placed at the start of the
8182 function. */
8183 push_topmost_sequence ();
8184 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8185 pop_topmost_sequence ();
8186 return temp;
8187 }
8188
8189 /* __builtin_args_info (N) returns word N of the arg space info
8190 for the current function. The number and meanings of words
8191 is controlled by the definition of CUMULATIVE_ARGS. */
8192 case BUILT_IN_ARGS_INFO:
8193 {
8194 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8195 int i;
8196 int *word_ptr = (int *) &current_function_args_info;
8197 tree type, elts, result;
8198
8199 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8200 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8201 __FILE__, __LINE__);
8202
8203 if (arglist != 0)
8204 {
8205 tree arg = TREE_VALUE (arglist);
8206 if (TREE_CODE (arg) != INTEGER_CST)
8207 error ("argument of `__builtin_args_info' must be constant");
8208 else
8209 {
8210 int wordnum = TREE_INT_CST_LOW (arg);
8211
8212 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8213 error ("argument of `__builtin_args_info' out of range");
8214 else
8215 return GEN_INT (word_ptr[wordnum]);
8216 }
8217 }
8218 else
8219 error ("missing argument in `__builtin_args_info'");
8220
8221 return const0_rtx;
8222
8223 #if 0
8224 for (i = 0; i < nwords; i++)
8225 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8226
8227 type = build_array_type (integer_type_node,
8228 build_index_type (build_int_2 (nwords, 0)));
8229 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8230 TREE_CONSTANT (result) = 1;
8231 TREE_STATIC (result) = 1;
8232 result = build (INDIRECT_REF, build_pointer_type (type), result);
8233 TREE_CONSTANT (result) = 1;
8234 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8235 #endif
8236 }
8237
8238 /* Return the address of the first anonymous stack arg. */
8239 case BUILT_IN_NEXT_ARG:
8240 {
8241 tree fntype = TREE_TYPE (current_function_decl);
8242
8243 if ((TYPE_ARG_TYPES (fntype) == 0
8244 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8245 == void_type_node))
8246 && ! current_function_varargs)
8247 {
8248 error ("`va_start' used in function with fixed args");
8249 return const0_rtx;
8250 }
8251
8252 if (arglist)
8253 {
8254 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8255 tree arg = TREE_VALUE (arglist);
8256
8257 /* Strip off all nops for the sake of the comparison. This
8258 is not quite the same as STRIP_NOPS. It does more.
8259 We must also strip off INDIRECT_REF for C++ reference
8260 parameters. */
8261 while (TREE_CODE (arg) == NOP_EXPR
8262 || TREE_CODE (arg) == CONVERT_EXPR
8263 || TREE_CODE (arg) == NON_LVALUE_EXPR
8264 || TREE_CODE (arg) == INDIRECT_REF)
8265 arg = TREE_OPERAND (arg, 0);
8266 if (arg != last_parm)
8267 warning ("second parameter of `va_start' not last named argument");
8268 }
8269 else if (! current_function_varargs)
8270 /* Evidently an out of date version of <stdarg.h>; can't validate
8271 va_start's second argument, but can still work as intended. */
8272 warning ("`__builtin_next_arg' called without an argument");
8273 }
8274
8275 return expand_binop (Pmode, add_optab,
8276 current_function_internal_arg_pointer,
8277 current_function_arg_offset_rtx,
8278 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8279
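/* [Editor's note -- illustrative, not part of expr.c:
   __builtin_next_arg is what <stdarg.h>'s va_start typically expands
   to in this era; the checks above validate its use in source such
   as:] */
#if 0
#include <stdarg.h>
int
sum (int count, ...)
{
  va_list ap;
  int s = 0;
  va_start (ap, count);  /* second argument must be the last named parm */
  while (count-- > 0)
    s += va_arg (ap, int);
  va_end (ap);
  return s;
}
#endif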
8280 case BUILT_IN_CLASSIFY_TYPE:
8281 if (arglist != 0)
8282 {
8283 tree type = TREE_TYPE (TREE_VALUE (arglist));
8284 enum tree_code code = TREE_CODE (type);
8285 if (code == VOID_TYPE)
8286 return GEN_INT (void_type_class);
8287 if (code == INTEGER_TYPE)
8288 return GEN_INT (integer_type_class);
8289 if (code == CHAR_TYPE)
8290 return GEN_INT (char_type_class);
8291 if (code == ENUMERAL_TYPE)
8292 return GEN_INT (enumeral_type_class);
8293 if (code == BOOLEAN_TYPE)
8294 return GEN_INT (boolean_type_class);
8295 if (code == POINTER_TYPE)
8296 return GEN_INT (pointer_type_class);
8297 if (code == REFERENCE_TYPE)
8298 return GEN_INT (reference_type_class);
8299 if (code == OFFSET_TYPE)
8300 return GEN_INT (offset_type_class);
8301 if (code == REAL_TYPE)
8302 return GEN_INT (real_type_class);
8303 if (code == COMPLEX_TYPE)
8304 return GEN_INT (complex_type_class);
8305 if (code == FUNCTION_TYPE)
8306 return GEN_INT (function_type_class);
8307 if (code == METHOD_TYPE)
8308 return GEN_INT (method_type_class);
8309 if (code == RECORD_TYPE)
8310 return GEN_INT (record_type_class);
8311 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8312 return GEN_INT (union_type_class);
8313 if (code == ARRAY_TYPE)
8314 {
8315 if (TYPE_STRING_FLAG (type))
8316 return GEN_INT (string_type_class);
8317 else
8318 return GEN_INT (array_type_class);
8319 }
8320 if (code == SET_TYPE)
8321 return GEN_INT (set_type_class);
8322 if (code == FILE_TYPE)
8323 return GEN_INT (file_type_class);
8324 if (code == LANG_TYPE)
8325 return GEN_INT (lang_type_class);
8326 }
8327 return GEN_INT (no_type_class);
8328
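/* [Editor's note -- illustrative, not part of expr.c: the builtin
   above maps an expression's type to a small integer class code.] */
#if 0
c1 = __builtin_classify_type (0);           /* integer_type_class */
c2 = __builtin_classify_type (0.0);         /* real_type_class */
c3 = __builtin_classify_type ((char *) 0);  /* pointer_type_class */
#endif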
8329 case BUILT_IN_CONSTANT_P:
8330 if (arglist == 0)
8331 return const0_rtx;
8332 else
8333 {
8334 tree arg = TREE_VALUE (arglist);
8335
8336 STRIP_NOPS (arg);
8337 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8338 || (TREE_CODE (arg) == ADDR_EXPR
8339 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8340 ? const1_rtx : const0_rtx);
8341 }
8342
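/* [Editor's note -- illustrative, not part of expr.c: under the test
   above only literal constants and string addresses qualify; a
   const-qualified variable does not.] */
#if 0
const int k = 3;
p = __builtin_constant_p (3);      /* 1: constant class */
q = __builtin_constant_p ("abc");  /* 1: ADDR_EXPR of a STRING_CST */
r = __builtin_constant_p (k);      /* 0: k is a VAR_DECL here */
#endif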
8343 case BUILT_IN_FRAME_ADDRESS:
8344 /* The argument must be a nonnegative integer constant.
8345 It counts the number of frames to scan up the stack.
8346 The value is the address of that frame. */
8347 case BUILT_IN_RETURN_ADDRESS:
8348 /* The argument must be a nonnegative integer constant.
8349 It counts the number of frames to scan up the stack.
8350 The value is the return address saved in that frame. */
8351 if (arglist == 0)
8352 /* Warning about missing arg was already issued. */
8353 return const0_rtx;
8354 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8355 {
8356 error ("invalid arg to `__builtin_return_address'");
8357 return const0_rtx;
8358 }
8359 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8360 {
8361 error ("invalid arg to `__builtin_return_address'");
8362 return const0_rtx;
8363 }
8364 else
8365 {
8366 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8367 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8368 hard_frame_pointer_rtx);
8369
8370 /* For __builtin_frame_address, return what we've got. */
8371 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8372 return tem;
8373
8374 if (GET_CODE (tem) != REG)
8375 tem = copy_to_reg (tem);
8376 return tem;
8377 }
8378
8379 case BUILT_IN_ALLOCA:
8380 if (arglist == 0
8381 /* Arg could be non-integer if user redeclared this fcn wrong. */
8382 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8383 break;
8384
8385 /* Compute the argument. */
8386 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8387
8388 /* Allocate the desired space. */
8389 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8390
8391 case BUILT_IN_FFS:
8392 /* If not optimizing, call the library function. */
8393 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8394 break;
8395
8396 if (arglist == 0
8397 /* Arg could be non-integer if user redeclared this fcn wrong. */
8398 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8399 break;
8400
8401 /* Compute the argument. */
8402 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8403 /* Compute ffs, into TARGET if possible.
8404 Set TARGET to wherever the result comes back. */
8405 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8406 ffs_optab, op0, target, 1);
8407 if (target == 0)
8408 abort ();
8409 return target;
8410
8411 case BUILT_IN_STRLEN:
8412 /* If not optimizing, call the library function. */
8413 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8414 break;
8415
8416 if (arglist == 0
8417 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8418 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8419 break;
8420 else
8421 {
8422 tree src = TREE_VALUE (arglist);
8423 tree len = c_strlen (src);
8424
8425 int align
8426 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8427
8428 rtx result, src_rtx, char_rtx;
8429 enum machine_mode insn_mode = value_mode, char_mode;
8430 enum insn_code icode;
8431
8432 /* If the length is known, just return it. */
8433 if (len != 0)
8434 return expand_expr (len, target, mode, 0);
8435
8436 /* If SRC is not a pointer type, don't do this operation inline. */
8437 if (align == 0)
8438 break;
8439
8440 /* Call a function if we can't compute strlen in the right mode. */
8441
8442 while (insn_mode != VOIDmode)
8443 {
8444 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8445 if (icode != CODE_FOR_nothing)
8446 break;
8447
8448 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8449 }
8450 if (insn_mode == VOIDmode)
8451 break;
8452
8453 /* Make a place to write the result of the instruction. */
8454 result = target;
8455 if (! (result != 0
8456 && GET_CODE (result) == REG
8457 && GET_MODE (result) == insn_mode
8458 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8459 result = gen_reg_rtx (insn_mode);
8460
8461 /* Make sure the operands are acceptable to the predicates. */
8462
8463 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8464 result = gen_reg_rtx (insn_mode);
8465
8466 src_rtx = memory_address (BLKmode,
8467 expand_expr (src, NULL_RTX, ptr_mode,
8468 EXPAND_NORMAL));
8469 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8470 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8471
8472 char_rtx = const0_rtx;
8473 char_mode = insn_operand_mode[(int)icode][2];
8474 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8475 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8476
8477 emit_insn (GEN_FCN (icode) (result,
8478 gen_rtx (MEM, BLKmode, src_rtx),
8479 char_rtx, GEN_INT (align)));
8480
8481 /* Return the value in the proper mode for this function. */
8482 if (GET_MODE (result) == value_mode)
8483 return result;
8484 else if (target != 0)
8485 {
8486 convert_move (target, result, 0);
8487 return target;
8488 }
8489 else
8490 return convert_to_mode (value_mode, result, 0);
8491 }
8492
8493 case BUILT_IN_STRCPY:
8494 /* If not optimizing, call the library function. */
8495 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8496 break;
8497
8498 if (arglist == 0
8499 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8500 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8501 || TREE_CHAIN (arglist) == 0
8502 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8503 break;
8504 else
8505 {
8506 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8507
8508 if (len == 0)
8509 break;
8510
8511 len = size_binop (PLUS_EXPR, len, integer_one_node);
8512
8513 chainon (arglist, build_tree_list (NULL_TREE, len));
8514 }
8515
8516 /* Drops in. */
8517 case BUILT_IN_MEMCPY:
8518 /* If not optimizing, call the library function. */
8519 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8520 break;
8521
8522 if (arglist == 0
8523 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8524 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8525 || TREE_CHAIN (arglist) == 0
8526 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8527 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8528 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8529 break;
8530 else
8531 {
8532 tree dest = TREE_VALUE (arglist);
8533 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8534 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8535 tree type;
8536
8537 int src_align
8538 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8539 int dest_align
8540 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8541 rtx dest_rtx, dest_mem, src_mem;
8542
8543 /* If either SRC or DEST is not a pointer type, don't do
8544 this operation in-line. */
8545 if (src_align == 0 || dest_align == 0)
8546 {
8547 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8548 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8549 break;
8550 }
8551
8552 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8553 dest_mem = gen_rtx (MEM, BLKmode,
8554 memory_address (BLKmode, dest_rtx));
8555 /* There could be a void* cast on top of the object. */
8556 while (TREE_CODE (dest) == NOP_EXPR)
8557 dest = TREE_OPERAND (dest, 0);
8558 type = TREE_TYPE (TREE_TYPE (dest));
8559 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8560 src_mem = gen_rtx (MEM, BLKmode,
8561 memory_address (BLKmode,
8562 expand_expr (src, NULL_RTX,
8563 ptr_mode,
8564 EXPAND_SUM)));
8565 /* There could be a void* cast on top of the object. */
8566 while (TREE_CODE (src) == NOP_EXPR)
8567 src = TREE_OPERAND (src, 0);
8568 type = TREE_TYPE (TREE_TYPE (src));
8569 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8570
8571 /* Copy word part most expediently. */
8572 emit_block_move (dest_mem, src_mem,
8573 expand_expr (len, NULL_RTX, VOIDmode, 0),
8574 MIN (src_align, dest_align));
8575 return force_operand (dest_rtx, NULL_RTX);
8576 }
8577
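/* [Editor's note -- illustrative, not part of expr.c: with a constant
   source string, the strcpy case above appends the known length and
   falls into the memcpy path; `buf' is hypothetical.] */
#if 0
char buf[8];
strcpy (buf, "hi");   /* handled like memcpy (buf, "hi", 3) */
#endif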
8578 case BUILT_IN_MEMSET:
8579 /* If not optimizing, call the library function. */
8580 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8581 break;
8582
8583 if (arglist == 0
8584 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8585 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8586 || TREE_CHAIN (arglist) == 0
8587 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8588 != INTEGER_TYPE)
8589 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8590 || (INTEGER_CST
8591 != (TREE_CODE (TREE_TYPE
8592 (TREE_VALUE
8593 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8594 break;
8595 else
8596 {
8597 tree dest = TREE_VALUE (arglist);
8598 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8599 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8600 tree type;
8601
8602 int dest_align
8603 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8604 rtx dest_rtx, dest_mem;
8605
8606 /* If DEST is not a pointer type, don't do this
8607 operation in-line. */
8608 if (dest_align == 0)
8609 break;
8610
8611 /* If VAL is not 0, don't do this operation in-line. */
8612 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8613 break;
8614
8615 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8616 dest_mem = gen_rtx (MEM, BLKmode,
8617 memory_address (BLKmode, dest_rtx));
8618 /* There could be a void* cast on top of the object. */
8619 while (TREE_CODE (dest) == NOP_EXPR)
8620 dest = TREE_OPERAND (dest, 0);
8621 type = TREE_TYPE (TREE_TYPE (dest));
8622 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8623
8624 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8625 dest_align);
8626
8627 return force_operand (dest_rtx, NULL_RTX);
8628 }
8629
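/* [Editor's note -- illustrative, not part of expr.c: the memset case
   above only handles a zero fill value inline; `buf' and `n' are
   hypothetical.] */
#if 0
memset (buf, 0, n);   /* expanded inline via clear_storage */
memset (buf, 1, n);   /* non-zero fill: library call */
#endif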
8630 /* These comparison functions need an instruction that returns an actual
8631 index. An ordinary compare that just sets the condition codes
8632 is not enough. */
8633 #ifdef HAVE_cmpstrsi
8634 case BUILT_IN_STRCMP:
8635 /* If not optimizing, call the library function. */
8636 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8637 break;
8638
8639 if (arglist == 0
8640 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8641 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8642 || TREE_CHAIN (arglist) == 0
8643 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8644 break;
8645 else if (!HAVE_cmpstrsi)
8646 break;
8647 {
8648 tree arg1 = TREE_VALUE (arglist);
8649 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8650 tree offset;
8651 tree len, len2;
8652
8653 len = c_strlen (arg1);
8654 if (len)
8655 len = size_binop (PLUS_EXPR, integer_one_node, len);
8656 len2 = c_strlen (arg2);
8657 if (len2)
8658 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8659
8660 /* If we don't have a constant length for the first, use the length
8661 of the second, if we know it. We don't require a constant for
8662 this case; some cost analysis could be done if both are available
8663 but neither is constant. For now, assume they're equally cheap.
8664
8665 If both strings have constant lengths, use the smaller. This
8666 could arise if optimization results in strcmp being called with
8667 two fixed strings, or if the code was machine-generated. We should
8668 add some code to the `memcmp' handler below to deal with such
8669 situations, someday. */
8670 if (!len || TREE_CODE (len) != INTEGER_CST)
8671 {
8672 if (len2)
8673 len = len2;
8674 else if (len == 0)
8675 break;
8676 }
8677 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8678 {
8679 if (tree_int_cst_lt (len2, len))
8680 len = len2;
8681 }
8682
8683 chainon (arglist, build_tree_list (NULL_TREE, len));
8684 }
8685
8686 /* Drops in. */
8687 case BUILT_IN_MEMCMP:
8688 /* If not optimizing, call the library function. */
8689 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8690 break;
8691
8692 if (arglist == 0
8693 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8694 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8695 || TREE_CHAIN (arglist) == 0
8696 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8697 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8698 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8699 break;
8700 else if (!HAVE_cmpstrsi)
8701 break;
8702 {
8703 tree arg1 = TREE_VALUE (arglist);
8704 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8705 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8706 rtx result;
8707
8708 int arg1_align
8709 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8710 int arg2_align
8711 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8712 enum machine_mode insn_mode
8713 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8714
8715 /* If we don't have POINTER_TYPE, call the function. */
8716 if (arg1_align == 0 || arg2_align == 0)
8717 {
8718 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8719 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8720 break;
8721 }
8722
8723 /* Make a place to write the result of the instruction. */
8724 result = target;
8725 if (! (result != 0
8726 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8727 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8728 result = gen_reg_rtx (insn_mode);
8729
8730 emit_insn (gen_cmpstrsi (result,
8731 gen_rtx (MEM, BLKmode,
8732 expand_expr (arg1, NULL_RTX,
8733 ptr_mode,
8734 EXPAND_NORMAL)),
8735 gen_rtx (MEM, BLKmode,
8736 expand_expr (arg2, NULL_RTX,
8737 ptr_mode,
8738 EXPAND_NORMAL)),
8739 expand_expr (len, NULL_RTX, VOIDmode, 0),
8740 GEN_INT (MIN (arg1_align, arg2_align))));
8741
8742 /* Return the value in the proper mode for this function. */
8743 mode = TYPE_MODE (TREE_TYPE (exp));
8744 if (GET_MODE (result) == mode)
8745 return result;
8746 else if (target != 0)
8747 {
8748 convert_move (target, result, 0);
8749 return target;
8750 }
8751 else
8752 return convert_to_mode (mode, result, 0);
8753 }
8754 #else
8755 case BUILT_IN_STRCMP:
8756 case BUILT_IN_MEMCMP:
8757 break;
8758 #endif
8759
8760 /* __builtin_setjmp is passed a pointer to an array of five words
8761 (not all will be used on all machines). It operates similarly to
8762 the C library function of the same name, but is more efficient.
8763 Much of the code below (and for longjmp) is copied from the handling
8764 of non-local gotos.
8765
8766 NOTE: This is intended for use by GNAT and will only work when
8767 used as GNAT uses it. This code will likely NOT survive to
8768 the GCC 2.8.0 release. */
8769 case BUILT_IN_SETJMP:
8770 if (arglist == 0
8771 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8772 break;
8773
8774 {
8775 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8776 VOIDmode, 0);
8777 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8778 enum machine_mode sa_mode = Pmode;
8779 rtx stack_save;
8780 int old_inhibit_defer_pop = inhibit_defer_pop;
8781 int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8782 get_identifier ("__dummy"), 0);
8783 rtx next_arg_reg;
8784 CUMULATIVE_ARGS args_so_far;
8785 int i;
8786
8787 #ifdef POINTERS_EXTEND_UNSIGNED
8788 buf_addr = convert_memory_address (Pmode, buf_addr);
8789 #endif
8790
8791 buf_addr = force_reg (Pmode, buf_addr);
8792
8793 if (target == 0 || GET_CODE (target) != REG
8794 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8795 target = gen_reg_rtx (value_mode);
8796
8797 emit_queue ();
8798
8799 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8800 current_function_calls_setjmp = 1;
8801
8802 /* We store the frame pointer and the address of lab1 in the buffer
8803 and use the rest of it for the stack save area, which is
8804 machine-dependent. */
8805 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8806 virtual_stack_vars_rtx);
8807 emit_move_insn
8808 (validize_mem (gen_rtx (MEM, Pmode,
8809 plus_constant (buf_addr,
8810 GET_MODE_SIZE (Pmode)))),
8811 gen_rtx (LABEL_REF, Pmode, lab1));
8812
8813 #ifdef HAVE_save_stack_nonlocal
8814 if (HAVE_save_stack_nonlocal)
8815 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8816 #endif
8817
8818 stack_save = gen_rtx (MEM, sa_mode,
8819 plus_constant (buf_addr,
8820 2 * GET_MODE_SIZE (Pmode)));
8821 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8822
8823 #ifdef HAVE_setjmp
8824 if (HAVE_setjmp)
8825 emit_insn (gen_setjmp ());
8826 #endif
8827
8828 /* Set TARGET to zero and branch around the other case. */
8829 emit_move_insn (target, const0_rtx);
8830 emit_jump_insn (gen_jump (lab2));
8831 emit_barrier ();
8832 emit_label (lab1);
8833
8834 /* Note that setjmp clobbers FP when we get here, so we have to
8835 make sure it's marked as used by this function. */
8836 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8837
8838 /* Mark the static chain as clobbered here so life information
8839 doesn't get messed up for it. */
8840 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8841
8842 /* Now put in the code to restore the frame pointer, and the
8843 argument pointer if needed. The code below is from expand_end_bindings
8844 in stmt.c; see detailed documentation there. */
8845 #ifdef HAVE_nonlocal_goto
8846 if (! HAVE_nonlocal_goto)
8847 #endif
8848 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8849
8850 current_function_has_nonlocal_goto = 1;
8851
8852 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8853 if (fixed_regs[ARG_POINTER_REGNUM])
8854 {
8855 #ifdef ELIMINABLE_REGS
8856 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8857
8858 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8859 if (elim_regs[i].from == ARG_POINTER_REGNUM
8860 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8861 break;
8862
8863 if (i == sizeof elim_regs / sizeof elim_regs[0])
8864 #endif
8865 {
8866 /* Now restore our arg pointer from the address at which it
8867 was saved in our stack frame.
8868 If space hasn't been allocated for it yet, make
8869 some now. */
8870 if (arg_pointer_save_area == 0)
8871 arg_pointer_save_area
8872 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8873 emit_move_insn (virtual_incoming_args_rtx,
8874 copy_to_reg (arg_pointer_save_area));
8875 }
8876 }
8877 #endif
8878
8879 #ifdef HAVE_nonlocal_goto_receiver
8880 if (HAVE_nonlocal_goto_receiver)
8881 emit_insn (gen_nonlocal_goto_receiver ());
8882 #endif
8883 /* The static chain pointer contains the address of the dummy function.
8884 We need to call it here to handle some PIC cases of restoring
8885 a global pointer. Then return 1. */
8886 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8887
8888 /* We can't actually call emit_library_call here, so do everything
8889 it does, which isn't much for a libfunc with no args. */
8890 op0 = memory_address (FUNCTION_MODE, op0);
8891
8892 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8893 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8894 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8895
8896 #ifndef ACCUMULATE_OUTGOING_ARGS
8897 #ifdef HAVE_call_pop
8898 if (HAVE_call_pop)
8899 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8900 const0_rtx, next_arg_reg,
8901 GEN_INT (return_pops)));
8902 else
8903 #endif
8904 #endif
8905
8906 #ifdef HAVE_call
8907 if (HAVE_call)
8908 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8909 const0_rtx, next_arg_reg, const0_rtx));
8910 else
8911 #endif
8912 abort ();
8913
8914 emit_move_insn (target, const1_rtx);
8915 emit_label (lab2);
8916 return target;
8917 }
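
/* A sketch of the buffer layout produced by the code above, where one
word is GET_MODE_SIZE (Pmode) bytes:

word 0:   frame pointer (virtual_stack_vars_rtx)
word 1:   address of the receiver label LAB1
word 2..: stack save area, written in SA_MODE

Any words of the five-word buffer beyond the save area are unused, as
the comment above notes. */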
8918
8919 /* __builtin_longjmp is passed a pointer to an array of five words
8920 and a value, which is a dummy. It's similar to the C library longjmp
8921 function but works with __builtin_setjmp above. */
8922 case BUILT_IN_LONGJMP:
8923 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8924 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8925 break;
8926
8927 {
8928 tree dummy_id = get_identifier ("__dummy");
8929 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
8930 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
8931 #ifdef POINTERS_EXTEND_UNSIGNED
8932 rtx buf_addr
8933 = force_reg (Pmode,
8934 convert_memory_address
8935 (Pmode,
8936 expand_expr (TREE_VALUE (arglist),
8937 NULL_RTX, VOIDmode, 0)));
8938 #else
8939 rtx buf_addr
8940 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
8941 NULL_RTX,
8942 VOIDmode, 0));
8943 #endif
8944 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8945 rtx lab = gen_rtx (MEM, Pmode,
8946 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8947 enum machine_mode sa_mode
8948 #ifdef HAVE_save_stack_nonlocal
8949 = (HAVE_save_stack_nonlocal
8950 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8951 : Pmode);
8952 #else
8953 = Pmode;
8954 #endif
8955 rtx stack = gen_rtx (MEM, sa_mode,
8956 plus_constant (buf_addr,
8957 2 * GET_MODE_SIZE (Pmode)));
8958
8959 DECL_EXTERNAL (dummy_decl) = 1;
8960 TREE_PUBLIC (dummy_decl) = 1;
8961 make_decl_rtl (dummy_decl, NULL_PTR, 1);
8962
8963 /* Expand the second expression just for side-effects. */
8964 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8965 const0_rtx, VOIDmode, 0);
8966
8967 assemble_external (dummy_decl);
8968
8969 /* Pick up FP, label, and SP from the block and jump. This code is
8970 from expand_goto in stmt.c; see there for detailed comments. */
8971 #if HAVE_nonlocal_goto
8972 if (HAVE_nonlocal_goto)
8973 emit_insn (gen_nonlocal_goto (fp, lab, stack,
8974 XEXP (DECL_RTL (dummy_decl), 0)));
8975 else
8976 #endif
8977 {
8978 lab = copy_to_reg (lab);
8979 emit_move_insn (hard_frame_pointer_rtx, fp);
8980 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8981
8982 /* Put in the static chain register the address of the dummy
8983 function. */
8984 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
8985 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8986 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
8987 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
8988 emit_indirect_jump (lab);
8989 }
8990
8991 return const0_rtx;
8992 }
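
/* An illustrative use of the two builtins together (the names do_work
and recover are hypothetical):

void *jmpbuf[5];

if (__builtin_setjmp (jmpbuf) == 0)
do_work ();     direct path; the builtin returned 0
else
recover ();     reached via __builtin_longjmp, which makes
the setjmp site appear to return 1

and later, possibly in a callee:

__builtin_longjmp (jmpbuf, 1);   the 1 is the dummy value
mentioned above. */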
8993
8994 default: /* just do library call, if unknown builtin */
8995 error ("built-in function `%s' not currently supported",
8996 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8997 }
8998
8999 /* The switch statement above can drop through to cause the function
9000 to be called normally. */
9001
9002 return expand_call (exp, target, ignore);
9003 }
9004 \f
9005 /* Built-in functions to perform an untyped call and return. */
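
/* A sketch of how __builtin_apply_args, __builtin_apply and
__builtin_return combine; target_fn and the size 64 are hypothetical,
and the size passed to __builtin_apply must be at least the number of
bytes of stack arguments to copy:

void *wrapper ()
{
void *args = __builtin_apply_args ();
void *result = __builtin_apply (target_fn, args, 64);
__builtin_return (result);
}

wrapper forwards its own arguments, in registers and on the stack, to
target_fn, then returns whatever target_fn returned. */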
9006
9007 /* For each register that may be used for calling a function, this
9008 gives a mode used to copy the register's value. VOIDmode indicates
9009 the register is not used for calling a function. If the machine
9010 has register windows, this gives only the outbound registers.
9011 INCOMING_REGNO gives the corresponding inbound register. */
9012 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9013
9014 /* For each register that may be used for returning values, this gives
9015 a mode used to copy the register's value. VOIDmode indicates the
9016 register is not used for returning values. If the machine has
9017 register windows, this gives only the outbound registers.
9018 INCOMING_REGNO gives the corresponding inbound register. */
9019 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9020
9021 /* For each register that may be used for calling a function, this
9022 gives the offset of that register into the block returned by
9023 __builtin_apply_args. 0 indicates that the register is not
9024 used for calling a function. */
9025 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9026
9027 /* Return the offset of register REGNO into the block returned by
9028 __builtin_apply_args. This is not declared static, since it is
9029 needed in objc-act.c. */
9030
9031 int
9032 apply_args_register_offset (regno)
9033 int regno;
9034 {
9035 apply_args_size ();
9036
9037 /* Arguments are always put in outgoing registers (in the argument
9038 block) when that makes sense. */
9039 #ifdef OUTGOING_REGNO
9040 regno = OUTGOING_REGNO (regno);
9041 #endif
9042 return apply_args_reg_offset[regno];
9043 }
9044
9045 /* Return the size required for the block returned by __builtin_apply_args,
9046 and initialize apply_args_mode. */
9047
9048 static int
9049 apply_args_size ()
9050 {
9051 static int size = -1;
9052 int align, regno;
9053 enum machine_mode mode;
9054
9055 /* The values computed by this function never change. */
9056 if (size < 0)
9057 {
9058 /* The first value is the incoming arg-pointer. */
9059 size = GET_MODE_SIZE (Pmode);
9060
9061 /* The second value is the structure value address unless this is
9062 passed as an "invisible" first argument. */
9063 if (struct_value_rtx)
9064 size += GET_MODE_SIZE (Pmode);
9065
9066 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9067 if (FUNCTION_ARG_REGNO_P (regno))
9068 {
9069 /* Search for the proper mode for copying this register's
9070 value. I'm not sure this is right, but it works so far. */
9071 enum machine_mode best_mode = VOIDmode;
9072
9073 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9074 mode != VOIDmode;
9075 mode = GET_MODE_WIDER_MODE (mode))
9076 if (HARD_REGNO_MODE_OK (regno, mode)
9077 && HARD_REGNO_NREGS (regno, mode) == 1)
9078 best_mode = mode;
9079
9080 if (best_mode == VOIDmode)
9081 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9082 mode != VOIDmode;
9083 mode = GET_MODE_WIDER_MODE (mode))
9084 if (HARD_REGNO_MODE_OK (regno, mode)
9085 && (mov_optab->handlers[(int) mode].insn_code
9086 != CODE_FOR_nothing))
9087 best_mode = mode;
9088
9089 mode = best_mode;
9090 if (mode == VOIDmode)
9091 abort ();
9092
9093 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9094 if (size % align != 0)
9095 size = CEIL (size, align) * align;
9096 apply_args_reg_offset[regno] = size;
9097 size += GET_MODE_SIZE (mode);
9098 apply_args_mode[regno] = mode;
9099 }
9100 else
9101 {
9102 apply_args_mode[regno] = VOIDmode;
9103 apply_args_reg_offset[regno] = 0;
9104 }
9105 }
9106 return size;
9107 }
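
/* A worked example of the layout computed above, assuming a
hypothetical 32-bit target with no struct_value_rtx and argument
registers 0, 1 and 2 each copied in a 4-byte mode:

offset  0: incoming arg pointer
offset  4: register 0
offset  8: register 1
offset 12: register 2

giving a size of 16.  If some register's mode required 8-byte
alignment, SIZE would first be rounded up to a multiple of 8 by
CEIL (size, align) * align. */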
9108
9109 /* Return the size required for the block returned by __builtin_apply,
9110 and initialize apply_result_mode. */
9111
9112 static int
9113 apply_result_size ()
9114 {
9115 static int size = -1;
9116 int align, regno;
9117 enum machine_mode mode;
9118
9119 /* The values computed by this function never change. */
9120 if (size < 0)
9121 {
9122 size = 0;
9123
9124 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9125 if (FUNCTION_VALUE_REGNO_P (regno))
9126 {
9127 /* Search for the proper mode for copying this register's
9128 value. I'm not sure this is right, but it works so far. */
9129 enum machine_mode best_mode = VOIDmode;
9130
9131 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9132 mode != TImode;
9133 mode = GET_MODE_WIDER_MODE (mode))
9134 if (HARD_REGNO_MODE_OK (regno, mode))
9135 best_mode = mode;
9136
9137 if (best_mode == VOIDmode)
9138 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9139 mode != VOIDmode;
9140 mode = GET_MODE_WIDER_MODE (mode))
9141 if (HARD_REGNO_MODE_OK (regno, mode)
9142 && (mov_optab->handlers[(int) mode].insn_code
9143 != CODE_FOR_nothing))
9144 best_mode = mode;
9145
9146 mode = best_mode;
9147 if (mode == VOIDmode)
9148 abort ();
9149
9150 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9151 if (size % align != 0)
9152 size = CEIL (size, align) * align;
9153 size += GET_MODE_SIZE (mode);
9154 apply_result_mode[regno] = mode;
9155 }
9156 else
9157 apply_result_mode[regno] = VOIDmode;
9158
9159 /* Allow targets that use untyped_call and untyped_return to override
9160 the size so that machine-specific information can be stored here. */
9161 #ifdef APPLY_RESULT_SIZE
9162 size = APPLY_RESULT_SIZE;
9163 #endif
9164 }
9165 return size;
9166 }
9167
9168 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9169 /* Create a vector describing the result block RESULT. If SAVEP is true,
9170 the result block is used to save the values; otherwise it is used to
9171 restore the values. */
9172
9173 static rtx
9174 result_vector (savep, result)
9175 int savep;
9176 rtx result;
9177 {
9178 int regno, size, align, nelts;
9179 enum machine_mode mode;
9180 rtx reg, mem;
9181 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9182
9183 size = nelts = 0;
9184 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9185 if ((mode = apply_result_mode[regno]) != VOIDmode)
9186 {
9187 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9188 if (size % align != 0)
9189 size = CEIL (size, align) * align;
9190 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9191 mem = change_address (result, mode,
9192 plus_constant (XEXP (result, 0), size));
9193 savevec[nelts++] = (savep
9194 ? gen_rtx (SET, VOIDmode, mem, reg)
9195 : gen_rtx (SET, VOIDmode, reg, mem));
9196 size += GET_MODE_SIZE (mode);
9197 }
9198 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9199 }
9200 #endif /* HAVE_untyped_call or HAVE_untyped_return */
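
/* For instance, on a hypothetical machine that returns an int in hard
register 0, result_vector (1, result) builds a PARALLEL of the rough
form

(parallel [(set (mem:SI result-address) (reg:SI 0))])

and with SAVEP zero each SET runs the other way, reloading the return
registers from the block. */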
9201
9202 /* Save the state required to perform an untyped call with the same
9203 arguments as were passed to the current function. */
9204
9205 static rtx
9206 expand_builtin_apply_args ()
9207 {
9208 rtx registers;
9209 int size, align, regno;
9210 enum machine_mode mode;
9211
9212 /* Create a block where the arg-pointer, structure value address,
9213 and argument registers can be saved. */
9214 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9215
9216 /* Walk past the arg-pointer and structure value address. */
9217 size = GET_MODE_SIZE (Pmode);
9218 if (struct_value_rtx)
9219 size += GET_MODE_SIZE (Pmode);
9220
9221 /* Save each register used in calling a function to the block. */
9222 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9223 if ((mode = apply_args_mode[regno]) != VOIDmode)
9224 {
9225 rtx tem;
9226
9227 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9228 if (size % align != 0)
9229 size = CEIL (size, align) * align;
9230
9231 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9232
9233 #ifdef STACK_REGS
9234 /* For reg-stack.c's stack register housekeeping.
9235 Compare with a similar piece of code in function.c. */
9236
9237 emit_insn (gen_rtx (USE, mode, tem));
9238 #endif
9239
9240 emit_move_insn (change_address (registers, mode,
9241 plus_constant (XEXP (registers, 0),
9242 size)),
9243 tem);
9244 size += GET_MODE_SIZE (mode);
9245 }
9246
9247 /* Save the arg pointer to the block. */
9248 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9249 copy_to_reg (virtual_incoming_args_rtx));
9250 size = GET_MODE_SIZE (Pmode);
9251
9252 /* Save the structure value address unless this is passed as an
9253 "invisible" first argument. */
9254 if (struct_value_incoming_rtx)
9255 {
9256 emit_move_insn (change_address (registers, Pmode,
9257 plus_constant (XEXP (registers, 0),
9258 size)),
9259 copy_to_reg (struct_value_incoming_rtx));
9260 size += GET_MODE_SIZE (Pmode);
9261 }
9262
9263 /* Return the address of the block. */
9264 return copy_addr_to_reg (XEXP (registers, 0));
9265 }
9266
9267 /* Perform an untyped call and save the state required to perform an
9268 untyped return of whatever value was returned by the given function. */
9269
9270 static rtx
9271 expand_builtin_apply (function, arguments, argsize)
9272 rtx function, arguments, argsize;
9273 {
9274 int size, align, regno;
9275 enum machine_mode mode;
9276 rtx incoming_args, result, reg, dest, call_insn;
9277 rtx old_stack_level = 0;
9278 rtx call_fusage = 0;
9279
9280 /* Create a block where the return registers can be saved. */
9281 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9282
9283 /* ??? The argsize value should be adjusted here. */
9284
9285 /* Fetch the arg pointer from the ARGUMENTS block. */
9286 incoming_args = gen_reg_rtx (Pmode);
9287 emit_move_insn (incoming_args,
9288 gen_rtx (MEM, Pmode, arguments));
9289 #ifndef STACK_GROWS_DOWNWARD
9290 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9291 incoming_args, 0, OPTAB_LIB_WIDEN);
9292 #endif
9293
9294 /* Perform postincrements before actually calling the function. */
9295 emit_queue ();
9296
9297 /* Push a new argument block and copy the arguments. */
9298 do_pending_stack_adjust ();
9299 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9300
9301 /* Push a block of memory onto the stack to store the memory arguments.
9302 Save the address in a register, and copy the memory arguments. ??? I
9303 haven't figured out how the calling convention macros affect this,
9304 but it's likely that the source and/or destination addresses in
9305 the block copy will need updating in machine specific ways. */
9306 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9307 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9308 gen_rtx (MEM, BLKmode, incoming_args),
9309 argsize,
9310 PARM_BOUNDARY / BITS_PER_UNIT);
9311
9312 /* Refer to the argument block. */
9313 apply_args_size ();
9314 arguments = gen_rtx (MEM, BLKmode, arguments);
9315
9316 /* Walk past the arg-pointer and structure value address. */
9317 size = GET_MODE_SIZE (Pmode);
9318 if (struct_value_rtx)
9319 size += GET_MODE_SIZE (Pmode);
9320
9321 /* Restore each of the registers previously saved. Make USE insns
9322 for each of these registers for use in making the call. */
9323 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9324 if ((mode = apply_args_mode[regno]) != VOIDmode)
9325 {
9326 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9327 if (size % align != 0)
9328 size = CEIL (size, align) * align;
9329 reg = gen_rtx (REG, mode, regno);
9330 emit_move_insn (reg,
9331 change_address (arguments, mode,
9332 plus_constant (XEXP (arguments, 0),
9333 size)));
9334
9335 use_reg (&call_fusage, reg);
9336 size += GET_MODE_SIZE (mode);
9337 }
9338
9339 /* Restore the structure value address unless this is passed as an
9340 "invisible" first argument. */
9341 size = GET_MODE_SIZE (Pmode);
9342 if (struct_value_rtx)
9343 {
9344 rtx value = gen_reg_rtx (Pmode);
9345 emit_move_insn (value,
9346 change_address (arguments, Pmode,
9347 plus_constant (XEXP (arguments, 0),
9348 size)));
9349 emit_move_insn (struct_value_rtx, value);
9350 if (GET_CODE (struct_value_rtx) == REG)
9351 use_reg (&call_fusage, struct_value_rtx);
9352 size += GET_MODE_SIZE (Pmode);
9353 }
9354
9355 /* All arguments and registers used for the call are set up by now! */
9356 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9357
9358 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there is
9359 nothing to do, and we don't want to load it into a register as an
9360 optimization, because prepare_call_address already did that if needed. */
9361 if (GET_CODE (function) != SYMBOL_REF)
9362 function = memory_address (FUNCTION_MODE, function);
9363
9364 /* Generate the actual call instruction and save the return value. */
9365 #ifdef HAVE_untyped_call
9366 if (HAVE_untyped_call)
9367 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9368 result, result_vector (1, result)));
9369 else
9370 #endif
9371 #ifdef HAVE_call_value
9372 if (HAVE_call_value)
9373 {
9374 rtx valreg = 0;
9375
9376 /* Locate the unique return register. It is not possible to
9377 express a call that sets more than one return register using
9378 call_value; use untyped_call for that. In fact, untyped_call
9379 only needs to save the return registers in the given block. */
9380 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9381 if ((mode = apply_result_mode[regno]) != VOIDmode)
9382 {
9383 if (valreg)
9384 abort (); /* HAVE_untyped_call required. */
9385 valreg = gen_rtx (REG, mode, regno);
9386 }
9387
9388 emit_call_insn (gen_call_value (valreg,
9389 gen_rtx (MEM, FUNCTION_MODE, function),
9390 const0_rtx, NULL_RTX, const0_rtx));
9391
9392 emit_move_insn (change_address (result, GET_MODE (valreg),
9393 XEXP (result, 0)),
9394 valreg);
9395 }
9396 else
9397 #endif
9398 abort ();
9399
9400 /* Find the CALL insn we just emitted. */
9401 for (call_insn = get_last_insn ();
9402 call_insn && GET_CODE (call_insn) != CALL_INSN;
9403 call_insn = PREV_INSN (call_insn))
9404 ;
9405
9406 if (! call_insn)
9407 abort ();
9408
9409 /* Put the register usage information on the CALL. If there is already
9410 some usage information, put ours at the end. */
9411 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9412 {
9413 rtx link;
9414
9415 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9416 link = XEXP (link, 1))
9417 ;
9418
9419 XEXP (link, 1) = call_fusage;
9420 }
9421 else
9422 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9423
9424 /* Restore the stack. */
9425 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9426
9427 /* Return the address of the result block. */
9428 return copy_addr_to_reg (XEXP (result, 0));
9429 }
9430
9431 /* Perform an untyped return. */
9432
9433 static void
9434 expand_builtin_return (result)
9435 rtx result;
9436 {
9437 int size, align, regno;
9438 enum machine_mode mode;
9439 rtx reg;
9440 rtx call_fusage = 0;
9441
9442 apply_result_size ();
9443 result = gen_rtx (MEM, BLKmode, result);
9444
9445 #ifdef HAVE_untyped_return
9446 if (HAVE_untyped_return)
9447 {
9448 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9449 emit_barrier ();
9450 return;
9451 }
9452 #endif
9453
9454 /* Restore the return value and note that each value is used. */
9455 size = 0;
9456 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9457 if ((mode = apply_result_mode[regno]) != VOIDmode)
9458 {
9459 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9460 if (size % align != 0)
9461 size = CEIL (size, align) * align;
9462 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9463 emit_move_insn (reg,
9464 change_address (result, mode,
9465 plus_constant (XEXP (result, 0),
9466 size)));
9467
9468 push_to_sequence (call_fusage);
9469 emit_insn (gen_rtx (USE, VOIDmode, reg));
9470 call_fusage = get_insns ();
9471 end_sequence ();
9472 size += GET_MODE_SIZE (mode);
9473 }
9474
9475 /* Put the USE insns before the return. */
9476 emit_insns (call_fusage);
9477
9478 /* Return whatever value was restored by jumping directly to the end
9479 of the function. */
9480 expand_null_return ();
9481 }
9482 \f
9483 /* Expand code for a post- or pre- increment or decrement
9484 and return the RTX for the result.
9485 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9486
9487 static rtx
9488 expand_increment (exp, post, ignore)
9489 register tree exp;
9490 int post, ignore;
9491 {
9492 register rtx op0, op1;
9493 register rtx temp, value;
9494 register tree incremented = TREE_OPERAND (exp, 0);
9495 optab this_optab = add_optab;
9496 int icode;
9497 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9498 int op0_is_copy = 0;
9499 int single_insn = 0;
9500 /* 1 means we can't store into OP0 directly,
9501 because it is a subreg narrower than a word,
9502 and we don't dare clobber the rest of the word. */
9503 int bad_subreg = 0;
9504
9505 if (output_bytecode)
9506 {
9507 bc_expand_expr (exp);
9508 return NULL_RTX;
9509 }
9510
9511 /* Stabilize any component ref that might need to be
9512 evaluated more than once below. */
9513 if (!post
9514 || TREE_CODE (incremented) == BIT_FIELD_REF
9515 || (TREE_CODE (incremented) == COMPONENT_REF
9516 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9517 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9518 incremented = stabilize_reference (incremented);
9519 /* Nested *INCREMENT_EXPRs can happen in C++. We must force the
9520 innermost ones into SAVE_EXPRs so that they don't accidentally get
9521 evaluated more than once by the code below. */
9522 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9523 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9524 incremented = save_expr (incremented);
9525
9526 /* Compute the operands as RTX.
9527 Note whether OP0 is the actual lvalue or a copy of it:
9528 I believe it is a copy iff it is a register or subreg
9529 and insns were generated in computing it. */
9530
9531 temp = get_last_insn ();
9532 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9533
9534 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9535 in place but instead must do sign- or zero-extension during assignment,
9536 so we copy it into a new register and let the code below use it as
9537 a copy.
9538
9539 Note that we can safely modify this SUBREG since it is known not to be
9540 shared (it was made by the expand_expr call above). */
9541
9542 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9543 {
9544 if (post)
9545 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9546 else
9547 bad_subreg = 1;
9548 }
9549 else if (GET_CODE (op0) == SUBREG
9550 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9551 {
9552 /* We cannot increment this SUBREG in place. If we are
9553 post-incrementing, get a copy of the old value. Otherwise,
9554 just mark that we cannot increment in place. */
9555 if (post)
9556 op0 = copy_to_reg (op0);
9557 else
9558 bad_subreg = 1;
9559 }
9560
9561 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9562 && temp != get_last_insn ());
9563 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9564
9565 /* Decide whether incrementing or decrementing. */
9566 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9567 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9568 this_optab = sub_optab;
9569
9570 /* Convert decrement by a constant into a negative increment. */
9571 if (this_optab == sub_optab
9572 && GET_CODE (op1) == CONST_INT)
9573 {
9574 op1 = GEN_INT (- INTVAL (op1));
9575 this_optab = add_optab;
9576 }
9577
9578 /* For a preincrement, see if we can do this with a single instruction. */
9579 if (!post)
9580 {
9581 icode = (int) this_optab->handlers[(int) mode].insn_code;
9582 if (icode != (int) CODE_FOR_nothing
9583 /* Make sure that OP0 is valid for operands 0 and 1
9584 of the insn we want to queue. */
9585 && (*insn_operand_predicate[icode][0]) (op0, mode)
9586 && (*insn_operand_predicate[icode][1]) (op0, mode)
9587 && (*insn_operand_predicate[icode][2]) (op1, mode))
9588 single_insn = 1;
9589 }
9590
9591 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9592 then we cannot just increment OP0. We must therefore contrive to
9593 increment the original value. Then, for postincrement, we can return
9594 OP0 since it is a copy of the old value. For preincrement, expand here
9595 unless we can do it with a single insn.
9596
9597 Likewise if storing directly into OP0 would clobber high bits
9598 we need to preserve (bad_subreg). */
9599 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9600 {
9601 /* This is the easiest way to increment the value wherever it is.
9602 Problems with multiple evaluation of INCREMENTED are prevented
9603 because either (1) it is a component_ref or preincrement,
9604 in which case it was stabilized above, or (2) it is an array_ref
9605 with constant index in an array in a register, which is
9606 safe to reevaluate. */
9607 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9608 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9609 ? MINUS_EXPR : PLUS_EXPR),
9610 TREE_TYPE (exp),
9611 incremented,
9612 TREE_OPERAND (exp, 1));
9613
9614 while (TREE_CODE (incremented) == NOP_EXPR
9615 || TREE_CODE (incremented) == CONVERT_EXPR)
9616 {
9617 newexp = convert (TREE_TYPE (incremented), newexp);
9618 incremented = TREE_OPERAND (incremented, 0);
9619 }
9620
9621 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9622 return post ? op0 : temp;
9623 }
9624
9625 if (post)
9626 {
9627 /* We have a true reference to the value in OP0.
9628 If there is an insn to add or subtract in this mode, queue it.
9629 Queueing the increment insn avoids the register shuffling
9630 that often results if we must increment now and first save
9631 the old value for subsequent use. */
9632
9633 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9634 op0 = stabilize (op0);
9635 #endif
9636
9637 icode = (int) this_optab->handlers[(int) mode].insn_code;
9638 if (icode != (int) CODE_FOR_nothing
9639 /* Make sure that OP0 is valid for operands 0 and 1
9640 of the insn we want to queue. */
9641 && (*insn_operand_predicate[icode][0]) (op0, mode)
9642 && (*insn_operand_predicate[icode][1]) (op0, mode))
9643 {
9644 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9645 op1 = force_reg (mode, op1);
9646
9647 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9648 }
9649 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9650 {
9651 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9652 rtx temp, result;
9653
9654 op0 = change_address (op0, VOIDmode, addr);
9655 temp = force_reg (GET_MODE (op0), op0);
9656 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9657 op1 = force_reg (mode, op1);
9658
9659 /* The increment queue is LIFO, thus we have to `queue'
9660 the instructions in reverse order. */
9661 enqueue_insn (op0, gen_move_insn (op0, temp));
9662 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9663 return result;
9664 }
9665 }
9666
9667 /* Preincrement, or we can't increment with one simple insn. */
9668 if (post)
9669 /* Save a copy of the value before inc or dec, to return it later. */
9670 temp = value = copy_to_reg (op0);
9671 else
9672 /* Arrange to return the incremented value. */
9673 /* Copy the rtx because expand_binop will protect from the queue,
9674 and the results of that would be invalid for us to return
9675 if our caller does emit_queue before using our result. */
9676 temp = copy_rtx (value = op0);
9677
9678 /* Increment however we can. */
9679 op1 = expand_binop (mode, this_optab, value, op1, op0,
9680 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9681 /* Make sure the value is stored into OP0. */
9682 if (op1 != op0)
9683 emit_move_insn (op0, op1);
9684
9685 return temp;
9686 }
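
/* For example, given a post-increment used for its value, as in

y = x++;

when an add insn accepting X directly exists, the code above does not
emit the add at once: it enqueues (set x (plus x 1)) and returns an rtx
standing for the old value of X, so the addition is only emitted at the
next emit_queue call, after the old value has been used. */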
9687 \f
9688 /* Expand all function calls contained within EXP, innermost ones first.
9689 But don't look within expressions that have sequence points.
9690 For each CALL_EXPR, record the rtx for its value
9691 in the CALL_EXPR_RTL field. */
9692
9693 static void
9694 preexpand_calls (exp)
9695 tree exp;
9696 {
9697 register int nops, i;
9698 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9699
9700 if (! do_preexpand_calls)
9701 return;
9702
9703 /* Only expressions and references can contain calls. */
9704
9705 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9706 return;
9707
9708 switch (TREE_CODE (exp))
9709 {
9710 case CALL_EXPR:
9711 /* Do nothing if already expanded. */
9712 if (CALL_EXPR_RTL (exp) != 0
9713 /* Do nothing if the call returns a variable-sized object. */
9714 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9715 /* Do nothing to built-in functions. */
9716 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9717 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9718 == FUNCTION_DECL)
9719 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9720 return;
9721
9722 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9723 return;
9724
9725 case COMPOUND_EXPR:
9726 case COND_EXPR:
9727 case TRUTH_ANDIF_EXPR:
9728 case TRUTH_ORIF_EXPR:
9729 /* If we find one of these, then we can be sure
9730 the adjust will be done for it (since it makes jumps).
9731 Do it now, so that if this is inside an argument
9732 of a function, we don't get the stack adjustment
9733 after some other args have already been pushed. */
9734 do_pending_stack_adjust ();
9735 return;
9736
9737 case BLOCK:
9738 case RTL_EXPR:
9739 case WITH_CLEANUP_EXPR:
9740 case CLEANUP_POINT_EXPR:
9741 return;
9742
9743 case SAVE_EXPR:
9744 if (SAVE_EXPR_RTL (exp) != 0)
9745 return;
9746 }
9747
9748 nops = tree_code_length[(int) TREE_CODE (exp)];
9749 for (i = 0; i < nops; i++)
9750 if (TREE_OPERAND (exp, i) != 0)
9751 {
9752 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9753 if (type == 'e' || type == '<' || type == '1' || type == '2'
9754 || type == 'r')
9755 preexpand_calls (TREE_OPERAND (exp, i));
9756 }
9757 }
9758 \f
9759 /* At the start of a function, record that we have no previously-pushed
9760 arguments waiting to be popped. */
9761
9762 void
9763 init_pending_stack_adjust ()
9764 {
9765 pending_stack_adjust = 0;
9766 }
9767
9768 /* When exiting from a function, if safe, clear out any pending stack adjust
9769 so the adjustment won't get done. */
9770
9771 void
9772 clear_pending_stack_adjust ()
9773 {
9774 #ifdef EXIT_IGNORE_STACK
9775 if (optimize > 0
9776 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9777 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9778 && ! flag_inline_functions)
9779 pending_stack_adjust = 0;
9780 #endif
9781 }
9782
9783 /* Pop any previously-pushed arguments that have not been popped yet. */
9784
9785 void
9786 do_pending_stack_adjust ()
9787 {
9788 if (inhibit_defer_pop == 0)
9789 {
9790 if (pending_stack_adjust != 0)
9791 adjust_stack (GEN_INT (pending_stack_adjust));
9792 pending_stack_adjust = 0;
9793 }
9794 }
9795
9796 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9797 Returns the cleanups to be performed. */
9798
9799 static tree
9800 defer_cleanups_to (old_cleanups)
9801 tree old_cleanups;
9802 {
9803 tree new_cleanups = NULL_TREE;
9804 tree cleanups = cleanups_this_call;
9805 tree last = NULL_TREE;
9806
9807 while (cleanups_this_call != old_cleanups)
9808 {
9809 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9810 last = cleanups_this_call;
9811 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9812 }
9813
9814 if (last)
9815 {
9816 /* Remove the list from the chain of cleanups. */
9817 TREE_CHAIN (last) = NULL_TREE;
9818
9819 /* Reverse them so that we can build them in the right order. */
9820 cleanups = nreverse (cleanups);
9821
9822 /* All cleanups must be on the function_obstack. */
9823 push_obstacks_nochange ();
9824 resume_temporary_allocation ();
9825
9826 while (cleanups)
9827 {
9828 if (new_cleanups)
9829 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9830 TREE_VALUE (cleanups), new_cleanups);
9831 else
9832 new_cleanups = TREE_VALUE (cleanups);
9833
9834 cleanups = TREE_CHAIN (cleanups);
9835 }
9836
9837 pop_obstacks ();
9838 }
9839
9840 return new_cleanups;
9841 }
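
/* For example, if cleanups C1, C2 and C3 were pushed in that order
after OLD_CLEANUPS, the list is C3, C2, C1, and the expression built
above is the nested COMPOUND_EXPR

(C3, (C2, C1))

so the newest cleanup runs first when the result is eventually
expanded. */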
9842
9843 /* Expand all cleanups up to OLD_CLEANUPS.
9844 Needed here, and also for language-dependent calls. */
9845
9846 void
9847 expand_cleanups_to (old_cleanups)
9848 tree old_cleanups;
9849 {
9850 while (cleanups_this_call != old_cleanups)
9851 {
9852 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9853 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9854 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9855 }
9856 }
9857 \f
9858 /* Expand conditional expressions. */
9859
9860 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9861 LABEL is an rtx of code CODE_LABEL, in this function and all the
9862 functions here. */
9863
9864 void
9865 jumpifnot (exp, label)
9866 tree exp;
9867 rtx label;
9868 {
9869 do_jump (exp, label, NULL_RTX);
9870 }
9871
9872 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9873
9874 void
9875 jumpif (exp, label)
9876 tree exp;
9877 rtx label;
9878 {
9879 do_jump (exp, NULL_RTX, label);
9880 }
9881
9882 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9883 the result is zero, or IF_TRUE_LABEL if the result is one.
9884 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9885 meaning fall through in that case.
9886
9887 do_jump always does any pending stack adjust except when it does not
9888 actually perform a jump. An example where there is no jump
9889 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9890
9891 This function is responsible for optimizing cases such as
9892 &&, || and comparison operators in EXP. */
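
/* For example, for a conditional such as

if (a && b) stmt;

the front end calls jumpifnot on the TRUTH_ANDIF_EXPR with a label L
past the statement, and the ANDIF case below emits, in effect,

if (a == 0) goto L;
if (b == 0) goto L;
stmt;
L:

without ever materializing the boolean value in a register. */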
9893
9894 void
9895 do_jump (exp, if_false_label, if_true_label)
9896 tree exp;
9897 rtx if_false_label, if_true_label;
9898 {
9899 register enum tree_code code = TREE_CODE (exp);
9900 /* Some cases need to create a label to jump to
9901 in order to properly fall through.
9902 These cases set DROP_THROUGH_LABEL nonzero. */
9903 rtx drop_through_label = 0;
9904 rtx temp;
9905 rtx comparison = 0;
9906 int i;
9907 tree type;
9908 enum machine_mode mode;
9909
9910 emit_queue ();
9911
9912 switch (code)
9913 {
9914 case ERROR_MARK:
9915 break;
9916
9917 case INTEGER_CST:
9918 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9919 if (temp)
9920 emit_jump (temp);
9921 break;
9922
9923 #if 0
9924 /* This is not true with #pragma weak */
9925 case ADDR_EXPR:
9926 /* The address of something can never be zero. */
9927 if (if_true_label)
9928 emit_jump (if_true_label);
9929 break;
9930 #endif
9931
9932 case NOP_EXPR:
9933 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9934 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9935 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9936 goto normal;
9937 case CONVERT_EXPR:
9938 /* If we are narrowing the operand, we have to do the compare in the
9939 narrower mode. */
9940 if ((TYPE_PRECISION (TREE_TYPE (exp))
9941 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9942 goto normal;
9943 case NON_LVALUE_EXPR:
9944 case REFERENCE_EXPR:
9945 case ABS_EXPR:
9946 case NEGATE_EXPR:
9947 case LROTATE_EXPR:
9948 case RROTATE_EXPR:
9949 /* These cannot change zero->non-zero or vice versa. */
9950 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9951 break;
9952
9953 #if 0
9954 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9955 a test and can be longer if the test is eliminated. */
9956 case PLUS_EXPR:
9957 /* Reduce to minus. */
9958 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9959 TREE_OPERAND (exp, 0),
9960 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9961 TREE_OPERAND (exp, 1))));
9962 /* Process as MINUS. */
9963 #endif
9964
9965 case MINUS_EXPR:
9966 /* Non-zero iff operands of minus differ. */
9967 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9968 TREE_OPERAND (exp, 0),
9969 TREE_OPERAND (exp, 1)),
9970 NE, NE);
9971 break;
9972
9973 case BIT_AND_EXPR:
9974 /* If we are AND'ing with a small constant, do this comparison in the
9975 smallest type that fits. If the machine doesn't have comparisons
9976 that small, it will be converted back to the wider comparison.
9977 This helps if we are testing the sign bit of a narrower object.
9978 combine can't do this for us because it can't know whether a
9979 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9980
9981 if (! SLOW_BYTE_ACCESS
9982 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9983 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9984 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9985 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9986 && (type = type_for_mode (mode, 1)) != 0
9987 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9988 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9989 != CODE_FOR_nothing))
9990 {
9991 do_jump (convert (type, exp), if_false_label, if_true_label);
9992 break;
9993 }
9994 goto normal;
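
/* For instance, on a hypothetical target where byte access is not
slow, `if (x & 0x80)' with X an int gives I == 7, so an 8-bit mode is
chosen and the test is done on the low byte, catching the common case
of testing the sign bit of a narrower object. */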
9995
9996 case TRUTH_NOT_EXPR:
9997 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9998 break;
9999
10000 case TRUTH_ANDIF_EXPR:
10001 {
10002 rtx seq1, seq2;
10003 tree cleanups, old_cleanups;
10004
10005 if (if_false_label == 0)
10006 if_false_label = drop_through_label = gen_label_rtx ();
10007 start_sequence ();
10008 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10009 seq1 = get_insns ();
10010 end_sequence ();
10011
10012 old_cleanups = cleanups_this_call;
10013 start_sequence ();
10014 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10015 seq2 = get_insns ();
10016 cleanups = defer_cleanups_to (old_cleanups);
10017 end_sequence ();
10018
10019 if (cleanups)
10020 {
10021 rtx flag = gen_reg_rtx (word_mode);
10022 tree new_cleanups;
10023 tree cond;
10024
10025 /* Flag cleanups as not needed. */
10026 emit_move_insn (flag, const0_rtx);
10027 emit_insns (seq1);
10028
10029 /* Flag cleanups as needed. */
10030 emit_move_insn (flag, const1_rtx);
10031 emit_insns (seq2);
10032
10033 /* All cleanups must be on the function_obstack. */
10034 push_obstacks_nochange ();
10035 resume_temporary_allocation ();
10036
10037 /* Convert FLAG, which is an rtx, into a tree. */
10038 cond = make_node (RTL_EXPR);
10039 TREE_TYPE (cond) = integer_type_node;
10040 RTL_EXPR_RTL (cond) = flag;
10041 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10042 cond = save_expr (cond);
10043
10044 new_cleanups = build (COND_EXPR, void_type_node,
10045 truthvalue_conversion (cond),
10046 cleanups, integer_zero_node);
10047 new_cleanups = fold (new_cleanups);
10048
10049 pop_obstacks ();
10050
10051 /* Now add in the conditionalized cleanups. */
10052 cleanups_this_call
10053 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10054 expand_eh_region_start ();
10055 }
10056 else
10057 {
10058 emit_insns (seq1);
10059 emit_insns (seq2);
10060 }
10061 }
10062 break;
10063
10064 case TRUTH_ORIF_EXPR:
10065 {
10066 rtx seq1, seq2;
10067 tree cleanups, old_cleanups;
10068
10069 if (if_true_label == 0)
10070 if_true_label = drop_through_label = gen_label_rtx ();
10071 start_sequence ();
10072 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10073 seq1 = get_insns ();
10074 end_sequence ();
10075
10076 old_cleanups = cleanups_this_call;
10077 start_sequence ();
10078 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10079 seq2 = get_insns ();
10080 cleanups = defer_cleanups_to (old_cleanups);
10081 end_sequence ();
10082
10083 if (cleanups)
10084 {
10085 rtx flag = gen_reg_rtx (word_mode);
10086 tree new_cleanups;
10087 tree cond;
10088
10089 /* Flag cleanups as not needed. */
10090 emit_move_insn (flag, const0_rtx);
10091 emit_insns (seq1);
10092
10093 /* Flag cleanups as needed. */
10094 emit_move_insn (flag, const1_rtx);
10095 emit_insns (seq2);
10096
10097 /* All cleanups must be on the function_obstack. */
10098 push_obstacks_nochange ();
10099 resume_temporary_allocation ();
10100
10101 /* Convert FLAG, which is an rtx, into a tree. */
10102 cond = make_node (RTL_EXPR);
10103 TREE_TYPE (cond) = integer_type_node;
10104 RTL_EXPR_RTL (cond) = flag;
10105 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10106 cond = save_expr (cond);
10107
10108 new_cleanups = build (COND_EXPR, void_type_node,
10109 truthvalue_conversion (cond),
10110 cleanups, integer_zero_node);
10111 new_cleanups = fold (new_cleanups);
10112
10113 pop_obstacks ();
10114
10115 /* Now add in the conditionalized cleanups. */
10116 cleanups_this_call
10117 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10118 expand_eh_region_start ();
10119 }
10120 else
10121 {
10122 emit_insns (seq1);
10123 emit_insns (seq2);
10124 }
10125 }
10126 break;
10127
10128 case COMPOUND_EXPR:
10129 push_temp_slots ();
10130 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10131 preserve_temp_slots (NULL_RTX);
10132 free_temp_slots ();
10133 pop_temp_slots ();
10134 emit_queue ();
10135 do_pending_stack_adjust ();
10136 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10137 break;
10138
10139 case COMPONENT_REF:
10140 case BIT_FIELD_REF:
10141 case ARRAY_REF:
10142 {
10143 int bitsize, bitpos, unsignedp;
10144 enum machine_mode mode;
10145 tree type;
10146 tree offset;
10147 int volatilep = 0;
10148
10149 /* Get description of this reference. We don't actually care
10150 about the underlying object here. */
10151 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10152 &mode, &unsignedp, &volatilep);
10153
10154 type = type_for_size (bitsize, unsignedp);
10155 if (! SLOW_BYTE_ACCESS
10156 && type != 0 && bitsize >= 0
10157 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10158 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10159 != CODE_FOR_nothing))
10160 {
10161 do_jump (convert (type, exp), if_false_label, if_true_label);
10162 break;
10163 }
10164 goto normal;
10165 }
10166
10167 case COND_EXPR:
10168 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10169 if (integer_onep (TREE_OPERAND (exp, 1))
10170 && integer_zerop (TREE_OPERAND (exp, 2)))
10171 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10172
10173 else if (integer_zerop (TREE_OPERAND (exp, 1))
10174 && integer_onep (TREE_OPERAND (exp, 2)))
10175 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10176
10177 else
10178 {
10179 register rtx label1 = gen_label_rtx ();
10180 drop_through_label = gen_label_rtx ();
10181 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10182 /* Now the THEN-expression. */
10183 do_jump (TREE_OPERAND (exp, 1),
10184 if_false_label ? if_false_label : drop_through_label,
10185 if_true_label ? if_true_label : drop_through_label);
10186 /* In case the do_jump just above never jumps. */
10187 do_pending_stack_adjust ();
10188 emit_label (label1);
10189 /* Now the ELSE-expression. */
10190 do_jump (TREE_OPERAND (exp, 2),
10191 if_false_label ? if_false_label : drop_through_label,
10192 if_true_label ? if_true_label : drop_through_label);
10193 }
10194 break;
10195
10196 case EQ_EXPR:
10197 {
10198 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10199
10200 if (integer_zerop (TREE_OPERAND (exp, 1)))
10201 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10202 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10203 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10204 do_jump
10205 (fold
10206 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10207 fold (build (EQ_EXPR, TREE_TYPE (exp),
10208 fold (build1 (REALPART_EXPR,
10209 TREE_TYPE (inner_type),
10210 TREE_OPERAND (exp, 0))),
10211 fold (build1 (REALPART_EXPR,
10212 TREE_TYPE (inner_type),
10213 TREE_OPERAND (exp, 1))))),
10214 fold (build (EQ_EXPR, TREE_TYPE (exp),
10215 fold (build1 (IMAGPART_EXPR,
10216 TREE_TYPE (inner_type),
10217 TREE_OPERAND (exp, 0))),
10218 fold (build1 (IMAGPART_EXPR,
10219 TREE_TYPE (inner_type),
10220 TREE_OPERAND (exp, 1))))))),
10221 if_false_label, if_true_label);
10222 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10223 && !can_compare_p (TYPE_MODE (inner_type)))
10224 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10225 else
10226 comparison = compare (exp, EQ, EQ);
10227 break;
10228 }
10229
10230 case NE_EXPR:
10231 {
10232 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10233
10234 if (integer_zerop (TREE_OPERAND (exp, 1)))
10235 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10236 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10237 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10238 do_jump
10239 (fold
10240 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10241 fold (build (NE_EXPR, TREE_TYPE (exp),
10242 fold (build1 (REALPART_EXPR,
10243 TREE_TYPE (inner_type),
10244 TREE_OPERAND (exp, 0))),
10245 fold (build1 (REALPART_EXPR,
10246 TREE_TYPE (inner_type),
10247 TREE_OPERAND (exp, 1))))),
10248 fold (build (NE_EXPR, TREE_TYPE (exp),
10249 fold (build1 (IMAGPART_EXPR,
10250 TREE_TYPE (inner_type),
10251 TREE_OPERAND (exp, 0))),
10252 fold (build1 (IMAGPART_EXPR,
10253 TREE_TYPE (inner_type),
10254 TREE_OPERAND (exp, 1))))))),
10255 if_false_label, if_true_label);
10256 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10257 && !can_compare_p (TYPE_MODE (inner_type)))
10258 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10259 else
10260 comparison = compare (exp, NE, NE);
10261 break;
10262 }
10263
10264 case LT_EXPR:
10265 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10266 == MODE_INT)
10267 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10268 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10269 else
10270 comparison = compare (exp, LT, LTU);
10271 break;
10272
10273 case LE_EXPR:
10274 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10275 == MODE_INT)
10276 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10277 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10278 else
10279 comparison = compare (exp, LE, LEU);
10280 break;
10281
10282 case GT_EXPR:
10283 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10284 == MODE_INT)
10285 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10286 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10287 else
10288 comparison = compare (exp, GT, GTU);
10289 break;
10290
10291 case GE_EXPR:
10292 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10293 == MODE_INT)
10294 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10295 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10296 else
10297 comparison = compare (exp, GE, GEU);
10298 break;
10299
10300 default:
10301 normal:
10302 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10303 #if 0
10304 /* This is no longer needed, and produces poor code, since it makes
10305 comparisons and tests from non-SI objects have different code
10306 sequences. */
10307 /* Copy to register to avoid generating bad insns by cse
10308 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10309 if (!cse_not_expected && GET_CODE (temp) == MEM)
10310 temp = copy_to_reg (temp);
10311 #endif
10312 do_pending_stack_adjust ();
10313 if (GET_CODE (temp) == CONST_INT)
10314 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10315 else if (GET_CODE (temp) == LABEL_REF)
10316 comparison = const_true_rtx;
10317 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10318 && !can_compare_p (GET_MODE (temp)))
10319 /* Note swapping the labels gives us not-equal. */
10320 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10321 else if (GET_MODE (temp) != VOIDmode)
10322 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10323 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10324 GET_MODE (temp), NULL_RTX, 0);
10325 else
10326 abort ();
10327 }
10328
10329 /* Do any postincrements in the expression that was tested. */
10330 emit_queue ();
10331
10332 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10333 straight into a conditional jump instruction as the jump condition.
10334 Otherwise, all the work has been done already. */
10335
10336 if (comparison == const_true_rtx)
10337 {
10338 if (if_true_label)
10339 emit_jump (if_true_label);
10340 }
10341 else if (comparison == const0_rtx)
10342 {
10343 if (if_false_label)
10344 emit_jump (if_false_label);
10345 }
10346 else if (comparison)
10347 do_jump_for_compare (comparison, if_false_label, if_true_label);
10348
10349 if (drop_through_label)
10350 {
10351 /* If do_jump produces code that might be jumped around,
10352 do any stack adjusts from that code, before the place
10353 where control merges in. */
10354 do_pending_stack_adjust ();
10355 emit_label (drop_through_label);
10356 }
10357 }
10358 \f
10359 /* Given a comparison expression EXP for values too wide to be compared
10360 with one insn, test the comparison and jump to the appropriate label.
10361 The code of EXP is ignored; we always test GT if SWAP is 0,
10362 and LT if SWAP is 1. */
10363
10364 static void
10365 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10366 tree exp;
10367 int swap;
10368 rtx if_false_label, if_true_label;
10369 {
10370 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10371 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10372 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10373 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10374 rtx drop_through_label = 0;
10375 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10376 int i;
10377
10378 if (! if_true_label || ! if_false_label)
10379 drop_through_label = gen_label_rtx ();
10380 if (! if_true_label)
10381 if_true_label = drop_through_label;
10382 if (! if_false_label)
10383 if_false_label = drop_through_label;
10384
10385 /* Compare a word at a time, high order first. */
10386 for (i = 0; i < nwords; i++)
10387 {
10388 rtx comp;
10389 rtx op0_word, op1_word;
10390
10391 if (WORDS_BIG_ENDIAN)
10392 {
10393 op0_word = operand_subword_force (op0, i, mode);
10394 op1_word = operand_subword_force (op1, i, mode);
10395 }
10396 else
10397 {
10398 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10399 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10400 }
10401
10402 /* All but the high-order word must be compared as unsigned. */
10403 comp = compare_from_rtx (op0_word, op1_word,
10404 (unsignedp || i > 0) ? GTU : GT,
10405 unsignedp, word_mode, NULL_RTX, 0);
10406 if (comp == const_true_rtx)
10407 emit_jump (if_true_label);
10408 else if (comp != const0_rtx)
10409 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10410
10411 /* Consider lower words only if these are equal. */
10412 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10413 NULL_RTX, 0);
10414 if (comp == const_true_rtx)
10415 emit_jump (if_false_label);
10416 else if (comp != const0_rtx)
10417 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10418 }
10419
10420 if (if_false_label)
10421 emit_jump (if_false_label);
10422 if (drop_through_label)
10423 emit_label (drop_through_label);
10424 }
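
/* As an illustration, on a hypothetical 32-bit target a DImode
comparison A > B expands, in effect, to

if (A.high >  B.high) goto if_true_label;    signed or unsigned
per the operands
if (A.high != B.high) goto if_false_label;
if (A.low  >  B.low)  goto if_true_label;    always unsigned
goto if_false_label;

with the words taken high-order first regardless of
WORDS_BIG_ENDIAN. */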
10425
10426 /* Compare OP0 with OP1, word at a time, in mode MODE.
10427 UNSIGNEDP says to do unsigned comparison.
10428 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10429
10430 void
10431 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10432 enum machine_mode mode;
10433 int unsignedp;
10434 rtx op0, op1;
10435 rtx if_false_label, if_true_label;
10436 {
10437 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10438 rtx drop_through_label = 0;
10439 int i;
10440
10441 if (! if_true_label || ! if_false_label)
10442 drop_through_label = gen_label_rtx ();
10443 if (! if_true_label)
10444 if_true_label = drop_through_label;
10445 if (! if_false_label)
10446 if_false_label = drop_through_label;
10447
10448 /* Compare a word at a time, high order first. */
10449 for (i = 0; i < nwords; i++)
10450 {
10451 rtx comp;
10452 rtx op0_word, op1_word;
10453
10454 if (WORDS_BIG_ENDIAN)
10455 {
10456 op0_word = operand_subword_force (op0, i, mode);
10457 op1_word = operand_subword_force (op1, i, mode);
10458 }
10459 else
10460 {
10461 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10462 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10463 }
10464
10465 /* All but the high-order word must be compared as unsigned. */
10466 comp = compare_from_rtx (op0_word, op1_word,
10467 (unsignedp || i > 0) ? GTU : GT,
10468 unsignedp, word_mode, NULL_RTX, 0);
10469 if (comp == const_true_rtx)
10470 emit_jump (if_true_label);
10471 else if (comp != const0_rtx)
10472 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10473
10474 /* Consider lower words only if these are equal. */
10475 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10476 NULL_RTX, 0);
10477 if (comp == const_true_rtx)
10478 emit_jump (if_false_label);
10479 else if (comp != const0_rtx)
10480 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10481 }
10482
10483 if (if_false_label)
10484 emit_jump (if_false_label);
10485 if (drop_through_label)
10486 emit_label (drop_through_label);
10487 }
10488
10489 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10490 with one insn, test the comparison and jump to the appropriate label. */
10491
10492 static void
10493 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10494 tree exp;
10495 rtx if_false_label, if_true_label;
10496 {
10497 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10498 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10499 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10500 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10501 int i;
10502 rtx drop_through_label = 0;
10503
10504 if (! if_false_label)
10505 drop_through_label = if_false_label = gen_label_rtx ();
10506
10507 for (i = 0; i < nwords; i++)
10508 {
10509 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10510 operand_subword_force (op1, i, mode),
10511 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10512 word_mode, NULL_RTX, 0);
10513 if (comp == const0_rtx)
10514 emit_jump (if_false_label);
10515 else if (comp != const_true_rtx)
10516 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10517 }
10518
10519 if (if_true_label)
10520 emit_jump (if_true_label);
10521 if (drop_through_label)
10522 emit_label (drop_through_label);
10523 }
10524 \f
10525 /* Jump according to whether OP0 is 0.
10526 We assume that OP0 has an integer mode that is too wide
10527 for the available compare insns. */
10528
10529 static void
10530 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10531 rtx op0;
10532 rtx if_false_label, if_true_label;
10533 {
10534 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10535 int i;
10536 rtx drop_through_label = 0;
10537
10538 if (! if_false_label)
10539 drop_through_label = if_false_label = gen_label_rtx ();
10540
10541 for (i = 0; i < nwords; i++)
10542 {
10543 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10544 GET_MODE (op0)),
10545 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10546 if (comp == const0_rtx)
10547 emit_jump (if_false_label);
10548 else if (comp != const_true_rtx)
10549 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10550 }
10551
10552 if (if_true_label)
10553 emit_jump (if_true_label);
10554 if (drop_through_label)
10555 emit_label (drop_through_label);
10556 }
10557
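/* Illustrative sketch (hypothetical, not in the sources): branching when
   a wide value is zero.  Callers hand do_jump_by_parts_equality_rtx a
   nonzero-case target as IF_FALSE_LABEL and a zero-case target as
   IF_TRUE_LABEL; either may be NULL_RTX to fall through instead.  */
#if 0
static void
example_jump_if_wide_zero (op0, zero_label)
     rtx op0, zero_label;
{
  /* Falls through when some word of OP0 is nonzero.  */
  do_jump_by_parts_equality_rtx (op0, NULL_RTX, zero_label);
}
#endif
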
10558 /* Given a comparison expression in rtl form, output conditional branches to
10559 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10560
10561 static void
10562 do_jump_for_compare (comparison, if_false_label, if_true_label)
10563 rtx comparison, if_false_label, if_true_label;
10564 {
10565 if (if_true_label)
10566 {
10567 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10568 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10569 else
10570 abort ();
10571
10572 if (if_false_label)
10573 emit_jump (if_false_label);
10574 }
10575 else if (if_false_label)
10576 {
10577 rtx insn;
10578 rtx prev = get_last_insn ();
10579 rtx branch = 0;
10580
10581 /* Output the branch with the opposite condition. Then try to invert
10582 what is generated. If more than one insn is a branch, or if the
10583 branch is not the last insn written, abort. If we can't invert
10584 the branch, make a true label, redirect this jump to that,
10585 emit a jump to the false label and define the true label. */
10586
10587 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10588 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10589 else
10590 abort ();
10591
10592 /* Here we get the first insn that was just emitted. It used to be the
10593 case that, on some machines, emitting the branch would discard
10594 the previous compare insn and emit a replacement. This isn't
10595 done anymore, but abort if we see that PREV is deleted. */
10596
10597 if (prev == 0)
10598 insn = get_insns ();
10599 else if (INSN_DELETED_P (prev))
10600 abort ();
10601 else
10602 insn = NEXT_INSN (prev);
10603
10604 for (; insn; insn = NEXT_INSN (insn))
10605 if (GET_CODE (insn) == JUMP_INSN)
10606 {
10607 if (branch)
10608 abort ();
10609 branch = insn;
10610 }
10611
10612 if (branch != get_last_insn ())
10613 abort ();
10614
10615 JUMP_LABEL (branch) = if_false_label;
10616 if (! invert_jump (branch, if_false_label))
10617 {
10618 if_true_label = gen_label_rtx ();
10619 redirect_jump (branch, if_true_label);
10620 emit_jump (if_false_label);
10621 emit_label (if_true_label);
10622 }
10623 }
10624 }
10625 \f
10626 /* Generate code for a comparison expression EXP
10627 (including code to compute the values to be compared)
10628 and set (CC0) according to the result.
10629 SIGNED_CODE should be the rtx operation for this comparison for
10630 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10631
10632 We force a stack adjustment unless there are currently
10633 things pushed on the stack that aren't yet used. */
10634
10635 static rtx
10636 compare (exp, signed_code, unsigned_code)
10637 register tree exp;
10638 enum rtx_code signed_code, unsigned_code;
10639 {
10640 register rtx op0
10641 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10642 register rtx op1
10643 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10644 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10645 register enum machine_mode mode = TYPE_MODE (type);
10646 int unsignedp = TREE_UNSIGNED (type);
10647 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10648
10649 #ifdef HAVE_canonicalize_funcptr_for_compare
10650 /* If function pointers need to be "canonicalized" before they can
10651 be reliably compared, then canonicalize them. */
10652 if (HAVE_canonicalize_funcptr_for_compare
10653 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10654 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10655 == FUNCTION_TYPE))
10656 {
10657 rtx new_op0 = gen_reg_rtx (mode);
10658
10659 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10660 op0 = new_op0;
10661 }
10662
10663 if (HAVE_canonicalize_funcptr_for_compare
10664 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10665 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10666 == FUNCTION_TYPE))
10667 {
10668 rtx new_op1 = gen_reg_rtx (mode);
10669
10670 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10671 op1 = new_op1;
10672 }
10673 #endif
10674
10675 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10676 ((mode == BLKmode)
10677 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10678 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10679 }
10680
10681 /* Like compare but expects the values to compare as two rtx's.
10682 The decision as to signed or unsigned comparison must be made by the caller.
10683
10684 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10685 compared.
10686
10687 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10688 size of MODE should be used. */
10689
10690 rtx
10691 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10692 register rtx op0, op1;
10693 enum rtx_code code;
10694 int unsignedp;
10695 enum machine_mode mode;
10696 rtx size;
10697 int align;
10698 {
10699 rtx tem;
10700
10701 /* If one operand is constant, make it the second one. Only do this
10702 if the other operand is not constant as well. */
10703
10704 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10705 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10706 {
10707 tem = op0;
10708 op0 = op1;
10709 op1 = tem;
10710 code = swap_condition (code);
10711 }
10712
10713 if (flag_force_mem)
10714 {
10715 op0 = force_not_mem (op0);
10716 op1 = force_not_mem (op1);
10717 }
10718
10719 do_pending_stack_adjust ();
10720
10721 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10722 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10723 return tem;
10724
10725 #if 0
10726 /* There's no need to do this now that combine.c can eliminate lots of
10727 sign extensions. This can be less efficient in certain cases on other
10728 machines. */
10729
10730 /* If this is a signed equality comparison, we can do it as an
10731 unsigned comparison since zero-extension is cheaper than sign
10732 extension and comparisons with zero are done as unsigned. This is
10733 the case even on machines that can do fast sign extension, since
10734 zero-extension is easier to combine with other operations than
10735 sign-extension is. If we are comparing against a constant, we must
10736 convert it to what it would look like unsigned. */
10737 if ((code == EQ || code == NE) && ! unsignedp
10738 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10739 {
10740 if (GET_CODE (op1) == CONST_INT
10741 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10742 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10743 unsignedp = 1;
10744 }
10745 #endif
10746
10747 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10748
10749 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10750 }
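
/* Illustrative sketch (hypothetical): the usual pairing of
   compare_from_rtx with do_jump_for_compare -- emit the cc0 compare,
   then branch on the resulting comparison rtx, handling the
   compile-time-constant results the same way the callers above do.  */
#if 0
static void
example_branch_if_equal (op0, op1, equal_label)
     rtx op0, op1, equal_label;
{
  rtx comp = compare_from_rtx (op0, op1, EQ, 0, SImode, NULL_RTX, 0);

  if (comp == const_true_rtx)
    emit_jump (equal_label);
  else if (comp != const0_rtx)
    do_jump_for_compare (comp, NULL_RTX, equal_label);
}
#endif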
10751 \f
10752 /* Generate code to calculate EXP using a store-flag instruction
10753 and return an rtx for the result. EXP is either a comparison
10754 or a TRUTH_NOT_EXPR whose operand is a comparison.
10755
10756 If TARGET is nonzero, store the result there if convenient.
10757
10758 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10759 cheap.
10760
10761 Return zero if there is no suitable set-flag instruction
10762 available on this machine.
10763
10764 Once expand_expr has been called on the arguments of the comparison,
10765 we are committed to doing the store flag, since it is not safe to
10766 re-evaluate the expression. We emit the store-flag insn by calling
10767 emit_store_flag, but only expand the arguments if we have a reason
10768 to believe that emit_store_flag will be successful. If we think that
10769 it will, but it isn't, we have to simulate the store-flag with a
10770 set/jump/set sequence. */
10771
10772 static rtx
10773 do_store_flag (exp, target, mode, only_cheap)
10774 tree exp;
10775 rtx target;
10776 enum machine_mode mode;
10777 int only_cheap;
10778 {
10779 enum rtx_code code;
10780 tree arg0, arg1, type;
10781 tree tem;
10782 enum machine_mode operand_mode;
10783 int invert = 0;
10784 int unsignedp;
10785 rtx op0, op1;
10786 enum insn_code icode;
10787 rtx subtarget = target;
10788 rtx result, label, pattern, jump_pat;
10789
10790 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10791 result at the end. We can't simply invert the test since it would
10792 have already been inverted if it were valid. This case occurs for
10793 some floating-point comparisons. */
10794
10795 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10796 invert = 1, exp = TREE_OPERAND (exp, 0);
10797
10798 arg0 = TREE_OPERAND (exp, 0);
10799 arg1 = TREE_OPERAND (exp, 1);
10800 type = TREE_TYPE (arg0);
10801 operand_mode = TYPE_MODE (type);
10802 unsignedp = TREE_UNSIGNED (type);
10803
10804 /* We won't bother with BLKmode store-flag operations because it would mean
10805 passing a lot of information to emit_store_flag. */
10806 if (operand_mode == BLKmode)
10807 return 0;
10808
10809 /* We won't bother with store-flag operations involving function pointers
10810 when function pointers must be canonicalized before comparisons. */
10811 #ifdef HAVE_canonicalize_funcptr_for_compare
10812 if (HAVE_canonicalize_funcptr_for_compare
10813 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10814 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10815 == FUNCTION_TYPE))
10816 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10817 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10818 == FUNCTION_TYPE))))
10819 return 0;
10820 #endif
10821
10822 STRIP_NOPS (arg0);
10823 STRIP_NOPS (arg1);
10824
10825 /* Get the rtx comparison code to use. We know that EXP is a comparison
10826 operation of some type. Some comparisons against 1 and -1 can be
10827 converted to comparisons with zero. Do so here so that the tests
10828 below will be aware that we have a comparison with zero. These
10829 tests will not catch constants in the first operand, but constants
10830 are rarely passed as the first operand. */
10831
10832 switch (TREE_CODE (exp))
10833 {
10834 case EQ_EXPR:
10835 code = EQ;
10836 break;
10837 case NE_EXPR:
10838 code = NE;
10839 break;
10840 case LT_EXPR:
10841 if (integer_onep (arg1))
10842 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10843 else
10844 code = unsignedp ? LTU : LT;
10845 break;
10846 case LE_EXPR:
10847 if (! unsignedp && integer_all_onesp (arg1))
10848 arg1 = integer_zero_node, code = LT;
10849 else
10850 code = unsignedp ? LEU : LE;
10851 break;
10852 case GT_EXPR:
10853 if (! unsignedp && integer_all_onesp (arg1))
10854 arg1 = integer_zero_node, code = GE;
10855 else
10856 code = unsignedp ? GTU : GT;
10857 break;
10858 case GE_EXPR:
10859 if (integer_onep (arg1))
10860 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10861 else
10862 code = unsignedp ? GEU : GE;
10863 break;
10864 default:
10865 abort ();
10866 }
10867
10868 /* Put a constant second. */
10869 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10870 {
10871 tem = arg0; arg0 = arg1; arg1 = tem;
10872 code = swap_condition (code);
10873 }
10874
10875 /* If this is an equality or inequality test of a single bit, we can
10876 do this by shifting the bit being tested to the low-order bit and
10877 masking the result with the constant 1. If the condition was EQ,
10878 we xor it with 1. This does not require an scc insn and is faster
10879 than an scc insn even if we have it. */
10880
10881 if ((code == NE || code == EQ)
10882 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10883 && integer_pow2p (TREE_OPERAND (arg0, 1))
10884 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10885 {
10886 tree inner = TREE_OPERAND (arg0, 0);
10887 HOST_WIDE_INT tem;
10888 int bitnum;
10889 int ops_unsignedp;
10890
10891 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10892 NULL_RTX, VOIDmode, 0));
10893 /* In this case, immed_double_const will sign extend the value to make
10894 it look the same on the host and target. We must remove the
10895 sign-extension before calling exact_log2, since exact_log2 will
10896 fail for negative values. */
10897 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
10898 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
10899 /* We don't use the obvious constant shift to generate the mask,
10900 because that generates compiler warnings when BITS_PER_WORD is
10901 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10902 code is unreachable in that case. */
10903 tem = tem & GET_MODE_MASK (word_mode);
10904 bitnum = exact_log2 (tem);
10905
10906 /* If INNER is a right shift of a constant and it plus BITNUM does
10907 not overflow, adjust BITNUM and INNER. */
10908
10909 if (TREE_CODE (inner) == RSHIFT_EXPR
10910 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10911 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10912 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10913 < TYPE_PRECISION (type)))
10914 {
10915 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10916 inner = TREE_OPERAND (inner, 0);
10917 }
10918
10919 /* If we are going to be able to omit the AND below, we must do our
10920 operations as unsigned. If we must use the AND, we have a choice.
10921 Normally unsigned is faster, but for some machines signed is. */
10922 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10923 #ifdef LOAD_EXTEND_OP
10924 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10925 #else
10926 : 1
10927 #endif
10928 );
10929
10930 if (subtarget == 0 || GET_CODE (subtarget) != REG
10931 || GET_MODE (subtarget) != operand_mode
10932 || ! safe_from_p (subtarget, inner))
10933 subtarget = 0;
10934
10935 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10936
10937 if (bitnum != 0)
10938 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10939 size_int (bitnum), subtarget, ops_unsignedp);
10940
10941 if (GET_MODE (op0) != mode)
10942 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10943
10944 if ((code == EQ && ! invert) || (code == NE && invert))
10945 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10946 ops_unsignedp, OPTAB_LIB_WIDEN);
10947
10948 /* Put the AND last so it can combine with more things. */
10949 if (bitnum != TYPE_PRECISION (type) - 1)
10950 op0 = expand_and (op0, const1_rtx, subtarget);
10951
10952 return op0;
10953 }
10954
10955 /* Now see if we are likely to be able to do this. Return if not. */
10956 if (! can_compare_p (operand_mode))
10957 return 0;
10958 icode = setcc_gen_code[(int) code];
10959 if (icode == CODE_FOR_nothing
10960 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10961 {
10962 /* We can only do this if it is one of the special cases that
10963 can be handled without an scc insn. */
10964 if ((code == LT && integer_zerop (arg1))
10965 || (! only_cheap && code == GE && integer_zerop (arg1)))
10966 ;
10967 else if (BRANCH_COST >= 0
10968 && ! only_cheap && (code == NE || code == EQ)
10969 && TREE_CODE (type) != REAL_TYPE
10970 && ((abs_optab->handlers[(int) operand_mode].insn_code
10971 != CODE_FOR_nothing)
10972 || (ffs_optab->handlers[(int) operand_mode].insn_code
10973 != CODE_FOR_nothing)))
10974 ;
10975 else
10976 return 0;
10977 }
10978
10979 preexpand_calls (exp);
10980 if (subtarget == 0 || GET_CODE (subtarget) != REG
10981 || GET_MODE (subtarget) != operand_mode
10982 || ! safe_from_p (subtarget, arg1))
10983 subtarget = 0;
10984
10985 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10986 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10987
10988 if (target == 0)
10989 target = gen_reg_rtx (mode);
10990
10991 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10992 because, if the emit_store_flag does anything it will succeed and
10993 OP0 and OP1 will not be used subsequently. */
10994
10995 result = emit_store_flag (target, code,
10996 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10997 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10998 operand_mode, unsignedp, 1);
10999
11000 if (result)
11001 {
11002 if (invert)
11003 result = expand_binop (mode, xor_optab, result, const1_rtx,
11004 result, 0, OPTAB_LIB_WIDEN);
11005 return result;
11006 }
11007
11008 /* If this failed, we have to do this with set/compare/jump/set code. */
11009 if (target == 0 || GET_CODE (target) != REG
11010 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11011 target = gen_reg_rtx (GET_MODE (target));
11012
11013 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11014 result = compare_from_rtx (op0, op1, code, unsignedp,
11015 operand_mode, NULL_RTX, 0);
11016 if (GET_CODE (result) == CONST_INT)
11017 return (((result == const0_rtx && ! invert)
11018 || (result != const0_rtx && invert))
11019 ? const0_rtx : const1_rtx);
11020
11021 label = gen_label_rtx ();
11022 if (bcc_gen_fctn[(int) code] == 0)
11023 abort ();
11024
11025 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11026 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11027 emit_label (label);
11028
11029 return target;
11030 }
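
/* Worked example for the single-bit case above (illustrative): for
   `(x & 8) != 0' we have bitnum == exact_log2 (8) == 3, so the result
   is computed as `(x >> 3) & 1' with no scc insn; for `(x & 8) == 0'
   the low bit is then inverted with an XOR.  */
#if 0
static unsigned int
example_single_bit_ne (x)
     unsigned int x;
{
  /* Same value as (x & 8) != 0; the EQ form XORs this result with 1.  */
  return (x >> 3) & 1;
}
#endif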
11031 \f
11032 /* Generate a tablejump instruction (used for switch statements). */
11033
11034 #ifdef HAVE_tablejump
11035
11036 /* INDEX is the value being switched on, with the lowest value
11037 in the table already subtracted.
11038 MODE is its expected mode (needed if INDEX is constant).
11039 RANGE is the length of the jump table.
11040 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11041
11042 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11043 index value is out of range. */
11044
11045 void
11046 do_tablejump (index, mode, range, table_label, default_label)
11047 rtx index, range, table_label, default_label;
11048 enum machine_mode mode;
11049 {
11050 register rtx temp, vector;
11051
11052 /* Do an unsigned comparison (in the proper mode) between the index
11053 expression and the value which represents the length of the range.
11054 Since we just finished subtracting the lower bound of the range
11055 from the index expression, this comparison allows us to simultaneously
11056 check that the original index expression value is both greater than
11057 or equal to the minimum value of the range and less than or equal to
11058 the maximum value of the range. */
11059
11060 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11061 emit_jump_insn (gen_bgtu (default_label));
11062
11063 /* If index is in range, it must fit in Pmode.
11064 Convert to Pmode so we can index with it. */
11065 if (mode != Pmode)
11066 index = convert_to_mode (Pmode, index, 1);
11067
11068 /* Don't let a MEM slip through, because then INDEX that comes
11069 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11070 and break_out_memory_refs will go to work on it and mess it up. */
11071 #ifdef PIC_CASE_VECTOR_ADDRESS
11072 if (flag_pic && GET_CODE (index) != REG)
11073 index = copy_to_mode_reg (Pmode, index);
11074 #endif
11075
11076 /* If flag_force_addr were to affect this address
11077 it could interfere with the tricky assumptions made
11078 about addresses that contain label-refs,
11079 which may be valid only very near the tablejump itself. */
11080 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11081 GET_MODE_SIZE, because this indicates how large insns are. The other
11082 uses should all be Pmode, because they are addresses. This code
11083 could fail if addresses and insns are not the same size. */
11084 index = gen_rtx (PLUS, Pmode,
11085 gen_rtx (MULT, Pmode, index,
11086 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11087 gen_rtx (LABEL_REF, Pmode, table_label));
11088 #ifdef PIC_CASE_VECTOR_ADDRESS
11089 if (flag_pic)
11090 index = PIC_CASE_VECTOR_ADDRESS (index);
11091 else
11092 #endif
11093 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11094 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11095 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11096 RTX_UNCHANGING_P (vector) = 1;
11097 convert_move (temp, vector, 0);
11098
11099 emit_jump_insn (gen_tablejump (temp, table_label));
11100
11101 #ifndef CASE_VECTOR_PC_RELATIVE
11102 /* If we are generating PIC code or if the table is PC-relative, the
11103 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11104 if (! flag_pic)
11105 emit_barrier ();
11106 #endif
11107 }
11108
11109 #endif /* HAVE_tablejump */
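
/* Illustrative runtime analogue of the tablejump rtl built above
   (a hypothetical helper, not part of the sources): range-check the
   rebased index with an unsigned compare, then jump through the table.  */
#if 0
typedef void (*example_case_target) ();

static void
example_tablejump (index, range, table, default_target)
     unsigned int index, range;
     example_case_target *table;
     example_case_target default_target;
{
  if (index > range)		/* the GTU branch to DEFAULT_LABEL */
    (*default_target) ();
  else
    (*table[index]) ();		/* load from table + index * entry size */
}
#endif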
11110
11111
11112 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11113 to that value is on the top of the stack. The resulting type is TYPE, and
11114 the source declaration is DECL. */
11115
11116 void
11117 bc_load_memory (type, decl)
11118 tree type, decl;
11119 {
11120 enum bytecode_opcode opcode;
11121
11122
11123 /* Bit fields are special. We only know about signed and
11124 unsigned ints, and enums. The latter are treated as
11125 signed integers. */
11126
11127 if (DECL_BIT_FIELD (decl))
11128 if (TREE_CODE (type) == ENUMERAL_TYPE
11129 || TREE_CODE (type) == INTEGER_TYPE)
11130 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11131 else
11132 abort ();
11133 else
11134 /* See corresponding comment in bc_store_memory(). */
11135 if (TYPE_MODE (type) == BLKmode
11136 || TYPE_MODE (type) == VOIDmode)
11137 return;
11138 else
11139 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11140
11141 if (opcode == neverneverland)
11142 abort ();
11143
11144 bc_emit_bytecode (opcode);
11145
11146 #ifdef DEBUG_PRINT_CODE
11147 fputc ('\n', stderr);
11148 #endif
11149 }
11150
11151
11152 /* Store the contents of the second stack slot to the address in the
11153 top stack slot. DECL is the declaration of the destination and is used
11154 to determine whether we're dealing with a bitfield. */
11155
11156 void
11157 bc_store_memory (type, decl)
11158 tree type, decl;
11159 {
11160 enum bytecode_opcode opcode;
11161
11162
11163 if (DECL_BIT_FIELD (decl))
11164 {
11165 if (TREE_CODE (type) == ENUMERAL_TYPE
11166 || TREE_CODE (type) == INTEGER_TYPE)
11167 opcode = sstoreBI;
11168 else
11169 abort ();
11170 }
11171 else
11172 if (TYPE_MODE (type) == BLKmode)
11173 {
11174 /* Copy structure. This expands to a block copy instruction, storeBLK.
11175 In addition to the arguments expected by the other store instructions,
11176 it also expects a type size (SImode) on top of the stack, which is the
11177 structure size in size units (usually bytes). The first two arguments
11178 are already on the stack, so we just put the size on level 1. In some
11179 other languages the size may be variable; this is why we don't encode
11180 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11181
11182 bc_expand_expr (TYPE_SIZE (type));
11183 opcode = storeBLK;
11184 }
11185 else
11186 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11187
11188 if (opcode == neverneverland)
11189 abort ();
11190
11191 bc_emit_bytecode (opcode);
11192
11193 #ifdef DEBUG_PRINT_CODE
11194 fputc ('\n', stderr);
11195 #endif
11196 }
11197
11198
11199 /* Allocate local stack space sufficient to hold a value of the given
11200 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11201 integral power of 2. A special case is locals of type VOID, which
11202 have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
11203 remapped into the corresponding attribute of SI. */
11204
11205 rtx
11206 bc_allocate_local (size, alignment)
11207 int size, alignment;
11208 {
11209 rtx retval;
11210 int byte_alignment;
11211
11212 if (size < 0)
11213 abort ();
11214
11215 /* Normalize size and alignment */
11216 if (!size)
11217 size = UNITS_PER_WORD;
11218
11219 if (alignment < BITS_PER_UNIT)
11220 byte_alignment = 1 << (INT_ALIGN - 1);
11221 else
11222 /* Align */
11223 byte_alignment = alignment / BITS_PER_UNIT;
11224
11225 if (local_vars_size & (byte_alignment - 1))
11226 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11227
11228 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11229 local_vars_size += size;
11230
11231 return retval;
11232 }
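
/* Worked example of the alignment rounding above (illustrative): with
   local_vars_size == 13 and byte_alignment == 8, 13 & 7 is 5, so 3 is
   added and the local is placed at offset 16.  A hypothetical helper
   doing the same mask arithmetic:  */
#if 0
static int
example_round_up (size, byte_alignment)
     int size, byte_alignment;	/* BYTE_ALIGNMENT must be a power of 2.  */
{
  if (size & (byte_alignment - 1))
    size += byte_alignment - (size & (byte_alignment - 1));
  return size;
}
#endif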
11233
11234
11235 /* Allocate variable-sized local array. Variable-sized arrays are
11236 actually pointers to the address in memory where they are stored. */
11237
11238 rtx
11239 bc_allocate_variable_array (size)
11240 tree size;
11241 {
11242 rtx retval;
11243 const int ptralign = (1 << (PTR_ALIGN - 1));
11244
11245 /* Align pointer */
11246 if (local_vars_size & ptralign)
11247 local_vars_size += ptralign - (local_vars_size & ptralign);
11248
11249 /* Note down local space needed: pointer to block; also return
11250 dummy rtx */
11251
11252 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11253 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11254 return retval;
11255 }
11256
11257
11258 /* Push the machine address for the given external variable offset. */
11259
11260 void
11261 bc_load_externaddr (externaddr)
11262 rtx externaddr;
11263 {
11264 bc_emit_bytecode (constP);
11265 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11266 BYTECODE_BC_LABEL (externaddr)->offset);
11267
11268 #ifdef DEBUG_PRINT_CODE
11269 fputc ('\n', stderr);
11270 #endif
11271 }
11272
11273
11274 /* Like above, but expects an IDENTIFIER. */
11275
11276 void
11277 bc_load_externaddr_id (id, offset)
11278 tree id;
11279 int offset;
11280 {
11281 if (!IDENTIFIER_POINTER (id))
11282 abort ();
11283
11284 bc_emit_bytecode (constP);
11285 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11286
11287 #ifdef DEBUG_PRINT_CODE
11288 fputc ('\n', stderr);
11289 #endif
11290 }
11291
11292
11293 /* Push the machine address for the given local variable offset. */
11294
11295 void
11296 bc_load_localaddr (localaddr)
11297 rtx localaddr;
11298 {
11299 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11300 }
11301
11302
11303 /* Push the machine address for the given parameter offset.
11304 NOTE: offset is in bits. */
11305
11306 void
11307 bc_load_parmaddr (parmaddr)
11308 rtx parmaddr;
11309 {
11310 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11311 / BITS_PER_UNIT));
11312 }
11313
11314
11315 /* Convert a[i] into *(a + i). */
11316
11317 tree
11318 bc_canonicalize_array_ref (exp)
11319 tree exp;
11320 {
11321 tree type = TREE_TYPE (exp);
11322 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11323 TREE_OPERAND (exp, 0));
11324 tree index = TREE_OPERAND (exp, 1);
11325
11326
11327 /* Convert the integer argument to a type the same size as a pointer
11328 so the multiply won't overflow spuriously. */
11329
11330 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11331 index = convert (type_for_size (POINTER_SIZE, 0), index);
11332
11333 /* The array address isn't volatile even if the array is.
11334 (Of course this isn't terribly relevant since the bytecode
11335 translator treats nearly everything as volatile anyway.) */
11336 TREE_THIS_VOLATILE (array_adr) = 0;
11337
11338 return build1 (INDIRECT_REF, type,
11339 fold (build (PLUS_EXPR,
11340 TYPE_POINTER_TO (type),
11341 array_adr,
11342 fold (build (MULT_EXPR,
11343 TYPE_POINTER_TO (type),
11344 index,
11345 size_in_bytes (type))))));
11346 }
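
/* Worked example (illustrative): for `int a[10]; ... a[i]' the tree
   built above denotes `*(&a + i * sizeof (int))', with the index first
   widened to pointer precision so the multiply cannot overflow
   spuriously.  A hypothetical source-level analogue, written with an
   explicit byte offset because the PLUS_EXPR above does no pointer
   scaling of its own:  */
#if 0
static int
example_array_ref (a, i)
     int *a;
     long i;			/* index widened to pointer width */
{
  return *(int *) ((char *) a + i * sizeof (int));
}
#endif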
11347
11348
11349 /* Load the address of the component referenced by the given
11350 COMPONENT_REF expression.
11351
11352 Returns innermost lvalue. */
11353
11354 tree
11355 bc_expand_component_address (exp)
11356 tree exp;
11357 {
11358 tree tem, chain;
11359 enum machine_mode mode;
11360 int bitpos = 0;
11361 HOST_WIDE_INT SIval;
11362
11363
11364 tem = TREE_OPERAND (exp, 1);
11365 mode = DECL_MODE (tem);
11366
11367
11368 /* Compute cumulative bit offset for nested component refs
11369 and array refs, and find the ultimate containing object. */
11370
11371 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11372 {
11373 if (TREE_CODE (tem) == COMPONENT_REF)
11374 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11375 else
11376 if (TREE_CODE (tem) == ARRAY_REF
11377 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11378 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11379
11380 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11381 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11382 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11383 else
11384 break;
11385 }
11386
11387 bc_expand_expr (tem);
11388
11389
11390 /* For bitfields also push their offset and size */
11391 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11392 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
11393 else
11394 if (SIval = bitpos / BITS_PER_UNIT)
11395 bc_emit_instruction (addconstPSI, SIval);
11396
11397 return (TREE_OPERAND (exp, 1));
11398 }
11399
11400
11401 /* Emit code to push two SI constants */
11402
11403 void
11404 bc_push_offset_and_size (offset, size)
11405 HOST_WIDE_INT offset, size;
11406 {
11407 bc_emit_instruction (constSI, offset);
11408 bc_emit_instruction (constSI, size);
11409 }
11410
11411
11412 /* Emit byte code to push the address of the given lvalue expression to
11413 the stack. If it's a bit field, we also push offset and size info.
11414
11415 Returns innermost component, which allows us to determine not only
11416 its type, but also whether it's a bitfield. */
11417
11418 tree
11419 bc_expand_address (exp)
11420 tree exp;
11421 {
11422 /* Safeguard */
11423 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11424 return (exp);
11425
11426
11427 switch (TREE_CODE (exp))
11428 {
11429 case ARRAY_REF:
11430
11431 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11432
11433 case COMPONENT_REF:
11434
11435 return (bc_expand_component_address (exp));
11436
11437 case INDIRECT_REF:
11438
11439 bc_expand_expr (TREE_OPERAND (exp, 0));
11440
11441 /* For variable-sized types: retrieve pointer. Sometimes the
11442 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11443 also make sure we have an operand, just in case... */
11444
11445 if (TREE_OPERAND (exp, 0)
11446 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11447 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11448 bc_emit_instruction (loadP);
11449
11450 /* If packed, also return offset and size */
11451 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11452
11453 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11454 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11455
11456 return (TREE_OPERAND (exp, 0));
11457
11458 case FUNCTION_DECL:
11459
11460 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11461 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11462 break;
11463
11464 case PARM_DECL:
11465
11466 bc_load_parmaddr (DECL_RTL (exp));
11467
11468 /* For variable-sized types: retrieve pointer */
11469 if (TYPE_SIZE (TREE_TYPE (exp))
11470 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11471 bc_emit_instruction (loadP);
11472
11473 /* If packed, also return offset and size */
11474 if (DECL_BIT_FIELD (exp))
11475 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11476 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11477
11478 break;
11479
11480 case RESULT_DECL:
11481
11482 bc_emit_instruction (returnP);
11483 break;
11484
11485 case VAR_DECL:
11486
11487 #if 0
11488 if (BYTECODE_LABEL (DECL_RTL (exp)))
11489 bc_load_externaddr (DECL_RTL (exp));
11490 #endif
11491
11492 if (DECL_EXTERNAL (exp))
11493 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11494 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11495 else
11496 bc_load_localaddr (DECL_RTL (exp));
11497
11498 /* For variable-sized types: retrieve pointer */
11499 if (TYPE_SIZE (TREE_TYPE (exp))
11500 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11501 bc_emit_instruction (loadP);
11502
11503 /* If packed, also return offset and size */
11504 if (DECL_BIT_FIELD (exp))
11505 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11506 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11507
11508 break;
11509
11510 case STRING_CST:
11511 {
11512 rtx r;
11513
11514 bc_emit_bytecode (constP);
11515 r = output_constant_def (exp);
11516 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11517
11518 #ifdef DEBUG_PRINT_CODE
11519 fputc ('\n', stderr);
11520 #endif
11521 }
11522 break;
11523
11524 default:
11525
11526 abort ();
11527 break;
11528 }
11529
11530 /* Most lvalues don't have components. */
11531 return (exp);
11532 }
11533
11534
11535 /* Emit a type code to be used by the runtime support in handling
11536 parameter passing. The type code consists of the machine mode
11537 plus the minimal alignment shifted left 8 bits. */
11538
11539 tree
11540 bc_runtime_type_code (type)
11541 tree type;
11542 {
11543 int val;
11544
11545 switch (TREE_CODE (type))
11546 {
11547 case VOID_TYPE:
11548 case INTEGER_TYPE:
11549 case REAL_TYPE:
11550 case COMPLEX_TYPE:
11551 case ENUMERAL_TYPE:
11552 case POINTER_TYPE:
11553 case RECORD_TYPE:
11554
11555 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11556 break;
11557
11558 case ERROR_MARK:
11559
11560 val = 0;
11561 break;
11562
11563 default:
11564
11565 abort ();
11566 }
11567 return build_int_2 (val, 0);
11568 }
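
/* Worked example (illustrative): for an SImode integer aligned to 32
   bits the code built above is  (int) SImode | 32 << 8  -- the machine
   mode in the low byte and the minimal alignment, in bits, in the next
   one.  The concrete numeric values are target-dependent.  */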
11569
11570
11571 /* Generate constructor label */
11572
11573 char *
11574 bc_gen_constr_label ()
11575 {
11576 static int label_counter;
11577 static char label[20];
11578
11579 sprintf (label, "*LR%d", label_counter++);
11580
11581 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11582 }
11583
11584
11585 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11586 expand the constructor data as static data, and push a pointer to it.
11587 The pointer is put in the pointer table and is retrieved by a constP
11588 bytecode instruction. We then loop and store each constructor member in
11589 the corresponding component. Finally, we return the original pointer on
11590 the stack. */
11591
11592 void
11593 bc_expand_constructor (constr)
11594 tree constr;
11595 {
11596 char *l;
11597 HOST_WIDE_INT ptroffs;
11598 rtx constr_rtx;
11599
11600
11601 /* Literal constructors are handled as constants, whereas
11602 non-literals are evaluated and stored element by element
11603 into the data segment. */
11604
11605 /* Allocate space in the proper segment and push a pointer to that
11606 space on the stack. */
11607
11608 l = bc_gen_constr_label ();
11609
11610 if (TREE_CONSTANT (constr))
11611 {
11612 text_section ();
11613
11614 bc_emit_const_labeldef (l);
11615 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11616 }
11617 else
11618 {
11619 data_section ();
11620
11621 bc_emit_data_labeldef (l);
11622 bc_output_data_constructor (constr);
11623 }
11624
11625
11626 /* Add reference to pointer table and recall pointer to stack;
11627 this code is common for both types of constructors: literals
11628 and non-literals. */
11629
11630 ptroffs = bc_define_pointer (l);
11631 bc_emit_instruction (constP, ptroffs);
11632
11633 /* This is all that has to be done if it's a literal. */
11634 if (TREE_CONSTANT (constr))
11635 return;
11636
11637
11638 /* At this point, we have the pointer to the structure on top of the stack.
11639 Generate sequences of store_memory calls for the constructor. */
11640
11641 /* constructor type is structure */
11642 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11643 {
11644 register tree elt;
11645
11646 /* If the constructor has fewer fields than the structure,
11647 clear the whole structure first. */
11648
11649 if (list_length (CONSTRUCTOR_ELTS (constr))
11650 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11651 {
11652 bc_emit_instruction (duplicate);
11653 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11654 bc_emit_instruction (clearBLK);
11655 }
11656
11657 /* Store each element of the constructor into the corresponding
11658 field of TARGET. */
11659
11660 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11661 {
11662 register tree field = TREE_PURPOSE (elt);
11663 register enum machine_mode mode;
11664 int bitsize;
11665 int bitpos;
11666 int unsignedp;
11667
11668 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11669 mode = DECL_MODE (field);
11670 unsignedp = TREE_UNSIGNED (field);
11671
11672 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11673
11674 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11675 /* The alignment of TARGET is
11676 at least what its type requires. */
11677 VOIDmode, 0,
11678 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11679 int_size_in_bytes (TREE_TYPE (constr)));
11680 }
11681 }
11682 else
11683
11684 /* Constructor type is array */
11685 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11686 {
11687 register tree elt;
11688 register int i;
11689 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11690 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11691 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11692 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11693
11694 /* If the constructor has fewer elements than the array,
11695 clear the whole array first. */
11696
11697 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11698 {
11699 bc_emit_instruction (duplicate);
11700 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11701 bc_emit_instruction (clearBLK);
11702 }
11703
11704
11705 /* Store each element of the constructor into the corresponding
11706 element of TARGET, determined by counting the elements. */
11707
11708 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11709 elt;
11710 elt = TREE_CHAIN (elt), i++)
11711 {
11712 register enum machine_mode mode;
11713 int bitsize;
11714 int bitpos;
11715 int unsignedp;
11716
11717 mode = TYPE_MODE (elttype);
11718 bitsize = GET_MODE_BITSIZE (mode);
11719 unsignedp = TREE_UNSIGNED (elttype);
11720
11721 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11722 /* * TYPE_SIZE_UNIT (elttype) */ );
11723
11724 bc_store_field (elt, bitsize, bitpos, mode,
11725 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11726 /* The alignment of TARGET is
11727 at least what its type requires. */
11728 VOIDmode, 0,
11729 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11730 int_size_in_bytes (TREE_TYPE (constr)));
11731 }
11732
11733 }
11734 }
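
/* Illustrative walk-through (hypothetical): for a non-literal
   constructor such as `struct s { int a, b; } x = { f () };' the code
   above allocates static space, pushes a pointer to it, duplicates the
   pointer and clears the whole structure (the constructor has fewer
   elements than the type has fields), then stores member A, leaving the
   original pointer on the stack.  */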
11735
11736
11737 /* Store the value of EXP (an expression tree) into member FIELD of
11738 structure at address on stack, which has type TYPE, mode MODE and
11739 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11740 structure.
11741
11742 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11743 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11744
11745 void
11746 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11747 value_mode, unsignedp, align, total_size)
11748 int bitsize, bitpos;
11749 enum machine_mode mode;
11750 tree field, exp, type;
11751 enum machine_mode value_mode;
11752 int unsignedp;
11753 int align;
11754 int total_size;
11755 {
11756
11757 /* Expand expression and copy pointer */
11758 bc_expand_expr (exp);
11759 bc_emit_instruction (over);
11760
11761
11762 /* If the component is a bit field, we cannot use addressing to access
11763 it. Use bit-field techniques to store in it. */
11764
11765 if (DECL_BIT_FIELD (field))
11766 {
11767 bc_store_bit_field (bitpos, bitsize, unsignedp);
11768 return;
11769 }
11770 else
11771 /* Not bit field */
11772 {
11773 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11774
11775 /* Advance pointer to the desired member */
11776 if (offset)
11777 bc_emit_instruction (addconstPSI, offset);
11778
11779 /* Store */
11780 bc_store_memory (type, field);
11781 }
11782 }
11783
11784
11785 /* Store SI/SU in bitfield */
11786
11787 void
11788 bc_store_bit_field (offset, size, unsignedp)
11789 int offset, size, unsignedp;
11790 {
11791 /* Push bitfield offset and size */
11792 bc_push_offset_and_size (offset, size);
11793
11794 /* Store */
11795 bc_emit_instruction (sstoreBI);
11796 }
11797
11798
11799 /* Load SI/SU from bitfield */
11800
11801 void
11802 bc_load_bit_field (offset, size, unsignedp)
11803 int offset, size, unsignedp;
11804 {
11805 /* Push bitfield offset and size */
11806 bc_push_offset_and_size (offset, size);
11807
11808 /* Load: sign-extend if signed, else zero-extend */
11809 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11810 }
11811
11812
11813 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11814 (adjust stack pointer upwards), negative means add that number of
11815 levels (adjust the stack pointer downwards). Only positive values
11816 normally make sense. */
11817
11818 void
11819 bc_adjust_stack (nlevels)
11820 int nlevels;
11821 {
11822 switch (nlevels)
11823 {
11824 case 0:
11825 break;
11826
11827 case 2:
11828 bc_emit_instruction (drop);
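/* Falls through so that a second level is dropped.  */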
11829
11830 case 1:
11831 bc_emit_instruction (drop);
11832 break;
11833
11834 default:
11835
11836 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11837 stack_depth -= nlevels;
11838 }
11839
11840 #if defined (VALIDATE_STACK_FOR_BC)
11841 VALIDATE_STACK_FOR_BC ();
11842 #endif
11843 }