1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "machmode.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "obstack.h"
27 #include "flags.h"
28 #include "regs.h"
29 #include "function.h"
30 #include "insn-flags.h"
31 #include "insn-codes.h"
32 #include "expr.h"
33 #include "insn-config.h"
34 #include "recog.h"
35 #include "output.h"
36 #include "typeclass.h"
37
38 #include "bytecode.h"
39 #include "bc-opcode.h"
40 #include "bc-typecd.h"
41 #include "bc-optab.h"
42 #include "bc-emit.h"
43
44
45 #define CEIL(x,y) (((x) + (y) - 1) / (y))
46
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
49
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
52
53 #ifdef PUSH_ROUNDING
54
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first */
57 #endif
58
59 #endif
60
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
64 #else
65 #define STACK_PUSH_CODE PRE_INC
66 #endif
67 #endif
68
69 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
70 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
71
72 /* If this is nonzero, we do not bother generating VOLATILE
73 around volatile memory references, and we are willing to
74 output indirect addresses. If cse is to follow, we reject
75 indirect addresses so a useful potential cse is generated;
76 if it is used only once, instruction combination will produce
77 the same indirect address eventually. */
78 int cse_not_expected;
79
80 /* Nonzero to generate code for all the subroutines within an
81 expression before generating the upper levels of the expression.
82 Nowadays this is never zero. */
83 int do_preexpand_calls = 1;
84
85 /* Number of units that we should eventually pop off the stack.
86 These are the arguments to function calls that have already returned. */
87 int pending_stack_adjust;
88
89 /* Nonzero means stack pops must not be deferred, and deferred stack
90 pops must not be output. It is nonzero inside a function call,
91 inside a conditional expression, inside a statement expression,
92 and in other cases as well. */
93 int inhibit_defer_pop;
94
95 /* A list of all cleanups which belong to the arguments of
96 function calls being expanded by expand_call. */
97 tree cleanups_this_call;
98
99 /* When temporaries are created by TARGET_EXPRs, they are created at
100 this level of temp_slot_level, so that they can remain allocated
101 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
102 of TARGET_EXPRs. */
103 int target_temp_slot_level;
104
105 /* Nonzero means __builtin_saveregs has already been done in this function.
106 The value is the pseudoreg containing the value __builtin_saveregs
107 returned. */
108 static rtx saveregs_value;
109
110 /* Similarly for __builtin_apply_args. */
111 static rtx apply_args_value;
112
113 /* This structure is used by move_by_pieces to describe the move to
114 be performed. */
115
116 struct move_by_pieces
117 {
118 rtx to;
119 rtx to_addr;
120 int autinc_to;
121 int explicit_inc_to;
122 int to_struct;
123 rtx from;
124 rtx from_addr;
125 int autinc_from;
126 int explicit_inc_from;
127 int from_struct;
128 int len;
129 int offset;
130 int reverse;
131 };
132
133 /* Used to generate bytecodes: keep track of size of local variables,
134 as well as depth of arithmetic stack. (Notice that variables are
135 stored on the machine's stack, not the arithmetic stack.) */
136
137 extern int local_vars_size;
138 extern int stack_depth;
139 extern int max_stack_depth;
140 extern struct obstack permanent_obstack;
141
142
143 static rtx enqueue_insn PROTO((rtx, rtx));
144 static int queued_subexp_p PROTO((rtx));
145 static void init_queue PROTO((void));
146 static void move_by_pieces PROTO((rtx, rtx, int, int));
147 static int move_by_pieces_ninsns PROTO((unsigned int, int));
148 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
149 struct move_by_pieces *));
150 static void store_constructor PROTO((tree, rtx));
151 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
152 enum machine_mode, int, int, int));
153 static int get_inner_unaligned_p PROTO((tree));
154 static tree save_noncopied_parts PROTO((tree, tree));
155 static tree init_noncopied_parts PROTO((tree, tree));
156 static int safe_from_p PROTO((rtx, tree));
157 static int fixed_type_p PROTO((tree));
158 static int get_pointer_alignment PROTO((tree, unsigned));
159 static tree string_constant PROTO((tree, tree *));
160 static tree c_strlen PROTO((tree));
161 static rtx expand_builtin PROTO((tree, rtx, rtx,
162 enum machine_mode, int));
163 static int apply_args_size PROTO((void));
164 static int apply_result_size PROTO((void));
165 static rtx result_vector PROTO((int, rtx));
166 static rtx expand_builtin_apply_args PROTO((void));
167 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
168 static void expand_builtin_return PROTO((rtx));
169 static rtx expand_increment PROTO((tree, int));
170 rtx bc_expand_increment PROTO((struct increment_operator *, tree));
171 tree bc_runtime_type_code PROTO((tree));
172 rtx bc_allocate_local PROTO((int, int));
173 void bc_store_memory PROTO((tree, tree));
174 tree bc_expand_component_address PROTO((tree));
175 tree bc_expand_address PROTO((tree));
176 void bc_expand_constructor PROTO((tree));
177 void bc_adjust_stack PROTO((int));
178 tree bc_canonicalize_array_ref PROTO((tree));
179 void bc_load_memory PROTO((tree, tree));
180 void bc_load_externaddr PROTO((rtx));
181 void bc_load_externaddr_id PROTO((tree, int));
182 void bc_load_localaddr PROTO((rtx));
183 void bc_load_parmaddr PROTO((rtx));
184 static void preexpand_calls PROTO((tree));
185 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
186 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
187 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
188 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
189 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
190 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
191 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
192 static tree defer_cleanups_to PROTO((tree));
193 extern void (*interim_eh_hook) PROTO((tree));
194 extern tree truthvalue_conversion PROTO((tree));
195
196 /* Record for each mode whether we can move a register directly to or
197 from an object of that mode in memory. If we can't, we won't try
198 to use that mode directly when accessing a field of that mode. */
199
200 static char direct_load[NUM_MACHINE_MODES];
201 static char direct_store[NUM_MACHINE_MODES];
202
203 /* MOVE_RATIO is the number of move instructions that is better than
204 a block move. */
205
206 #ifndef MOVE_RATIO
207 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
208 #define MOVE_RATIO 2
209 #else
210 /* A value of around 6 would minimize code size; infinity would minimize
211 execution time. */
212 #define MOVE_RATIO 15
213 #endif
214 #endif
215
216 /* This array records the insn_code of insns to perform block moves. */
217 enum insn_code movstr_optab[NUM_MACHINE_MODES];
218
219 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
220
221 #ifndef SLOW_UNALIGNED_ACCESS
222 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
223 #endif
224
225 /* Register mappings for target machines without register windows. */
226 #ifndef INCOMING_REGNO
227 #define INCOMING_REGNO(OUT) (OUT)
228 #endif
229 #ifndef OUTGOING_REGNO
230 #define OUTGOING_REGNO(IN) (IN)
231 #endif
232 \f
233 /* Maps used to convert modes to const, load, and store bytecodes. */
234 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
235 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
236 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
237
238 /* Initialize maps used to convert modes to const, load, and store
239 bytecodes. */
240 void
241 bc_init_mode_to_opcode_maps ()
242 {
243 int mode;
244
245 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
246 mode_to_const_map[mode] =
247 mode_to_load_map[mode] =
248 mode_to_store_map[mode] = neverneverland;
249
250 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
251 mode_to_const_map[(int) SYM] = CONST; \
252 mode_to_load_map[(int) SYM] = LOAD; \
253 mode_to_store_map[(int) SYM] = STORE;
254
255 #include "modemap.def"
256 #undef DEF_MODEMAP
257 }
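/* Illustrative sketch (not in the original source): an entry in
   modemap.def such as

       DEF_MODEMAP (SImode, beSI, beunSI, constSI, loadSI, storeSI)

   would expand, under the macro above, to

       mode_to_const_map[(int) SImode] = constSI;
       mode_to_load_map[(int) SImode] = loadSI;
       mode_to_store_map[(int) SImode] = storeSI;

   The opcode names shown here are hypothetical; the real ones come
   from bc-opcode.h via modemap.def.  */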
258 \f
259 /* This is run once per compilation to set up which modes can be used
260 directly in memory and to initialize the block move optab. */
261
262 void
263 init_expr_once ()
264 {
265 rtx insn, pat;
266 enum machine_mode mode;
267 /* Try indexing by frame ptr and try by stack ptr.
268 It is known that on the Convex the stack ptr isn't a valid index.
269 With luck, one or the other is valid on any machine. */
270 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
271 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
272
273 start_sequence ();
274 insn = emit_insn (gen_rtx (SET, 0, 0));
275 pat = PATTERN (insn);
276
277 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
278 mode = (enum machine_mode) ((int) mode + 1))
279 {
280 int regno;
281 rtx reg;
282 int num_clobbers;
283
284 direct_load[(int) mode] = direct_store[(int) mode] = 0;
285 PUT_MODE (mem, mode);
286 PUT_MODE (mem1, mode);
287
288 /* See if there is some register that can be used in this mode and
289 directly loaded or stored from memory. */
290
291 if (mode != VOIDmode && mode != BLKmode)
292 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
293 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
294 regno++)
295 {
296 if (! HARD_REGNO_MODE_OK (regno, mode))
297 continue;
298
299 reg = gen_rtx (REG, mode, regno);
300
301 SET_SRC (pat) = mem;
302 SET_DEST (pat) = reg;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_load[(int) mode] = 1;
305
306 SET_SRC (pat) = mem1;
307 SET_DEST (pat) = reg;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_load[(int) mode] = 1;
310
311 SET_SRC (pat) = reg;
312 SET_DEST (pat) = mem;
313 if (recog (pat, insn, &num_clobbers) >= 0)
314 direct_store[(int) mode] = 1;
315
316 SET_SRC (pat) = reg;
317 SET_DEST (pat) = mem1;
318 if (recog (pat, insn, &num_clobbers) >= 0)
319 direct_store[(int) mode] = 1;
320 }
321 }
322
323 end_sequence ();
324 }
325
326 /* This is run at the start of compiling a function. */
327
328 void
329 init_expr ()
330 {
331 init_queue ();
332
333 pending_stack_adjust = 0;
334 inhibit_defer_pop = 0;
335 cleanups_this_call = 0;
336 saveregs_value = 0;
337 apply_args_value = 0;
338 forced_labels = 0;
339 }
340
341 /* Save all variables describing the current status into the structure *P.
342 This is used before starting a nested function. */
343
344 void
345 save_expr_status (p)
346 struct function *p;
347 {
348 /* Instead of saving the postincrement queue, empty it. */
349 emit_queue ();
350
351 p->pending_stack_adjust = pending_stack_adjust;
352 p->inhibit_defer_pop = inhibit_defer_pop;
353 p->cleanups_this_call = cleanups_this_call;
354 p->saveregs_value = saveregs_value;
355 p->apply_args_value = apply_args_value;
356 p->forced_labels = forced_labels;
357
358 pending_stack_adjust = 0;
359 inhibit_defer_pop = 0;
360 cleanups_this_call = 0;
361 saveregs_value = 0;
362 apply_args_value = 0;
363 forced_labels = 0;
364 }
365
366 /* Restore all variables describing the current status from the structure *P.
367 This is used after a nested function. */
368
369 void
370 restore_expr_status (p)
371 struct function *p;
372 {
373 pending_stack_adjust = p->pending_stack_adjust;
374 inhibit_defer_pop = p->inhibit_defer_pop;
375 cleanups_this_call = p->cleanups_this_call;
376 saveregs_value = p->saveregs_value;
377 apply_args_value = p->apply_args_value;
378 forced_labels = p->forced_labels;
379 }
380 \f
381 /* Manage the queue of increment instructions to be output
382 for POSTINCREMENT_EXPR expressions, etc. */
383
384 static rtx pending_chain;
385
386 /* Queue up to increment (or change) VAR later. BODY says how:
387 BODY should be the same thing you would pass to emit_insn
388 to increment right away. It will go to emit_insn later on.
389
390 The value is a QUEUED expression to be used in place of VAR
391 where you want to guarantee the pre-incrementation value of VAR. */
392
393 static rtx
394 enqueue_insn (var, body)
395 rtx var, body;
396 {
397 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
398 var, NULL_RTX, NULL_RTX, body, pending_chain);
399 return pending_chain;
400 }
401
402 /* Use protect_from_queue to convert a QUEUED expression
403 into something that you can put immediately into an instruction.
404 If the queued incrementation has not happened yet,
405 protect_from_queue returns the variable itself.
406 If the incrementation has happened, protect_from_queue returns a temp
407 that contains a copy of the old value of the variable.
408
409 Any time an rtx which might possibly be a QUEUED is to be put
410 into an instruction, it must be passed through protect_from_queue first.
411 QUEUED expressions are not meaningful in instructions.
412
413 Do not pass a value through protect_from_queue and then hold
414 on to it for a while before putting it in an instruction!
415 If the queue is flushed in between, incorrect code will result. */
416
417 rtx
418 protect_from_queue (x, modify)
419 register rtx x;
420 int modify;
421 {
422 register RTX_CODE code = GET_CODE (x);
423
424 #if 0 /* A QUEUED can hang around after the queue is forced out. */
425 /* Shortcut for most common case. */
426 if (pending_chain == 0)
427 return x;
428 #endif
429
430 if (code != QUEUED)
431 {
432 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
433 use of autoincrement. Make a copy of the contents of the memory
434 location rather than a copy of the address, but not if the value is
435 of mode BLKmode. Don't modify X in place since it might be
436 shared. */
437 if (code == MEM && GET_MODE (x) != BLKmode
438 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
439 {
440 register rtx y = XEXP (x, 0);
441 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
442
443 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
444 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
445 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
446
447 if (QUEUED_INSN (y))
448 {
449 register rtx temp = gen_reg_rtx (GET_MODE (new));
450 emit_insn_before (gen_move_insn (temp, new),
451 QUEUED_INSN (y));
452 return temp;
453 }
454 return new;
455 }
456 /* Otherwise, recursively protect the subexpressions of all
457 the kinds of rtx's that can contain a QUEUED. */
458 if (code == MEM)
459 {
460 rtx tem = protect_from_queue (XEXP (x, 0), 0);
461 if (tem != XEXP (x, 0))
462 {
463 x = copy_rtx (x);
464 XEXP (x, 0) = tem;
465 }
466 }
467 else if (code == PLUS || code == MULT)
468 {
469 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
470 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
471 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
472 {
473 x = copy_rtx (x);
474 XEXP (x, 0) = new0;
475 XEXP (x, 1) = new1;
476 }
477 }
478 return x;
479 }
480 /* If the increment has not happened, use the variable itself. */
481 if (QUEUED_INSN (x) == 0)
482 return QUEUED_VAR (x);
483 /* If the increment has happened and a pre-increment copy exists,
484 use that copy. */
485 if (QUEUED_COPY (x) != 0)
486 return QUEUED_COPY (x);
487 /* The increment has happened but we haven't set up a pre-increment copy.
488 Set one up now, and use it. */
489 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
490 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
491 QUEUED_INSN (x));
492 return QUEUED_COPY (x);
493 }
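/* Illustrative sketch (not in the original source): a typical caller
   protects every operand immediately before emitting, then flushes the
   queue once the whole expression has been expanded.  TARGET, OP0 and
   OP1 are hypothetical rtx operands that may contain QUEUED nodes:

       op0 = protect_from_queue (op0, 0);
       op1 = protect_from_queue (op1, 0);
       emit_insn (gen_rtx (SET, VOIDmode, target,
                           gen_rtx (PLUS, mode, op0, op1)));
       emit_queue ();

   Holding a protected value across a flush of the queue would defeat
   the guarantee described above.  */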
494
495 /* Return nonzero if X contains a QUEUED expression:
496 if it contains anything that will be altered by a queued increment.
497 We handle only combinations of MEM, PLUS, MINUS and MULT operators
498 since memory addresses generally contain only those. */
499
500 static int
501 queued_subexp_p (x)
502 rtx x;
503 {
504 register enum rtx_code code = GET_CODE (x);
505 switch (code)
506 {
507 case QUEUED:
508 return 1;
509 case MEM:
510 return queued_subexp_p (XEXP (x, 0));
511 case MULT:
512 case PLUS:
513 case MINUS:
514 return queued_subexp_p (XEXP (x, 0))
515 || queued_subexp_p (XEXP (x, 1));
516 }
517 return 0;
518 }
519
520 /* Perform all the pending incrementations. */
521
522 void
523 emit_queue ()
524 {
525 register rtx p;
526 while ((p = pending_chain) != 0)
527 {
528 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
529 pending_chain = QUEUED_NEXT (p);
530 }
531 }
532
533 static void
534 init_queue ()
535 {
536 if (pending_chain)
537 abort ();
538 }
539 \f
540 /* Copy data from FROM to TO, where the machine modes are not the same.
541 Both modes may be integer, or both may be floating.
542 UNSIGNEDP should be nonzero if FROM is an unsigned type.
543 This causes zero-extension instead of sign-extension. */
544
545 void
546 convert_move (to, from, unsignedp)
547 register rtx to, from;
548 int unsignedp;
549 {
550 enum machine_mode to_mode = GET_MODE (to);
551 enum machine_mode from_mode = GET_MODE (from);
552 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
553 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
554 enum insn_code code;
555 rtx libcall;
556
557 /* rtx code for making an equivalent value. */
558 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
559
560 to = protect_from_queue (to, 1);
561 from = protect_from_queue (from, 0);
562
563 if (to_real != from_real)
564 abort ();
565
566 /* If FROM is a SUBREG that indicates that we have already done at least
567 the required extension, strip it. We don't handle such SUBREGs as
568 TO here. */
569
570 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
571 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
572 >= GET_MODE_SIZE (to_mode))
573 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
574 from = gen_lowpart (to_mode, from), from_mode = to_mode;
575
576 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
577 abort ();
578
579 if (to_mode == from_mode
580 || (from_mode == VOIDmode && CONSTANT_P (from)))
581 {
582 emit_move_insn (to, from);
583 return;
584 }
585
586 if (to_real)
587 {
588 rtx value;
589
590 #ifdef HAVE_extendqfhf2
591 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
592 {
593 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
594 return;
595 }
596 #endif
597 #ifdef HAVE_extendqfsf2
598 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
599 {
600 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
601 return;
602 }
603 #endif
604 #ifdef HAVE_extendqfdf2
605 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
606 {
607 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
608 return;
609 }
610 #endif
611 #ifdef HAVE_extendqfxf2
612 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
613 {
614 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
615 return;
616 }
617 #endif
618 #ifdef HAVE_extendqftf2
619 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
620 {
621 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
622 return;
623 }
624 #endif
625
626 #ifdef HAVE_extendhftqf2
627 if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
628 {
629 emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
630 return;
631 }
632 #endif
633
634 #ifdef HAVE_extendhfsf2
635 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
636 {
637 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
638 return;
639 }
640 #endif
641 #ifdef HAVE_extendhfdf2
642 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
643 {
644 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
645 return;
646 }
647 #endif
648 #ifdef HAVE_extendhfxf2
649 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
650 {
651 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
652 return;
653 }
654 #endif
655 #ifdef HAVE_extendhftf2
656 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
657 {
658 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
659 return;
660 }
661 #endif
662
663 #ifdef HAVE_extendsfdf2
664 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
665 {
666 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
667 return;
668 }
669 #endif
670 #ifdef HAVE_extendsfxf2
671 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
672 {
673 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
674 return;
675 }
676 #endif
677 #ifdef HAVE_extendsftf2
678 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
679 {
680 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
681 return;
682 }
683 #endif
684 #ifdef HAVE_extenddfxf2
685 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
686 {
687 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
688 return;
689 }
690 #endif
691 #ifdef HAVE_extenddftf2
692 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
693 {
694 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
695 return;
696 }
697 #endif
698
699 #ifdef HAVE_trunchfqf2
700 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
701 {
702 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
703 return;
704 }
705 #endif
706 #ifdef HAVE_truncsfqf2
707 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
708 {
709 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
710 return;
711 }
712 #endif
713 #ifdef HAVE_truncdfqf2
714 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
715 {
716 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
717 return;
718 }
719 #endif
720 #ifdef HAVE_truncxfqf2
721 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
722 {
723 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
724 return;
725 }
726 #endif
727 #ifdef HAVE_trunctfqf2
728 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
729 {
730 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
731 return;
732 }
733 #endif
734
735 #ifdef HAVE_trunctqfhf2
736 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
737 {
738 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
739 return;
740 }
741 #endif
742 #ifdef HAVE_truncsfhf2
743 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
744 {
745 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
746 return;
747 }
748 #endif
749 #ifdef HAVE_truncdfhf2
750 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
751 {
752 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
753 return;
754 }
755 #endif
756 #ifdef HAVE_truncxfhf2
757 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
758 {
759 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
760 return;
761 }
762 #endif
763 #ifdef HAVE_trunctfhf2
764 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
765 {
766 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
767 return;
768 }
769 #endif
770 #ifdef HAVE_truncdfsf2
771 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
772 {
773 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
774 return;
775 }
776 #endif
777 #ifdef HAVE_truncxfsf2
778 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
779 {
780 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
781 return;
782 }
783 #endif
784 #ifdef HAVE_trunctfsf2
785 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
786 {
787 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
788 return;
789 }
790 #endif
791 #ifdef HAVE_truncxfdf2
792 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
793 {
794 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
795 return;
796 }
797 #endif
798 #ifdef HAVE_trunctfdf2
799 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
800 {
801 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
802 return;
803 }
804 #endif
805
806 libcall = (rtx) 0;
807 switch (from_mode)
808 {
809 case SFmode:
810 switch (to_mode)
811 {
812 case DFmode:
813 libcall = extendsfdf2_libfunc;
814 break;
815
816 case XFmode:
817 libcall = extendsfxf2_libfunc;
818 break;
819
820 case TFmode:
821 libcall = extendsftf2_libfunc;
822 break;
823 }
824 break;
825
826 case DFmode:
827 switch (to_mode)
828 {
829 case SFmode:
830 libcall = truncdfsf2_libfunc;
831 break;
832
833 case XFmode:
834 libcall = extenddfxf2_libfunc;
835 break;
836
837 case TFmode:
838 libcall = extenddftf2_libfunc;
839 break;
840 }
841 break;
842
843 case XFmode:
844 switch (to_mode)
845 {
846 case SFmode:
847 libcall = truncxfsf2_libfunc;
848 break;
849
850 case DFmode:
851 libcall = truncxfdf2_libfunc;
852 break;
853 }
854 break;
855
856 case TFmode:
857 switch (to_mode)
858 {
859 case SFmode:
860 libcall = trunctfsf2_libfunc;
861 break;
862
863 case DFmode:
864 libcall = trunctfdf2_libfunc;
865 break;
866 }
867 break;
868 }
869
870 if (libcall == (rtx) 0)
871 /* This conversion is not implemented yet. */
872 abort ();
873
874 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
875 1, from, from_mode);
876 emit_move_insn (to, value);
877 return;
878 }
879
880 /* Now both modes are integers. */
881
882 /* Handle expanding beyond a word. */
883 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
884 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
885 {
886 rtx insns;
887 rtx lowpart;
888 rtx fill_value;
889 rtx lowfrom;
890 int i;
891 enum machine_mode lowpart_mode;
892 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
893
894 /* Try converting directly if the insn is supported. */
895 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
896 != CODE_FOR_nothing)
897 {
898 /* If FROM is a SUBREG, put it into a register. Do this
899 so that we always generate the same set of insns for
900 better cse'ing; if an intermediate assignment occurred,
901 we won't be doing the operation directly on the SUBREG. */
902 if (optimize > 0 && GET_CODE (from) == SUBREG)
903 from = force_reg (from_mode, from);
904 emit_unop_insn (code, to, from, equiv_code);
905 return;
906 }
907 /* Next, try converting via full word. */
908 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
909 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
910 != CODE_FOR_nothing))
911 {
912 if (GET_CODE (to) == REG)
913 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
914 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
915 emit_unop_insn (code, to,
916 gen_lowpart (word_mode, to), equiv_code);
917 return;
918 }
919
920 /* No special multiword conversion insn; do it by hand. */
921 start_sequence ();
922
923 /* Since we will turn this into a no conflict block, we must ensure
924 that the source does not overlap the target. */
925
926 if (reg_overlap_mentioned_p (to, from))
927 from = force_reg (from_mode, from);
928
929 /* Get a copy of FROM widened to a word, if necessary. */
930 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
931 lowpart_mode = word_mode;
932 else
933 lowpart_mode = from_mode;
934
935 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
936
937 lowpart = gen_lowpart (lowpart_mode, to);
938 emit_move_insn (lowpart, lowfrom);
939
940 /* Compute the value to put in each remaining word. */
941 if (unsignedp)
942 fill_value = const0_rtx;
943 else
944 {
945 #ifdef HAVE_slt
946 if (HAVE_slt
947 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
948 && STORE_FLAG_VALUE == -1)
949 {
950 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
951 lowpart_mode, 0, 0);
952 fill_value = gen_reg_rtx (word_mode);
953 emit_insn (gen_slt (fill_value));
954 }
955 else
956 #endif
957 {
958 fill_value
959 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
960 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
961 NULL_RTX, 0);
962 fill_value = convert_to_mode (word_mode, fill_value, 1);
963 }
964 }
965
966 /* Fill the remaining words. */
967 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
968 {
969 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
970 rtx subword = operand_subword (to, index, 1, to_mode);
971
972 if (subword == 0)
973 abort ();
974
975 if (fill_value != subword)
976 emit_move_insn (subword, fill_value);
977 }
978
979 insns = get_insns ();
980 end_sequence ();
981
982 emit_no_conflict_block (insns, to, from, NULL_RTX,
983 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
984 return;
985 }
986
987 /* Truncating multi-word to a word or less. */
988 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
989 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
990 {
991 if (!((GET_CODE (from) == MEM
992 && ! MEM_VOLATILE_P (from)
993 && direct_load[(int) to_mode]
994 && ! mode_dependent_address_p (XEXP (from, 0)))
995 || GET_CODE (from) == REG
996 || GET_CODE (from) == SUBREG))
997 from = force_reg (from_mode, from);
998 convert_move (to, gen_lowpart (word_mode, from), 0);
999 return;
1000 }
1001
1002 /* Handle pointer conversion.  */ /* SPEE 900220 */
1003 if (to_mode == PSImode)
1004 {
1005 if (from_mode != SImode)
1006 from = convert_to_mode (SImode, from, unsignedp);
1007
1008 #ifdef HAVE_truncsipsi2
1009 if (HAVE_truncsipsi2)
1010 {
1011 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1012 return;
1013 }
1014 #endif /* HAVE_truncsipsi2 */
1015 abort ();
1016 }
1017
1018 if (from_mode == PSImode)
1019 {
1020 if (to_mode != SImode)
1021 {
1022 from = convert_to_mode (SImode, from, unsignedp);
1023 from_mode = SImode;
1024 }
1025 else
1026 {
1027 #ifdef HAVE_extendpsisi2
1028 if (HAVE_extendpsisi2)
1029 {
1030 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1031 return;
1032 }
1033 #endif /* HAVE_extendpsisi2 */
1034 abort ();
1035 }
1036 }
1037
1038 if (to_mode == PDImode)
1039 {
1040 if (from_mode != DImode)
1041 from = convert_to_mode (DImode, from, unsignedp);
1042
1043 #ifdef HAVE_truncdipdi2
1044 if (HAVE_truncdipdi2)
1045 {
1046 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1047 return;
1048 }
1049 #endif /* HAVE_truncdipdi2 */
1050 abort ();
1051 }
1052
1053 if (from_mode == PDImode)
1054 {
1055 if (to_mode != DImode)
1056 {
1057 from = convert_to_mode (DImode, from, unsignedp);
1058 from_mode = DImode;
1059 }
1060 else
1061 {
1062 #ifdef HAVE_extendpdidi2
1063 if (HAVE_extendpdidi2)
1064 {
1065 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1066 return;
1067 }
1068 #endif /* HAVE_extendpdidi2 */
1069 abort ();
1070 }
1071 }
1072
1073 /* Now follow all the conversions between integers
1074 no more than a word long. */
1075
1076 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1077 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1078 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1079 GET_MODE_BITSIZE (from_mode)))
1080 {
1081 if (!((GET_CODE (from) == MEM
1082 && ! MEM_VOLATILE_P (from)
1083 && direct_load[(int) to_mode]
1084 && ! mode_dependent_address_p (XEXP (from, 0)))
1085 || GET_CODE (from) == REG
1086 || GET_CODE (from) == SUBREG))
1087 from = force_reg (from_mode, from);
1088 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1089 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1090 from = copy_to_reg (from);
1091 emit_move_insn (to, gen_lowpart (to_mode, from));
1092 return;
1093 }
1094
1095 /* Handle extension. */
1096 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1097 {
1098 /* Convert directly if that works. */
1099 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1100 != CODE_FOR_nothing)
1101 {
1102 emit_unop_insn (code, to, from, equiv_code);
1103 return;
1104 }
1105 else
1106 {
1107 enum machine_mode intermediate;
1108
1109 /* Search for a mode to convert via. */
1110 for (intermediate = from_mode; intermediate != VOIDmode;
1111 intermediate = GET_MODE_WIDER_MODE (intermediate))
1112 if (((can_extend_p (to_mode, intermediate, unsignedp)
1113 != CODE_FOR_nothing)
1114 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1115 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode), GET_MODE_BITSIZE (intermediate))))
1116 && (can_extend_p (intermediate, from_mode, unsignedp)
1117 != CODE_FOR_nothing))
1118 {
1119 convert_move (to, convert_to_mode (intermediate, from,
1120 unsignedp), unsignedp);
1121 return;
1122 }
1123
1124 /* No suitable intermediate mode. */
1125 abort ();
1126 }
1127 }
1128
1129 /* Support special truncate insns for certain modes. */
1130
1131 if (from_mode == DImode && to_mode == SImode)
1132 {
1133 #ifdef HAVE_truncdisi2
1134 if (HAVE_truncdisi2)
1135 {
1136 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1137 return;
1138 }
1139 #endif
1140 convert_move (to, force_reg (from_mode, from), unsignedp);
1141 return;
1142 }
1143
1144 if (from_mode == DImode && to_mode == HImode)
1145 {
1146 #ifdef HAVE_truncdihi2
1147 if (HAVE_truncdihi2)
1148 {
1149 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1150 return;
1151 }
1152 #endif
1153 convert_move (to, force_reg (from_mode, from), unsignedp);
1154 return;
1155 }
1156
1157 if (from_mode == DImode && to_mode == QImode)
1158 {
1159 #ifdef HAVE_truncdiqi2
1160 if (HAVE_truncdiqi2)
1161 {
1162 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1163 return;
1164 }
1165 #endif
1166 convert_move (to, force_reg (from_mode, from), unsignedp);
1167 return;
1168 }
1169
1170 if (from_mode == SImode && to_mode == HImode)
1171 {
1172 #ifdef HAVE_truncsihi2
1173 if (HAVE_truncsihi2)
1174 {
1175 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1176 return;
1177 }
1178 #endif
1179 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 return;
1181 }
1182
1183 if (from_mode == SImode && to_mode == QImode)
1184 {
1185 #ifdef HAVE_truncsiqi2
1186 if (HAVE_truncsiqi2)
1187 {
1188 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1189 return;
1190 }
1191 #endif
1192 convert_move (to, force_reg (from_mode, from), unsignedp);
1193 return;
1194 }
1195
1196 if (from_mode == HImode && to_mode == QImode)
1197 {
1198 #ifdef HAVE_trunchiqi2
1199 if (HAVE_trunchiqi2)
1200 {
1201 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1202 return;
1203 }
1204 #endif
1205 convert_move (to, force_reg (from_mode, from), unsignedp);
1206 return;
1207 }
1208
1209 if (from_mode == TImode && to_mode == DImode)
1210 {
1211 #ifdef HAVE_trunctidi2
1212 if (HAVE_trunctidi2)
1213 {
1214 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1215 return;
1216 }
1217 #endif
1218 convert_move (to, force_reg (from_mode, from), unsignedp);
1219 return;
1220 }
1221
1222 if (from_mode == TImode && to_mode == SImode)
1223 {
1224 #ifdef HAVE_trunctisi2
1225 if (HAVE_trunctisi2)
1226 {
1227 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1228 return;
1229 }
1230 #endif
1231 convert_move (to, force_reg (from_mode, from), unsignedp);
1232 return;
1233 }
1234
1235 if (from_mode == TImode && to_mode == HImode)
1236 {
1237 #ifdef HAVE_trunctihi2
1238 if (HAVE_trunctihi2)
1239 {
1240 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1241 return;
1242 }
1243 #endif
1244 convert_move (to, force_reg (from_mode, from), unsignedp);
1245 return;
1246 }
1247
1248 if (from_mode == TImode && to_mode == QImode)
1249 {
1250 #ifdef HAVE_trunctiqi2
1251 if (HAVE_trunctiqi2)
1252 {
1253 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1254 return;
1255 }
1256 #endif
1257 convert_move (to, force_reg (from_mode, from), unsignedp);
1258 return;
1259 }
1260
1261 /* Handle truncation of volatile memrefs, and so on;
1262 the things that couldn't be truncated directly,
1263 and for which there was no special instruction. */
1264 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1265 {
1266 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1267 emit_move_insn (to, temp);
1268 return;
1269 }
1270
1271 /* Mode combination is not recognized. */
1272 abort ();
1273 }
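/* Illustrative sketch (not in the original source): widening an SImode
   value into a fresh DImode pseudo with zero extension.  FROM is a
   hypothetical SImode rtx:

       rtx to = gen_reg_rtx (DImode);
       convert_move (to, from, 1);

   With UNSIGNEDP == 0 the same call would sign-extend instead.  */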
1274
1275 /* Return an rtx for a value that would result
1276 from converting X to mode MODE.
1277 Both X and MODE may be floating, or both integer.
1278 UNSIGNEDP is nonzero if X is an unsigned value.
1279 This can be done by referring to a part of X in place
1280 or by copying to a new temporary with conversion.
1281
1282 This function *must not* call protect_from_queue
1283 except when putting X into an insn (in which case convert_move does it). */
1284
1285 rtx
1286 convert_to_mode (mode, x, unsignedp)
1287 enum machine_mode mode;
1288 rtx x;
1289 int unsignedp;
1290 {
1291 return convert_modes (mode, VOIDmode, x, unsignedp);
1292 }
1293
1294 /* Return an rtx for a value that would result
1295 from converting X from mode OLDMODE to mode MODE.
1296 Both modes may be floating, or both integer.
1297 UNSIGNEDP is nonzero if X is an unsigned value.
1298
1299 This can be done by referring to a part of X in place
1300 or by copying to a new temporary with conversion.
1301
1302 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1303
1304 This function *must not* call protect_from_queue
1305 except when putting X into an insn (in which case convert_move does it). */
1306
1307 rtx
1308 convert_modes (mode, oldmode, x, unsignedp)
1309 enum machine_mode mode, oldmode;
1310 rtx x;
1311 int unsignedp;
1312 {
1313 register rtx temp;
1314
1315 /* If X is a SUBREG that indicates that we have already done at least
1316 the required extension, strip it. */
1317
1318 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1319 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1320 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1321 x = gen_lowpart (mode, x);
1322
1323 if (GET_MODE (x) != VOIDmode)
1324 oldmode = GET_MODE (x);
1325
1326 if (mode == oldmode)
1327 return x;
1328
1329 /* There is one case that we must handle specially: If we are converting
1330 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1331 we are to interpret the constant as unsigned, gen_lowpart will do
1332 the wrong if the constant appears negative. What we want to do is
1333 make the high-order word of the constant zero, not all ones. */
1334
1335 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1336 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1337 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1338 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1339
1340 /* We can do this with a gen_lowpart if both desired and current modes
1341 are integer, and this is either a constant integer, a register, or a
1342 non-volatile MEM. Except for the constant case where MODE is no
1343 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1344
1345 if ((GET_CODE (x) == CONST_INT
1346 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1347 || (GET_MODE_CLASS (mode) == MODE_INT
1348 && GET_MODE_CLASS (oldmode) == MODE_INT
1349 && (GET_CODE (x) == CONST_DOUBLE
1350 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1351 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1352 && direct_load[(int) mode])
1353 || (GET_CODE (x) == REG
1354 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1355 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1356 {
1357 /* ?? If we don't know OLDMODE, we have to assume here that
1358 X does not need sign- or zero-extension. This may not be
1359 the case, but it's the best we can do. */
1360 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1361 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1362 {
1363 HOST_WIDE_INT val = INTVAL (x);
1364 int width = GET_MODE_BITSIZE (oldmode);
1365
1366 /* We must sign or zero-extend in this case. Start by
1367 zero-extending, then sign extend if we need to. */
1368 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1369 if (! unsignedp
1370 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1371 val |= (HOST_WIDE_INT) (-1) << width;
1372
1373 return GEN_INT (val);
1374 }
1375
1376 return gen_lowpart (mode, x);
1377 }
1378
1379 temp = gen_reg_rtx (mode);
1380 convert_move (temp, x, unsignedp);
1381 return temp;
1382 }
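/* Illustrative sketch (not in the original source): the CONST_INT
   widening path above at work.  Converting the QImode byte 0xff,
   represented as the CONST_INT -1 under OLDMODE == QImode, to HImode
   with UNSIGNEDP set first masks to the old width (val becomes 0xff)
   and then, since UNSIGNEDP is nonzero, skips the sign extension, so

       convert_modes (HImode, QImode, GEN_INT (-1), 1)

   yields GEN_INT (0xff) rather than a -1 of the wider mode.  */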
1383 \f
1384 /* Generate several move instructions to copy LEN bytes
1385 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1386 The caller must pass FROM and TO
1387 through protect_from_queue before calling.
1388 ALIGN (in bytes) is maximum alignment we can assume. */
1389
1390 static void
1391 move_by_pieces (to, from, len, align)
1392 rtx to, from;
1393 int len, align;
1394 {
1395 struct move_by_pieces data;
1396 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1397 int max_size = MOVE_MAX + 1;
1398
1399 data.offset = 0;
1400 data.to_addr = to_addr;
1401 data.from_addr = from_addr;
1402 data.to = to;
1403 data.from = from;
1404 data.autinc_to
1405 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1406 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1407 data.autinc_from
1408 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1409 || GET_CODE (from_addr) == POST_INC
1410 || GET_CODE (from_addr) == POST_DEC);
1411
1412 data.explicit_inc_from = 0;
1413 data.explicit_inc_to = 0;
1414 data.reverse
1415 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1416 if (data.reverse) data.offset = len;
1417 data.len = len;
1418
1419 data.to_struct = MEM_IN_STRUCT_P (to);
1420 data.from_struct = MEM_IN_STRUCT_P (from);
1421
1422 /* If copying requires more than two move insns,
1423 copy addresses to registers (to make displacements shorter)
1424 and use post-increment if available. */
1425 if (!(data.autinc_from && data.autinc_to)
1426 && move_by_pieces_ninsns (len, align) > 2)
1427 {
1428 #ifdef HAVE_PRE_DECREMENT
1429 if (data.reverse && ! data.autinc_from)
1430 {
1431 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1432 data.autinc_from = 1;
1433 data.explicit_inc_from = -1;
1434 }
1435 #endif
1436 #ifdef HAVE_POST_INCREMENT
1437 if (! data.autinc_from)
1438 {
1439 data.from_addr = copy_addr_to_reg (from_addr);
1440 data.autinc_from = 1;
1441 data.explicit_inc_from = 1;
1442 }
1443 #endif
1444 if (!data.autinc_from && CONSTANT_P (from_addr))
1445 data.from_addr = copy_addr_to_reg (from_addr);
1446 #ifdef HAVE_PRE_DECREMENT
1447 if (data.reverse && ! data.autinc_to)
1448 {
1449 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1450 data.autinc_to = 1;
1451 data.explicit_inc_to = -1;
1452 }
1453 #endif
1454 #ifdef HAVE_POST_INCREMENT
1455 if (! data.reverse && ! data.autinc_to)
1456 {
1457 data.to_addr = copy_addr_to_reg (to_addr);
1458 data.autinc_to = 1;
1459 data.explicit_inc_to = 1;
1460 }
1461 #endif
1462 if (!data.autinc_to && CONSTANT_P (to_addr))
1463 data.to_addr = copy_addr_to_reg (to_addr);
1464 }
1465
1466 if (! SLOW_UNALIGNED_ACCESS
1467 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1468 align = MOVE_MAX;
1469
1470 /* First move what we can in the largest integer mode, then go to
1471 successively smaller modes. */
1472
1473 while (max_size > 1)
1474 {
1475 enum machine_mode mode = VOIDmode, tmode;
1476 enum insn_code icode;
1477
1478 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1479 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1480 if (GET_MODE_SIZE (tmode) < max_size)
1481 mode = tmode;
1482
1483 if (mode == VOIDmode)
1484 break;
1485
1486 icode = mov_optab->handlers[(int) mode].insn_code;
1487 if (icode != CODE_FOR_nothing
1488 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1489 GET_MODE_SIZE (mode)))
1490 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1491
1492 max_size = GET_MODE_SIZE (mode);
1493 }
1494
1495 /* The code above should have handled everything. */
1496 if (data.len != 0)
1497 abort ();
1498 }
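/* Illustrative sketch (not in the original source): on a hypothetical
   32-bit target with MOVE_MAX == 4 and sufficient alignment, a 7-byte
   copy is decomposed by the loop above as

       len = 7:  one SImode move  (4 bytes, len becomes 3)
       len = 3:  one HImode move  (2 bytes, len becomes 1)
       len = 1:  one QImode move  (1 byte,  len becomes 0)

   which is also the count move_by_pieces_ninsns computes below for the
   same arguments.  */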
1499
1500 /* Return number of insns required to move L bytes by pieces.
1501 ALIGN (in bytes) is maximum alignment we can assume. */
1502
1503 static int
1504 move_by_pieces_ninsns (l, align)
1505 unsigned int l;
1506 int align;
1507 {
1508 register int n_insns = 0;
1509 int max_size = MOVE_MAX + 1;
1510
1511 if (! SLOW_UNALIGNED_ACCESS
1512 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1513 align = MOVE_MAX;
1514
1515 while (max_size > 1)
1516 {
1517 enum machine_mode mode = VOIDmode, tmode;
1518 enum insn_code icode;
1519
1520 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1521 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1522 if (GET_MODE_SIZE (tmode) < max_size)
1523 mode = tmode;
1524
1525 if (mode == VOIDmode)
1526 break;
1527
1528 icode = mov_optab->handlers[(int) mode].insn_code;
1529 if (icode != CODE_FOR_nothing
1530 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1531 GET_MODE_SIZE (mode)))
1532 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1533
1534 max_size = GET_MODE_SIZE (mode);
1535 }
1536
1537 return n_insns;
1538 }
1539
1540 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1541 with move instructions for mode MODE. GENFUN is the gen_... function
1542 to make a move insn for that mode. DATA has all the other info. */
1543
1544 static void
1545 move_by_pieces_1 (genfun, mode, data)
1546 rtx (*genfun) ();
1547 enum machine_mode mode;
1548 struct move_by_pieces *data;
1549 {
1550 register int size = GET_MODE_SIZE (mode);
1551 register rtx to1, from1;
1552
1553 while (data->len >= size)
1554 {
1555 if (data->reverse) data->offset -= size;
1556
1557 to1 = (data->autinc_to
1558 ? gen_rtx (MEM, mode, data->to_addr)
1559 : change_address (data->to, mode,
1560 plus_constant (data->to_addr, data->offset)));
1561 MEM_IN_STRUCT_P (to1) = data->to_struct;
1562 from1 =
1563 (data->autinc_from
1564 ? gen_rtx (MEM, mode, data->from_addr)
1565 : change_address (data->from, mode,
1566 plus_constant (data->from_addr, data->offset)));
1567 MEM_IN_STRUCT_P (from1) = data->from_struct;
1568
1569 #ifdef HAVE_PRE_DECREMENT
1570 if (data->explicit_inc_to < 0)
1571 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1572 if (data->explicit_inc_from < 0)
1573 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1574 #endif
1575
1576 emit_insn ((*genfun) (to1, from1));
1577 #ifdef HAVE_POST_INCREMENT
1578 if (data->explicit_inc_to > 0)
1579 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1580 if (data->explicit_inc_from > 0)
1581 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1582 #endif
1583
1584 if (! data->reverse) data->offset += size;
1585
1586 data->len -= size;
1587 }
1588 }
1589 \f
1590 /* Emit code to move a block Y to a block X.
1591 This may be done with string-move instructions,
1592 with multiple scalar move instructions, or with a library call.
1593
1594 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1595 with mode BLKmode.
1596 SIZE is an rtx that says how long they are.
1597 ALIGN is the maximum alignment we can assume they have,
1598 measured in bytes. */
1599
1600 void
1601 emit_block_move (x, y, size, align)
1602 rtx x, y;
1603 rtx size;
1604 int align;
1605 {
1606 if (GET_MODE (x) != BLKmode)
1607 abort ();
1608
1609 if (GET_MODE (y) != BLKmode)
1610 abort ();
1611
1612 x = protect_from_queue (x, 1);
1613 y = protect_from_queue (y, 0);
1614 size = protect_from_queue (size, 0);
1615
1616 if (GET_CODE (x) != MEM)
1617 abort ();
1618 if (GET_CODE (y) != MEM)
1619 abort ();
1620 if (size == 0)
1621 abort ();
1622
1623 if (GET_CODE (size) == CONST_INT
1624 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1625 move_by_pieces (x, y, INTVAL (size), align);
1626 else
1627 {
1628 /* Try the most limited insn first, because there's no point
1629 including more than one in the machine description unless
1630 the more limited one has some advantage. */
1631
1632 rtx opalign = GEN_INT (align);
1633 enum machine_mode mode;
1634
1635 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1636 mode = GET_MODE_WIDER_MODE (mode))
1637 {
1638 enum insn_code code = movstr_optab[(int) mode];
1639
1640 if (code != CODE_FOR_nothing
1641 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1642 here because if SIZE is less than the mode mask, as it is
1643 returned by the macro, it will definitely be less than the
1644 actual mode mask. */
1645 && ((GET_CODE (size) == CONST_INT
1646 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1647 <= GET_MODE_MASK (mode)))
1648 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1649 && (insn_operand_predicate[(int) code][0] == 0
1650 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1651 && (insn_operand_predicate[(int) code][1] == 0
1652 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1653 && (insn_operand_predicate[(int) code][3] == 0
1654 || (*insn_operand_predicate[(int) code][3]) (opalign,
1655 VOIDmode)))
1656 {
1657 rtx op2;
1658 rtx last = get_last_insn ();
1659 rtx pat;
1660
1661 op2 = convert_to_mode (mode, size, 1);
1662 if (insn_operand_predicate[(int) code][2] != 0
1663 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1664 op2 = copy_to_mode_reg (mode, op2);
1665
1666 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1667 if (pat)
1668 {
1669 emit_insn (pat);
1670 return;
1671 }
1672 else
1673 delete_insns_since (last);
1674 }
1675 }
1676
1677 #ifdef TARGET_MEM_FUNCTIONS
1678 emit_library_call (memcpy_libfunc, 0,
1679 VOIDmode, 3, XEXP (x, 0), Pmode,
1680 XEXP (y, 0), Pmode,
1681 convert_to_mode (TYPE_MODE (sizetype), size,
1682 TREE_UNSIGNED (sizetype)),
1683 TYPE_MODE (sizetype));
1684 #else
1685 emit_library_call (bcopy_libfunc, 0,
1686 VOIDmode, 3, XEXP (y, 0), Pmode,
1687 XEXP (x, 0), Pmode,
1688 convert_to_mode (TYPE_MODE (sizetype), size,
1689 TREE_UNSIGNED (sizetype)),
1690 TYPE_MODE (sizetype));
1691 #endif
1692 }
1693 }
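/* Illustrative sketch (not in the original source): copying a 16-byte,
   word-aligned aggregate between two hypothetical BLKmode MEM rtxen X
   and Y:

       emit_block_move (x, y, GEN_INT (16), UNITS_PER_WORD);

   With MOVE_RATIO == 15 and 4-byte words, move_by_pieces_ninsns returns
   4 for this size, so the copy is expanded as four word moves rather
   than a movstr pattern or a library call.  */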
1694 \f
1695 /* Copy all or part of a value X into registers starting at REGNO.
1696 The number of registers to be filled is NREGS. */
1697
1698 void
1699 move_block_to_reg (regno, x, nregs, mode)
1700 int regno;
1701 rtx x;
1702 int nregs;
1703 enum machine_mode mode;
1704 {
1705 int i;
1706 rtx pat, last;
1707
1708 if (nregs == 0)
1709 return;
1710
1711 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1712 x = validize_mem (force_const_mem (mode, x));
1713
1714 /* See if the machine can do this with a load multiple insn. */
1715 #ifdef HAVE_load_multiple
1716 if (HAVE_load_multiple)
1717 {
1718 last = get_last_insn ();
1719 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1720 GEN_INT (nregs));
1721 if (pat)
1722 {
1723 emit_insn (pat);
1724 return;
1725 }
1726 else
1727 delete_insns_since (last);
1728 }
1729 #endif
1730
1731 for (i = 0; i < nregs; i++)
1732 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1733 operand_subword_force (x, i, mode));
1734 }
1735
1736 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1737 The number of registers to be filled is NREGS. SIZE indicates the number
1738 of bytes in the object X. */
1739
1740
1741 void
1742 move_block_from_reg (regno, x, nregs, size)
1743 int regno;
1744 rtx x;
1745 int nregs;
1746 int size;
1747 {
1748 int i;
1749 rtx pat, last;
1750
1751 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1752 to the left before storing to memory. */
1753 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1754 {
1755 rtx tem = operand_subword (x, 0, 1, BLKmode);
1756 rtx shift;
1757
1758 if (tem == 0)
1759 abort ();
1760
1761 shift = expand_shift (LSHIFT_EXPR, word_mode,
1762 gen_rtx (REG, word_mode, regno),
1763 build_int_2 ((UNITS_PER_WORD - size)
1764 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1765 emit_move_insn (tem, shift);
1766 return;
1767 }
1768
1769 /* See if the machine can do this with a store multiple insn. */
1770 #ifdef HAVE_store_multiple
1771 if (HAVE_store_multiple)
1772 {
1773 last = get_last_insn ();
1774 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1775 GEN_INT (nregs));
1776 if (pat)
1777 {
1778 emit_insn (pat);
1779 return;
1780 }
1781 else
1782 delete_insns_since (last);
1783 }
1784 #endif
1785
1786 for (i = 0; i < nregs; i++)
1787 {
1788 rtx tem = operand_subword (x, i, 1, BLKmode);
1789
1790 if (tem == 0)
1791 abort ();
1792
1793 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1794 }
1795 }
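/* Illustrative sketch (not in the original source): with 4-byte words
   on a BYTES_BIG_ENDIAN target, storing a 3-byte BLKmode value from a
   register shifts it left by (4 - 3) * BITS_PER_UNIT == 8 bits first,
   so that the three significant bytes land at the low memory addresses
   of the word written by move_block_from_reg.  */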
1796
1797 /* Add a USE expression for REG to the (possibly empty) list pointed
1798 to by CALL_FUSAGE. REG must denote a hard register. */
1799
1800 void
1801 use_reg (call_fusage, reg)
1802 rtx *call_fusage, reg;
1803 {
1804 if (GET_CODE (reg) != REG
1805 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1806 abort();
1807
1808 *call_fusage
1809 = gen_rtx (EXPR_LIST, VOIDmode,
1810 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1811 }
1812
1813 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1814 starting at REGNO. All of these registers must be hard registers. */
1815
1816 void
1817 use_regs (call_fusage, regno, nregs)
1818 rtx *call_fusage;
1819 int regno;
1820 int nregs;
1821 {
1822 int i;
1823
1824 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1825 abort ();
1826
1827 for (i = 0; i < nregs; i++)
1828 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1829 }
1830 \f
1831 /* Write zeros through the storage of OBJECT.
1832 If OBJECT has BLKmode, SIZE is its length in bytes. */
1833
1834 void
1835 clear_storage (object, size)
1836 rtx object;
1837 rtx size;
1838 {
1839 if (GET_MODE (object) == BLKmode)
1840 {
1841 #ifdef TARGET_MEM_FUNCTIONS
1842 emit_library_call (memset_libfunc, 0,
1843 VOIDmode, 3,
1844 XEXP (object, 0), Pmode, const0_rtx, ptr_mode,
1845 convert_to_mode (TYPE_MODE (sizetype),
1846 size, TREE_UNSIGNED (sizetype)),
1847 TYPE_MODE (sizetype));
1848 #else
1849 emit_library_call (bzero_libfunc, 0,
1850 VOIDmode, 2,
1851 XEXP (object, 0), Pmode,
1852 convert_to_mode (TYPE_MODE (sizetype),
1853 size, TREE_UNSIGNED (sizetype)),
1854 TYPE_MODE (sizetype));
1855 #endif
1856 }
1857 else
1858 emit_move_insn (object, const0_rtx);
1859 }
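/* Illustrative sketch (not in the original source): zeroing a 32-byte
   BLKmode object OBJ (a hypothetical MEM rtx):

       clear_storage (obj, GEN_INT (32));

   For a non-BLKmode object the size argument is ignored and a single
   move of const0_rtx is emitted instead.  */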
1860
1861 /* Generate code to copy Y into X.
1862 Both Y and X must have the same mode, except that
1863 Y can be a constant with VOIDmode.
1864 This mode cannot be BLKmode; use emit_block_move for that.
1865
1866 Return the last instruction emitted. */
1867
1868 rtx
1869 emit_move_insn (x, y)
1870 rtx x, y;
1871 {
1872 enum machine_mode mode = GET_MODE (x);
1873
1874 x = protect_from_queue (x, 1);
1875 y = protect_from_queue (y, 0);
1876
1877 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1878 abort ();
1879
1880 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1881 y = force_const_mem (mode, y);
1882
1883 /* If X or Y are memory references, verify that their addresses are valid
1884 for the machine. */
1885 if (GET_CODE (x) == MEM
1886 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1887 && ! push_operand (x, GET_MODE (x)))
1888 || (flag_force_addr
1889 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1890 x = change_address (x, VOIDmode, XEXP (x, 0));
1891
1892 if (GET_CODE (y) == MEM
1893 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1894 || (flag_force_addr
1895 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1896 y = change_address (y, VOIDmode, XEXP (y, 0));
1897
1898 if (mode == BLKmode)
1899 abort ();
1900
1901 return emit_move_insn_1 (x, y);
1902 }
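/* Illustrative sketch (not in the original source): loading an
   immediate into a fresh SImode pseudo:

       rtx reg = gen_reg_rtx (SImode);
       emit_move_insn (reg, GEN_INT (42));

   The constant has VOIDmode, which the mode check above permits; if it
   were not a legitimate immediate it would first be forced into the
   constant pool.  */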
1903
1904 /* Low level part of emit_move_insn.
1905 Called just like emit_move_insn, but assumes X and Y
1906 are basically valid. */
1907
1908 rtx
1909 emit_move_insn_1 (x, y)
1910 rtx x, y;
1911 {
1912 enum machine_mode mode = GET_MODE (x);
1913 enum machine_mode submode;
1914 enum mode_class class = GET_MODE_CLASS (mode);
1915 int i;
1916
1917 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1918 return
1919 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1920
1921 /* Expand complex moves by moving real part and imag part, if possible. */
1922 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1923 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
1924 * BITS_PER_UNIT),
1925 (class == MODE_COMPLEX_INT
1926 ? MODE_INT : MODE_FLOAT),
1927 0))
1928 && (mov_optab->handlers[(int) submode].insn_code
1929 != CODE_FOR_nothing))
1930 {
1931 /* Don't split destination if it is a stack push. */
1932 int stack = push_operand (x, GET_MODE (x));
1933 rtx insns;
1934
1935 /* If this is a stack push, push the high part first, so that
1936 the parts end up in argument order.
1937
1938 In that case, change_address is used only to convert
1939 the mode, not to change the address. */
1940 if (stack)
1941 {
1942 /* Note that the real part always precedes the imag part in memory
1943 regardless of the machine's endianness. */
1944 #ifdef STACK_GROWS_DOWNWARD
1945 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1946 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1947 gen_imagpart (submode, y)));
1948 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1949 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1950 gen_realpart (submode, y)));
1951 #else
1952 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1953 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1954 gen_realpart (submode, y)));
1955 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1956 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1957 gen_imagpart (submode, y)));
1958 #endif
1959 }
1960 else
1961 {
1962 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1963 (gen_realpart (submode, x), gen_realpart (submode, y)));
1964 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1965 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
1966 }
1967
1968 return get_last_insn ();
1969 }
1970
1971 /* This will handle any multi-word mode that lacks a move_insn pattern.
1972 However, you will get better code if you define such patterns,
1973 even if they must turn into multiple assembler instructions. */
1974 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1975 {
1976 rtx last_insn = 0;
1977 rtx insns;
1978
1979 #ifdef PUSH_ROUNDING
1980
1981 /* If X is a push on the stack, do the push now and replace
1982 X with a reference to the stack pointer. */
1983 if (push_operand (x, GET_MODE (x)))
1984 {
1985 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
1986 x = change_address (x, VOIDmode, stack_pointer_rtx);
1987 }
1988 #endif
1989
1990 /* Show the output dies here. */
1991 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1992
1993 for (i = 0;
1994 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1995 i++)
1996 {
1997 rtx xpart = operand_subword (x, i, 1, mode);
1998 rtx ypart = operand_subword (y, i, 1, mode);
1999
2000 /* If we can't get a part of Y, put Y into memory if it is a
2001 constant. Otherwise, force it into a register. If we still
2002 can't get a part of Y, abort. */
2003 if (ypart == 0 && CONSTANT_P (y))
2004 {
2005 y = force_const_mem (mode, y);
2006 ypart = operand_subword (y, i, 1, mode);
2007 }
2008 else if (ypart == 0)
2009 ypart = operand_subword_force (y, i, mode);
2010
2011 if (xpart == 0 || ypart == 0)
2012 abort ();
2013
2014 last_insn = emit_move_insn (xpart, ypart);
2015 }
2016
2017 return last_insn;
2018 }
2019 else
2020 abort ();
2021 }
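
/* For instance, on a 32-bit target with no movdi pattern, a DImode
   register-to-register move falls into the multi-word case above and
   comes out roughly as (a sketch; operand_subword handles the
   endianness details):

	(clobber (reg:DI x))
	(set (subreg:SI (reg:DI x) 0) (subreg:SI (reg:DI y) 0))
	(set (subreg:SI (reg:DI x) 1) (subreg:SI (reg:DI y) 1))  */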
2022 \f
2023 /* Pushing data onto the stack. */
2024
2025 /* Push a block of length SIZE (perhaps variable)
2026 and return an rtx to address the beginning of the block.
2027 Note that it is not possible for the value returned to be a QUEUED.
2028 The value may be virtual_outgoing_args_rtx.
2029
2030 EXTRA is the number of bytes of padding to push in addition to SIZE.
2031 BELOW nonzero means this padding comes at low addresses;
2032 otherwise, the padding comes at high addresses. */
2033
2034 rtx
2035 push_block (size, extra, below)
2036 rtx size;
2037 int extra, below;
2038 {
2039 register rtx temp;
2040
2041 size = convert_modes (Pmode, ptr_mode, size, 1);
2042 if (CONSTANT_P (size))
2043 anti_adjust_stack (plus_constant (size, extra));
2044 else if (GET_CODE (size) == REG && extra == 0)
2045 anti_adjust_stack (size);
2046 else
2047 {
2048 rtx temp = copy_to_mode_reg (Pmode, size);
2049 if (extra != 0)
2050 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2051 temp, 0, OPTAB_LIB_WIDEN);
2052 anti_adjust_stack (temp);
2053 }
2054
2055 #ifdef STACK_GROWS_DOWNWARD
2056 temp = virtual_outgoing_args_rtx;
2057 if (extra != 0 && below)
2058 temp = plus_constant (temp, extra);
2059 #else
2060 if (GET_CODE (size) == CONST_INT)
2061 temp = plus_constant (virtual_outgoing_args_rtx,
2062 - INTVAL (size) - (below ? 0 : extra));
2063 else if (extra != 0 && !below)
2064 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2065 negate_rtx (Pmode, plus_constant (size, extra)));
2066 else
2067 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2068 negate_rtx (Pmode, size));
2069 #endif
2070
2071 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2072 }
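
/* Illustrative use: to make room for a 16-byte block with no padding,

	rtx addr = push_block (GEN_INT (16), 0, 0);

   adjusts the stack pointer by 16 bytes and returns an address (based
   on virtual_outgoing_args_rtx) for the start of the new block.  */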
2073
2074 rtx
2075 gen_push_operand ()
2076 {
2077 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2078 }
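
/* On a target where the stack grows downward this yields
   (pre_dec (reg sp)), so wrapping it in a MEM, as in

	gen_rtx (MEM, SImode, gen_push_operand ())

   gives a move destination that pushes its operand.  */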
2079
2080 /* Generate code to push X onto the stack, assuming it has mode MODE and
2081 type TYPE.
2082 MODE is redundant except when X is a CONST_INT (since they don't
2083 carry mode info).
2084 SIZE is an rtx for the size of data to be copied (in bytes),
2085 needed only if X is BLKmode.
2086
2087 ALIGN (in bytes) is the maximum alignment we can assume.
2088
2089 If PARTIAL and REG are both nonzero, then copy that many of the first
2090 words of X into registers starting with REG, and push the rest of X.
2091 The amount of space pushed is decreased by PARTIAL words,
2092 rounded *down* to a multiple of PARM_BOUNDARY.
2093 REG must be a hard register in this case.
2094 If REG is zero but PARTIAL is not, take all other actions for an
2095 argument partially in registers, but do not actually load any
2096 registers.
2097
2098 EXTRA is the amount in bytes of extra space to leave next to this arg.
2099 This is ignored if an argument block has already been allocated.
2100
2101 On a machine that lacks real push insns, ARGS_ADDR is the address of
2102 the bottom of the argument block for this call. We use indexing off there
2103 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2104 argument block has not been preallocated.
2105
2106 ARGS_SO_FAR is the size of args previously pushed for this call. */
2107
2108 void
2109 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2110 args_addr, args_so_far)
2111 register rtx x;
2112 enum machine_mode mode;
2113 tree type;
2114 rtx size;
2115 int align;
2116 int partial;
2117 rtx reg;
2118 int extra;
2119 rtx args_addr;
2120 rtx args_so_far;
2121 {
2122 rtx xinner;
2123 enum direction stack_direction
2124 #ifdef STACK_GROWS_DOWNWARD
2125 = downward;
2126 #else
2127 = upward;
2128 #endif
2129
2130 /* Decide where to pad the argument: `downward' for below,
2131 `upward' for above, or `none' for don't pad it.
2132 Default is below for small data on big-endian machines; else above. */
2133 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2134
2135 /* Invert direction if stack is post-update. */
2136 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2137 if (where_pad != none)
2138 where_pad = (where_pad == downward ? upward : downward);
2139
2140 xinner = x = protect_from_queue (x, 0);
2141
2142 if (mode == BLKmode)
2143 {
2144 /* Copy a block into the stack, entirely or partially. */
2145
2146 register rtx temp;
2147 int used = partial * UNITS_PER_WORD;
2148 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2149 int skip;
2150
2151 if (size == 0)
2152 abort ();
2153
2154 used -= offset;
2155
2156 /* USED is now the # of bytes we need not copy to the stack
2157 because registers will take care of them. */
2158
2159 if (partial != 0)
2160 xinner = change_address (xinner, BLKmode,
2161 plus_constant (XEXP (xinner, 0), used));
2162
2163 /* If the partial register-part of the arg counts in its stack size,
2164 skip the part of stack space corresponding to the registers.
2165 Otherwise, start copying to the beginning of the stack space,
2166 by setting SKIP to 0. */
2167 #ifndef REG_PARM_STACK_SPACE
2168 skip = 0;
2169 #else
2170 skip = used;
2171 #endif
2172
2173 #ifdef PUSH_ROUNDING
2174 /* Do it with several push insns if that doesn't take lots of insns
2175 and if there is no difficulty with push insns that skip bytes
2176 on the stack for alignment purposes. */
2177 if (args_addr == 0
2178 && GET_CODE (size) == CONST_INT
2179 && skip == 0
2180 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2181 < MOVE_RATIO)
2182 /* Here we avoid the case of a structure whose weak alignment
2183 would force many pushes of small amounts of data,
2184 since the rounding such small pushes do causes trouble. */
2185 && ((! SLOW_UNALIGNED_ACCESS)
2186 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2187 || PUSH_ROUNDING (align) == align)
2188 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2189 {
2190 /* Push padding now if padding above and stack grows down,
2191 or if padding below and stack grows up.
2192 But if space already allocated, this has already been done. */
2193 if (extra && args_addr == 0
2194 && where_pad != none && where_pad != stack_direction)
2195 anti_adjust_stack (GEN_INT (extra));
2196
2197 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2198 INTVAL (size) - used, align);
2199 }
2200 else
2201 #endif /* PUSH_ROUNDING */
2202 {
2203 /* Otherwise make space on the stack and copy the data
2204 to the address of that space. */
2205
2206 /* Deduct words put into registers from the size we must copy. */
2207 if (partial != 0)
2208 {
2209 if (GET_CODE (size) == CONST_INT)
2210 size = GEN_INT (INTVAL (size) - used);
2211 else
2212 size = expand_binop (GET_MODE (size), sub_optab, size,
2213 GEN_INT (used), NULL_RTX, 0,
2214 OPTAB_LIB_WIDEN);
2215 }
2216
2217 /* Get the address of the stack space.
2218 In this case, we do not deal with EXTRA separately.
2219 A single stack adjust will do. */
2220 if (! args_addr)
2221 {
2222 temp = push_block (size, extra, where_pad == downward);
2223 extra = 0;
2224 }
2225 else if (GET_CODE (args_so_far) == CONST_INT)
2226 temp = memory_address (BLKmode,
2227 plus_constant (args_addr,
2228 skip + INTVAL (args_so_far)));
2229 else
2230 temp = memory_address (BLKmode,
2231 plus_constant (gen_rtx (PLUS, Pmode,
2232 args_addr, args_so_far),
2233 skip));
2234
2235 /* TEMP is the address of the block. Copy the data there. */
2236 if (GET_CODE (size) == CONST_INT
2237 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2238 < MOVE_RATIO))
2239 {
2240 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2241 INTVAL (size), align);
2242 goto ret;
2243 }
2244 /* Try the most limited insn first, because there's no point
2245 including more than one in the machine description unless
2246 the more limited one has some advantage. */
2247 #ifdef HAVE_movstrqi
2248 if (HAVE_movstrqi
2249 && GET_CODE (size) == CONST_INT
2250 && ((unsigned) INTVAL (size)
2251 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2252 {
2253 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2254 xinner, size, GEN_INT (align));
2255 if (pat != 0)
2256 {
2257 emit_insn (pat);
2258 goto ret;
2259 }
2260 }
2261 #endif
2262 #ifdef HAVE_movstrhi
2263 if (HAVE_movstrhi
2264 && GET_CODE (size) == CONST_INT
2265 && ((unsigned) INTVAL (size)
2266 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2267 {
2268 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2269 xinner, size, GEN_INT (align));
2270 if (pat != 0)
2271 {
2272 emit_insn (pat);
2273 goto ret;
2274 }
2275 }
2276 #endif
2277 #ifdef HAVE_movstrsi
2278 if (HAVE_movstrsi)
2279 {
2280 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2281 xinner, size, GEN_INT (align));
2282 if (pat != 0)
2283 {
2284 emit_insn (pat);
2285 goto ret;
2286 }
2287 }
2288 #endif
2289 #ifdef HAVE_movstrdi
2290 if (HAVE_movstrdi)
2291 {
2292 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2293 xinner, size, GEN_INT (align));
2294 if (pat != 0)
2295 {
2296 emit_insn (pat);
2297 goto ret;
2298 }
2299 }
2300 #endif
2301
2302 #ifndef ACCUMULATE_OUTGOING_ARGS
2303 /* If the source is referenced relative to the stack pointer,
2304 copy it to another register to stabilize it. We do not need
2305 to do this if we know that we won't be changing sp. */
2306
2307 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2308 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2309 temp = copy_to_reg (temp);
2310 #endif
2311
2312 /* Make inhibit_defer_pop nonzero around the library call
2313 to force it to pop the bcopy-arguments right away. */
2314 NO_DEFER_POP;
2315 #ifdef TARGET_MEM_FUNCTIONS
2316 emit_library_call (memcpy_libfunc, 0,
2317 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2318 convert_to_mode (TYPE_MODE (sizetype),
2319 size, TREE_UNSIGNED (sizetype)),
2320 TYPE_MODE (sizetype));
2321 #else
2322 emit_library_call (bcopy_libfunc, 0,
2323 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2324 convert_to_mode (TYPE_MODE (sizetype),
2325 size, TREE_UNSIGNED (sizetype)),
2326 TYPE_MODE (sizetype));
2327 #endif
2328 OK_DEFER_POP;
2329 }
2330 }
2331 else if (partial > 0)
2332 {
2333 /* Scalar partly in registers. */
2334
2335 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2336 int i;
2337 int not_stack;
2338 /* # words of start of argument
2339 that we must make space for but need not store. */
2340 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2341 int args_offset = INTVAL (args_so_far);
2342 int skip;
2343
2344 /* Push padding now if padding above and stack grows down,
2345 or if padding below and stack grows up.
2346 But if space already allocated, this has already been done. */
2347 if (extra && args_addr == 0
2348 && where_pad != none && where_pad != stack_direction)
2349 anti_adjust_stack (GEN_INT (extra));
2350
2351 /* If we make space by pushing it, we might as well push
2352 the real data. Otherwise, we can leave OFFSET nonzero
2353 and leave the space uninitialized. */
2354 if (args_addr == 0)
2355 offset = 0;
2356
2357 /* Now NOT_STACK gets the number of words that we don't need to
2358 allocate on the stack. */
2359 not_stack = partial - offset;
2360
2361 /* If the partial register-part of the arg counts in its stack size,
2362 skip the part of stack space corresponding to the registers.
2363 Otherwise, start copying to the beginning of the stack space,
2364 by setting SKIP to 0. */
2365 #ifndef REG_PARM_STACK_SPACE
2366 skip = 0;
2367 #else
2368 skip = not_stack;
2369 #endif
2370
2371 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2372 x = validize_mem (force_const_mem (mode, x));
2373
2374 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2375 SUBREGs of such registers are not allowed. */
2376 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2377 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2378 x = copy_to_reg (x);
2379
2380 /* Loop over all the words allocated on the stack for this arg. */
2381 /* We can do it by words, because any scalar bigger than a word
2382 has a size that is a multiple of a word. */
2383 #ifndef PUSH_ARGS_REVERSED
2384 for (i = not_stack; i < size; i++)
2385 #else
2386 for (i = size - 1; i >= not_stack; i--)
2387 #endif
2388 if (i >= not_stack + offset)
2389 emit_push_insn (operand_subword_force (x, i, mode),
2390 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2391 0, args_addr,
2392 GEN_INT (args_offset + ((i - not_stack + skip)
2393 * UNITS_PER_WORD)));
2394 }
2395 else
2396 {
2397 rtx addr;
2398
2399 /* Push padding now if padding above and stack grows down,
2400 or if padding below and stack grows up.
2401 But if space already allocated, this has already been done. */
2402 if (extra && args_addr == 0
2403 && where_pad != none && where_pad != stack_direction)
2404 anti_adjust_stack (GEN_INT (extra));
2405
2406 #ifdef PUSH_ROUNDING
2407 if (args_addr == 0)
2408 addr = gen_push_operand ();
2409 else
2410 #endif
2411 if (GET_CODE (args_so_far) == CONST_INT)
2412 addr
2413 = memory_address (mode,
2414 plus_constant (args_addr, INTVAL (args_so_far)));
2415 else
2416 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2417 args_so_far));
2418
2419 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2420 }
2421
2422 ret:
2423 /* If part should go in registers, copy that part
2424 into the appropriate registers. Do this now, at the end,
2425 since mem-to-mem copies above may do function calls. */
2426 if (partial > 0 && reg != 0)
2427 move_block_to_reg (REGNO (reg), x, partial, mode);
2428
2429 if (extra && args_addr == 0 && where_pad == stack_direction)
2430 anti_adjust_stack (GEN_INT (extra));
2431 }
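
/* As a simple illustration (all argument values here are made up):
   pushing a word-sized scalar with no partial-register part and no
   preallocated argument block,

	emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
			GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT,
			0, NULL_RTX, 0, NULL_RTX, const0_rtx);

   reduces to the final branch above and emits a single move into
   (mem (pre_dec (reg sp))) (or pre_inc, per STACK_PUSH_CODE) on a
   machine with push insns.  */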
2432 \f
2433 /* Expand an assignment that stores the value of FROM into TO.
2434 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2435 (This may contain a QUEUED rtx;
2436 if the value is constant, this rtx is a constant.)
2437 Otherwise, the returned value is NULL_RTX.
2438
2439 SUGGEST_REG is no longer actually used.
2440 It used to mean, copy the value through a register
2441 and return that register, if that is possible.
2442 We now use WANT_VALUE to decide whether to do this. */
2443
2444 rtx
2445 expand_assignment (to, from, want_value, suggest_reg)
2446 tree to, from;
2447 int want_value;
2448 int suggest_reg;
2449 {
2450 register rtx to_rtx = 0;
2451 rtx result;
2452
2453 /* Don't crash if the lhs of the assignment was erroneous. */
2454
2455 if (TREE_CODE (to) == ERROR_MARK)
2456 {
2457 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2458 return want_value ? result : NULL_RTX;
2459 }
2460
2461 if (output_bytecode)
2462 {
2463 tree dest_innermost;
2464
2465 bc_expand_expr (from);
2466 bc_emit_instruction (duplicate);
2467
2468 dest_innermost = bc_expand_address (to);
2469
2470 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2471 take care of it here. */
2472
2473 bc_store_memory (TREE_TYPE (to), dest_innermost);
2474 return NULL;
2475 }
2476
2477 /* Assignment of a structure component needs special treatment
2478 if the structure component's rtx is not simply a MEM.
2479 Assignment of an array element at a constant index, and assignment of
2480 an array element in an unaligned packed structure field, has the same
2481 problem. */
2482
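/* For example (illustrative), given

	struct { unsigned f : 3; } s;
	s.f = v;

   the lhs is a COMPONENT_REF denoting a bit-field within a word, so
   the assignment must go through get_inner_reference and store_field
   below rather than through an ordinary move into a MEM.  */
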
2483 if (TREE_CODE (to) == COMPONENT_REF
2484 || TREE_CODE (to) == BIT_FIELD_REF
2485 || (TREE_CODE (to) == ARRAY_REF
2486 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2487 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2488 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2489 {
2490 enum machine_mode mode1;
2491 int bitsize;
2492 int bitpos;
2493 tree offset;
2494 int unsignedp;
2495 int volatilep = 0;
2496 tree tem;
2497 int alignment;
2498
2499 push_temp_slots ();
2500 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2501 &mode1, &unsignedp, &volatilep);
2502
2503 /* If we are going to use store_bit_field and extract_bit_field,
2504 make sure to_rtx will be safe for multiple use. */
2505
2506 if (mode1 == VOIDmode && want_value)
2507 tem = stabilize_reference (tem);
2508
2509 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2510 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2511 if (offset != 0)
2512 {
2513 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2514
2515 if (GET_CODE (to_rtx) != MEM)
2516 abort ();
2517 to_rtx = change_address (to_rtx, VOIDmode,
2518 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2519 force_reg (ptr_mode, offset_rtx)));
2520 /* If we have a variable offset, the known alignment
2521 is only that of the innermost structure containing the field.
2522 (Actually, we could sometimes do better by using the
2523 alignment of an element of the innermost array, but there is no need.) */
2524 if (TREE_CODE (to) == COMPONENT_REF
2525 || TREE_CODE (to) == BIT_FIELD_REF)
2526 alignment
2527 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2528 }
2529 if (volatilep)
2530 {
2531 if (GET_CODE (to_rtx) == MEM)
2532 {
2533 /* When the offset is zero, to_rtx is the address of the
2534 structure we are storing into, and hence may be shared.
2535 We must make a new MEM before setting the volatile bit. */
2536 if (offset == 0)
2537 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2538 MEM_VOLATILE_P (to_rtx) = 1;
2539 }
2540 #if 0 /* This was turned off because, when a field is volatile
2541 in an object which is not volatile, the object may be in a register,
2542 and then we would abort over here. */
2543 else
2544 abort ();
2545 #endif
2546 }
2547
2548 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2549 (want_value
2550 /* Spurious cast makes HPUX compiler happy. */
2551 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2552 : VOIDmode),
2553 unsignedp,
2554 /* Required alignment of containing datum. */
2555 alignment,
2556 int_size_in_bytes (TREE_TYPE (tem)));
2557 preserve_temp_slots (result);
2558 free_temp_slots ();
2559 pop_temp_slots ();
2560
2561 /* If the value is meaningful, convert RESULT to the proper mode.
2562 Otherwise, return nothing. */
2563 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2564 TYPE_MODE (TREE_TYPE (from)),
2565 result,
2566 TREE_UNSIGNED (TREE_TYPE (to)))
2567 : NULL_RTX);
2568 }
2569
2570 /* If the rhs is a function call and its value is not an aggregate,
2571 call the function before we start to compute the lhs.
2572 This is needed for correct code for cases such as
2573 val = setjmp (buf) on machines where reference to val
2574 requires loading up part of an address in a separate insn.
2575
2576 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2577 a promoted variable where the zero- or sign- extension needs to be done.
2578 Handling this in the normal way is safe because no computation is done
2579 before the call. */
2580 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2581 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2582 {
2583 rtx value;
2584
2585 push_temp_slots ();
2586 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2587 if (to_rtx == 0)
2588 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2589
2590 if (GET_MODE (to_rtx) == BLKmode)
2591 {
2592 int align = MIN (TYPE_ALIGN (TREE_TYPE (from)), BITS_PER_WORD);
2593 emit_block_move (to_rtx, value, expr_size (from), align);
2594 }
2595 else
2596 emit_move_insn (to_rtx, value);
2597 preserve_temp_slots (to_rtx);
2598 free_temp_slots ();
2599 pop_temp_slots ();
2600 return want_value ? to_rtx : NULL_RTX;
2601 }
2602
2603 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2604 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2605
2606 if (to_rtx == 0)
2607 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2608
2609 /* Don't move directly into a return register. */
2610 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2611 {
2612 rtx temp;
2613
2614 push_temp_slots ();
2615 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2616 emit_move_insn (to_rtx, temp);
2617 preserve_temp_slots (to_rtx);
2618 free_temp_slots ();
2619 pop_temp_slots ();
2620 return want_value ? to_rtx : NULL_RTX;
2621 }
2622
2623 /* In case we are returning the contents of an object which overlaps
2624 the place the value is being stored, use a safe function when copying
2625 a value through a pointer into a structure value return block. */
2626 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2627 && current_function_returns_struct
2628 && !current_function_returns_pcc_struct)
2629 {
2630 rtx from_rtx, size;
2631
2632 push_temp_slots ();
2633 size = expr_size (from);
2634 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2635
2636 #ifdef TARGET_MEM_FUNCTIONS
2637 emit_library_call (memcpy_libfunc, 0,
2638 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2639 XEXP (from_rtx, 0), Pmode,
2640 convert_to_mode (TYPE_MODE (sizetype),
2641 size, TREE_UNSIGNED (sizetype)),
2642 TYPE_MODE (sizetype));
2643 #else
2644 emit_library_call (bcopy_libfunc, 0,
2645 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2646 XEXP (to_rtx, 0), Pmode,
2647 convert_to_mode (TYPE_MODE (sizetype),
2648 size, TREE_UNSIGNED (sizetype)),
2649 TYPE_MODE (sizetype));
2650 #endif
2651
2652 preserve_temp_slots (to_rtx);
2653 free_temp_slots ();
2654 pop_temp_slots ();
2655 return want_value ? to_rtx : NULL_RTX;
2656 }
2657
2658 /* Compute FROM and store the value in the rtx we got. */
2659
2660 push_temp_slots ();
2661 result = store_expr (from, to_rtx, want_value);
2662 preserve_temp_slots (result);
2663 free_temp_slots ();
2664 pop_temp_slots ();
2665 return want_value ? result : NULL_RTX;
2666 }
2667
2668 /* Generate code for computing expression EXP,
2669 and storing the value into TARGET.
2670 TARGET may contain a QUEUED rtx.
2671
2672 If WANT_VALUE is nonzero, return a copy of the value
2673 not in TARGET, so that we can be sure to use the proper
2674 value in a containing expression even if TARGET has something
2675 else stored in it. If possible, we copy the value through a pseudo
2676 and return that pseudo. Or, if the value is constant, we try to
2677 return the constant. In some cases, we return a pseudo
2678 copied *from* TARGET.
2679
2680 If the mode is BLKmode then we may return TARGET itself.
2681 It turns out that in BLKmode it doesn't cause a problem,
2682 because C has no operators that could combine two different
2683 assignments into the same BLKmode object with different values
2684 with no sequence point. Will other languages need this to
2685 be more thorough?
2686
2687 If WANT_VALUE is 0, we return NULL, to make sure
2688 to catch quickly any cases where the caller uses the value
2689 and fails to set WANT_VALUE. */
2690
2691 rtx
2692 store_expr (exp, target, want_value)
2693 register tree exp;
2694 register rtx target;
2695 int want_value;
2696 {
2697 register rtx temp;
2698 int dont_return_target = 0;
2699
2700 if (TREE_CODE (exp) == COMPOUND_EXPR)
2701 {
2702 /* Perform first part of compound expression, then assign from second
2703 part. */
2704 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2705 emit_queue ();
2706 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2707 }
2708 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2709 {
2710 /* For conditional expression, get safe form of the target. Then
2711 test the condition, doing the appropriate assignment on either
2712 side. This avoids the creation of unnecessary temporaries.
2713 For non-BLKmode, it is more efficient not to do this. */
2714
2715 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2716
2717 emit_queue ();
2718 target = protect_from_queue (target, 1);
2719
2720 do_pending_stack_adjust ();
2721 NO_DEFER_POP;
2722 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2723 store_expr (TREE_OPERAND (exp, 1), target, 0);
2724 emit_queue ();
2725 emit_jump_insn (gen_jump (lab2));
2726 emit_barrier ();
2727 emit_label (lab1);
2728 store_expr (TREE_OPERAND (exp, 2), target, 0);
2729 emit_queue ();
2730 emit_label (lab2);
2731 OK_DEFER_POP;
2732 return want_value ? target : NULL_RTX;
2733 }
2734 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2735 && GET_MODE (target) != BLKmode)
2736 /* If target is in memory and caller wants value in a register instead,
2737 arrange that. Pass TARGET as target for expand_expr so that,
2738 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2739 We know expand_expr will not use the target in that case.
2740 Don't do this if TARGET is volatile because we are supposed
2741 to write it and then read it. */
2742 {
2743 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2744 GET_MODE (target), 0);
2745 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2746 temp = copy_to_reg (temp);
2747 dont_return_target = 1;
2748 }
2749 else if (queued_subexp_p (target))
2750 /* If target contains a postincrement, let's not risk
2751 using it as the place to generate the rhs. */
2752 {
2753 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2754 {
2755 /* Expand EXP into a new pseudo. */
2756 temp = gen_reg_rtx (GET_MODE (target));
2757 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2758 }
2759 else
2760 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2761
2762 /* If target is volatile, ANSI requires accessing the value
2763 *from* the target, if it is accessed. So make that happen.
2764 In no case return the target itself. */
2765 if (! MEM_VOLATILE_P (target) && want_value)
2766 dont_return_target = 1;
2767 }
2768 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2769 /* If this is a scalar in a register that is stored in a wider mode
2770 than the declared mode, compute the result into its declared mode
2771 and then convert to the wider mode. Our value is the computed
2772 expression. */
2773 {
2774 /* If we don't want a value, we can do the conversion inside EXP,
2775 which will often result in some optimizations. Do the conversion
2776 in two steps: first change the signedness, if needed, then
2777 do the extension. */
2778 if (! want_value)
2779 {
2780 if (TREE_UNSIGNED (TREE_TYPE (exp))
2781 != SUBREG_PROMOTED_UNSIGNED_P (target))
2782 exp
2783 = convert
2784 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
2785 TREE_TYPE (exp)),
2786 exp);
2787
2788 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
2789 SUBREG_PROMOTED_UNSIGNED_P (target)),
2790 exp);
2791 }
2792
2793 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2794
2795 /* If TEMP is a volatile MEM and we want a result value, make
2796 the access now so it gets done only once. Likewise if
2797 it contains TARGET. */
2798 if (GET_CODE (temp) == MEM && want_value
2799 && (MEM_VOLATILE_P (temp)
2800 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
2801 temp = copy_to_reg (temp);
2802
2803 /* If TEMP is a VOIDmode constant, use convert_modes to make
2804 sure that we properly convert it. */
2805 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2806 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2807 TYPE_MODE (TREE_TYPE (exp)), temp,
2808 SUBREG_PROMOTED_UNSIGNED_P (target));
2809
2810 convert_move (SUBREG_REG (target), temp,
2811 SUBREG_PROMOTED_UNSIGNED_P (target));
2812 return want_value ? temp : NULL_RTX;
2813 }
2814 else
2815 {
2816 temp = expand_expr (exp, target, GET_MODE (target), 0);
2817 /* Return TARGET if it's a specified hardware register.
2818 If TARGET is a volatile mem ref, either return TARGET
2819 or return a reg copied *from* TARGET; ANSI requires this.
2820
2821 Otherwise, if TEMP is not TARGET, return TEMP
2822 if it is constant (for efficiency),
2823 or if we really want the correct value. */
2824 if (!(target && GET_CODE (target) == REG
2825 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2826 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2827 && temp != target
2828 && (CONSTANT_P (temp) || want_value))
2829 dont_return_target = 1;
2830 }
2831
2832 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2833 the same as that of TARGET, adjust the constant. This is needed, for
2834 example, in case it is a CONST_DOUBLE and we want only a word-sized
2835 value. */
2836 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2837 && TREE_CODE (exp) != ERROR_MARK
2838 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2839 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2840 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2841
2842 /* If value was not generated in the target, store it there.
2843 Convert the value to TARGET's type first if necessary. */
2844
2845 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2846 {
2847 target = protect_from_queue (target, 1);
2848 if (GET_MODE (temp) != GET_MODE (target)
2849 && GET_MODE (temp) != VOIDmode)
2850 {
2851 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2852 if (dont_return_target)
2853 {
2854 /* In this case, we will return TEMP,
2855 so make sure it has the proper mode.
2856 But don't forget to store the value into TARGET. */
2857 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2858 emit_move_insn (target, temp);
2859 }
2860 else
2861 convert_move (target, temp, unsignedp);
2862 }
2863
2864 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2865 {
2866 /* Handle copying a string constant into an array.
2867 The string constant may be shorter than the array.
2868 So copy just the string's actual length, and clear the rest. */
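/* E.g., for char buf[8] = "hi"; we copy the string's 3 bytes
   (including the terminating null) and clear the remaining
   5 bytes; the lengths come from expr_size and
   TREE_STRING_LENGTH (illustrative example).  */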
2869 rtx size;
2870 rtx addr;
2871
2872 /* Get the size of the data type of the string,
2873 which is actually the size of the target. */
2874 size = expr_size (exp);
2875 if (GET_CODE (size) == CONST_INT
2876 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2877 emit_block_move (target, temp, size,
2878 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2879 else
2880 {
2881 /* Compute the size of the data to copy from the string. */
2882 tree copy_size
2883 = size_binop (MIN_EXPR,
2884 make_tree (sizetype, size),
2885 convert (sizetype,
2886 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2887 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2888 VOIDmode, 0);
2889 rtx label = 0;
2890
2891 /* Copy that much. */
2892 emit_block_move (target, temp, copy_size_rtx,
2893 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2894
2895 /* Figure out how much is left in TARGET that we have to clear.
2896 Do all calculations in ptr_mode. */
2897
2898 addr = XEXP (target, 0);
2899 addr = convert_modes (ptr_mode, Pmode, addr, 1);
2900
2901 if (GET_CODE (copy_size_rtx) == CONST_INT)
2902 {
2903 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
2904 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
2905 }
2906 else
2907 {
2908 addr = force_reg (ptr_mode, addr);
2909 addr = expand_binop (ptr_mode, add_optab, addr,
2910 copy_size_rtx, NULL_RTX, 0,
2911 OPTAB_LIB_WIDEN);
2912
2913 size = expand_binop (ptr_mode, sub_optab, size,
2914 copy_size_rtx, NULL_RTX, 0,
2915 OPTAB_LIB_WIDEN);
2916
2917 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2918 GET_MODE (size), 0, 0);
2919 label = gen_label_rtx ();
2920 emit_jump_insn (gen_blt (label));
2921 }
2922
2923 if (size != const0_rtx)
2924 {
2925 #ifdef TARGET_MEM_FUNCTIONS
2926 emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
2927 Pmode, const0_rtx, Pmode, size, ptr_mode);
2928 #else
2929 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2930 addr, Pmode, size, ptr_mode);
2931 #endif
2932 }
2933
2934 if (label)
2935 emit_label (label);
2936 }
2937 }
2938 else if (GET_MODE (temp) == BLKmode)
2939 emit_block_move (target, temp, expr_size (exp),
2940 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2941 else
2942 emit_move_insn (target, temp);
2943 }
2944
2945 /* If we don't want a value, return NULL_RTX. */
2946 if (! want_value)
2947 return NULL_RTX;
2948
2949 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2950 ??? The latter test doesn't seem to make sense. */
2951 else if (dont_return_target && GET_CODE (temp) != MEM)
2952 return temp;
2953
2954 /* Return TARGET itself if it is a hard register. */
2955 else if (want_value && GET_MODE (target) != BLKmode
2956 && ! (GET_CODE (target) == REG
2957 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2958 return copy_to_reg (target);
2959
2960 else
2961 return target;
2962 }
2963 \f
2964 /* Store the value of constructor EXP into the rtx TARGET.
2965 TARGET is either a REG or a MEM. */
2966
2967 static void
2968 store_constructor (exp, target)
2969 tree exp;
2970 rtx target;
2971 {
2972 tree type = TREE_TYPE (exp);
2973
2974 /* We know our target cannot conflict, since safe_from_p has been called. */
2975 #if 0
2976 /* Don't try copying piece by piece into a hard register
2977 since that is vulnerable to being clobbered by EXP.
2978 Instead, construct in a pseudo register and then copy it all. */
2979 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2980 {
2981 rtx temp = gen_reg_rtx (GET_MODE (target));
2982 store_constructor (exp, temp);
2983 emit_move_insn (target, temp);
2984 return;
2985 }
2986 #endif
2987
2988 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2989 || TREE_CODE (type) == QUAL_UNION_TYPE)
2990 {
2991 register tree elt;
2992
2993 /* Inform later passes that the whole union value is dead. */
2994 if (TREE_CODE (type) == UNION_TYPE
2995 || TREE_CODE (type) == QUAL_UNION_TYPE)
2996 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2997
2998 /* If we are building a static constructor into a register,
2999 set the initial value as zero so we can fold the value into
3000 a constant. But if more than one register is involved,
3001 this probably loses. */
3002 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3003 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3004 emit_move_insn (target, const0_rtx);
3005
3006 /* If the constructor has fewer fields than the structure,
3007 clear the whole structure first. */
3008 else if (list_length (CONSTRUCTOR_ELTS (exp))
3009 != list_length (TYPE_FIELDS (type)))
3010 clear_storage (target, expr_size (exp));
3011 else
3012 /* Inform later passes that the old value is dead. */
3013 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3014
3015 /* Store each element of the constructor into
3016 the corresponding field of TARGET. */
3017
3018 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3019 {
3020 register tree field = TREE_PURPOSE (elt);
3021 register enum machine_mode mode;
3022 int bitsize;
3023 int bitpos = 0;
3024 int unsignedp;
3025 tree pos, constant = 0, offset = 0;
3026 rtx to_rtx = target;
3027
3028 /* Just ignore missing fields.
3029 We cleared the whole structure, above,
3030 if any fields are missing. */
3031 if (field == 0)
3032 continue;
3033
3034 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3035 unsignedp = TREE_UNSIGNED (field);
3036 mode = DECL_MODE (field);
3037 if (DECL_BIT_FIELD (field))
3038 mode = VOIDmode;
3039
3040 pos = DECL_FIELD_BITPOS (field);
3041 if (TREE_CODE (pos) == INTEGER_CST)
3042 constant = pos;
3043 else if (TREE_CODE (pos) == PLUS_EXPR
3044 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3045 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3046 else
3047 offset = pos;
3048
3049 if (constant)
3050 bitpos = TREE_INT_CST_LOW (constant);
3051
3052 if (offset)
3053 {
3054 rtx offset_rtx;
3055
3056 if (contains_placeholder_p (offset))
3057 offset = build (WITH_RECORD_EXPR, sizetype,
3058 offset, exp);
3059
3060 offset = size_binop (FLOOR_DIV_EXPR, offset,
3061 size_int (BITS_PER_UNIT));
3062
3063 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3064 if (GET_CODE (to_rtx) != MEM)
3065 abort ();
3066
3067 to_rtx
3068 = change_address (to_rtx, VOIDmode,
3069 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3070 force_reg (ptr_mode, offset_rtx)));
3071 }
3072
3073 if (TREE_READONLY (field))
3074 {
3075 to_rtx = copy_rtx (to_rtx);
3076 RTX_UNCHANGING_P (to_rtx) = 1;
3077 }
3078
3079 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
3080 /* The alignment of TARGET is
3081 at least what its type requires. */
3082 VOIDmode, 0,
3083 TYPE_ALIGN (type) / BITS_PER_UNIT,
3084 int_size_in_bytes (type));
3085 }
3086 }
3087 else if (TREE_CODE (type) == ARRAY_TYPE)
3088 {
3089 register tree elt;
3090 register int i;
3091 tree domain = TYPE_DOMAIN (type);
3092 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3093 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3094 tree elttype = TREE_TYPE (type);
3095
3096 /* If the constructor has fewer fields than the structure,
3097 clear the whole structure first. Similarly if this is a
3098 static constructor of a non-BLKmode object.
3099
3100 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3101 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3102 clear_storage (target, expr_size (exp));
3103 else
3104 /* Inform later passes that the old value is dead. */
3105 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3106
3107 /* Store each element of the constructor into
3108 the corresponding element of TARGET, determined
3109 by counting the elements. */
3110 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3111 elt;
3112 elt = TREE_CHAIN (elt), i++)
3113 {
3114 register enum machine_mode mode;
3115 int bitsize;
3116 int bitpos;
3117 int unsignedp;
3118 tree index = TREE_PURPOSE (elt);
3119 rtx xtarget = target;
3120
3121 mode = TYPE_MODE (elttype);
3122 bitsize = GET_MODE_BITSIZE (mode);
3123 unsignedp = TREE_UNSIGNED (elttype);
3124
3125 if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3126 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3127 {
3128 rtx pos_rtx, addr, xtarget;
3129 tree position;
3130
3131 if (index == 0)
3132 index = size_int (i);
3133
3134 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3135 size_int (BITS_PER_UNIT));
3136 position = size_binop (MULT_EXPR, index, position);
3137 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3138 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3139 xtarget = change_address (target, mode, addr);
3140 store_expr (TREE_VALUE (elt), xtarget, 0);
3141 }
3142 else
3143 {
3144 if (index != 0)
3145 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3146 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3147 else
3148 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3149
3150 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
3151 /* The alignment of TARGET is
3152 at least what its type requires. */
3153 VOIDmode, 0,
3154 TYPE_ALIGN (type) / BITS_PER_UNIT,
3155 int_size_in_bytes (type));
3156 }
3157 }
3158 }
3159 /* Set constructor assignments. */
3160 else if (TREE_CODE (type) == SET_TYPE)
3161 {
3162 tree elt;
3163 rtx xtarget = XEXP (target, 0);
3164 int set_word_size = TYPE_ALIGN (type);
3165 int nbytes = int_size_in_bytes (type);
3166 tree non_const_elements;
3167 int need_to_clear_first;
3168 tree domain = TYPE_DOMAIN (type);
3169 tree domain_min, domain_max, bitlength;
3170
3171 /* The default implementation strategy is to extract the constant
3172 parts of the constructor, use them to initialize the target,
3173 and then "or" in whatever non-constant ranges we need in addition.
3174
3175 If a large set is all zero or all ones, it is
3176 probably better to set it using memset (if available) or bzero.
3177 Also, if a large set has just a single range, it may be
3178 better to first clear the whole set (using bzero/memset)
3179 and then set the bits we want. */
3180
3181 /* Check for all zeros. */
3182 if (CONSTRUCTOR_ELTS (exp) == NULL_TREE)
3183 {
3184 clear_storage (target, expr_size (exp));
3185 return;
3186 }
3187
3188 if (nbytes < 0)
3189 abort ();
3190
3191 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3192 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3193 bitlength = size_binop (PLUS_EXPR,
3194 size_binop (MINUS_EXPR, domain_max, domain_min),
3195 size_one_node);
3196
3197 /* Check for a range of all ones, or at most a single range.
3198 (This optimization is only a win for big sets.) */
3199 if (GET_MODE (target) == BLKmode && nbytes > 16
3200 && TREE_CHAIN (CONSTRUCTOR_ELTS (exp)) == NULL_TREE)
3201 {
3202 need_to_clear_first = 1;
3203 non_const_elements = CONSTRUCTOR_ELTS (exp);
3204 }
3205 else
3206 {
3207 int nbits = nbytes * BITS_PER_UNIT;
3208 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3209 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3210 char *bit_buffer = (char *) alloca (nbits);
3211 HOST_WIDE_INT word = 0;
3212 int bit_pos = 0;
3213 int ibit = 0;
3214 int offset = 0; /* In bytes from beginning of set. */
3215 non_const_elements = get_set_constructor_bits (exp,
3216 bit_buffer, nbits);
3217 for (;;)
3218 {
3219 if (bit_buffer[ibit])
3220 {
3221 if (BYTES_BIG_ENDIAN)
3222 word |= (1 << (set_word_size - 1 - bit_pos));
3223 else
3224 word |= 1 << bit_pos;
3225 }
3226 bit_pos++; ibit++;
3227 if (bit_pos >= set_word_size || ibit == nbits)
3228 {
3229 rtx datum = GEN_INT (word);
3230 rtx to_rtx;
3231 /* The assumption here is that it is safe to use XEXP if
3232 the set is multi-word, but not if it's single-word. */
3233 if (GET_CODE (target) == MEM)
3234 to_rtx = change_address (target, mode,
3235 plus_constant (XEXP (target, 0),
3236 offset));
3237 else if (offset == 0)
3238 to_rtx = target;
3239 else
3240 abort ();
3241 emit_move_insn (to_rtx, datum);
3242 if (ibit == nbits)
3243 break;
3244 word = 0;
3245 bit_pos = 0;
3246 offset += set_word_size / BITS_PER_UNIT;
3247 }
3248 }
3249 need_to_clear_first = 0;
3250 }
3251
3252 for (elt = non_const_elements; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3253 {
3254 /* Start of range of element, or NULL. */
3255 tree startbit = TREE_PURPOSE (elt);
3256 /* End of range of element, or element value. */
3257 tree endbit = TREE_VALUE (elt);
3258 HOST_WIDE_INT startb, endb;
3259 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3260
3261 bitlength_rtx = expand_expr (bitlength,
3262 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3263
3264 /* Handle a non-range tuple element like [ expr ]. */
3265 if (startbit == NULL_TREE)
3266 {
3267 startbit = save_expr (endbit);
3268 endbit = startbit;
3269 }
3270 startbit = convert (sizetype, startbit);
3271 endbit = convert (sizetype, endbit);
3272 if (! integer_zerop (domain_min))
3273 {
3274 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3275 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3276 }
3277 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3278 EXPAND_CONST_ADDRESS);
3279 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3280 EXPAND_CONST_ADDRESS);
3281
3282 if (REG_P (target))
3283 {
3284 targetx = assign_stack_temp (GET_MODE (target),
3285 GET_MODE_SIZE (GET_MODE (target)),
3286 0);
3287 emit_move_insn (targetx, target);
3288 }
3289 else if (GET_CODE (target) == MEM)
3290 targetx = target;
3291 else
3292 abort ();
3293
3294 #ifdef TARGET_MEM_FUNCTIONS
3295 /* Optimization: If startbit and endbit are
3296 constants divisible by BITS_PER_UNIT,
3297 call memset instead. */
3298 if (TREE_CODE (startbit) == INTEGER_CST
3299 && TREE_CODE (endbit) == INTEGER_CST
3300 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3301 && (endb = TREE_INT_CST_LOW (endbit)) % BITS_PER_UNIT == 0)
3302 {
3303
3304 if (need_to_clear_first
3305 && endb - startb != nbytes * BITS_PER_UNIT)
3306 clear_storage (target, expr_size (exp));
3307 need_to_clear_first = 0;
3308 emit_library_call (memset_libfunc, 0,
3309 VOIDmode, 3,
3310 plus_constant (XEXP (targetx, 0), startb),
3311 Pmode,
3312 constm1_rtx, Pmode,
3313 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3314 Pmode);
3315 }
3316 else
3317 #endif
3318 {
3319 if (need_to_clear_first)
3320 {
3321 clear_storage (target, expr_size (exp));
3322 need_to_clear_first = 0;
3323 }
3324 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3325 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3326 bitlength_rtx, TYPE_MODE (sizetype),
3327 startbit_rtx, TYPE_MODE (sizetype),
3328 endbit_rtx, TYPE_MODE (sizetype));
3329 }
3330 if (REG_P (target))
3331 emit_move_insn (target, targetx);
3332 }
3333 }
3334
3335 else
3336 abort ();
3337 }
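
/* Illustrative examples of the cases above: a record constructor such
   as

	struct { int x, y; } p = { 1, 2 };

   stores each field with store_field at the position given by
   DECL_FIELD_BITPOS, while an array constructor such as

	int v[3] = { 1, 2, 3 };

   stores each element at a multiple of TYPE_SIZE of the element type;
   a constructor with fewer elements than the type first clears the
   whole object with clear_storage.  */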
3338
3339 /* Store the value of EXP (an expression tree)
3340 into a subfield of TARGET which has mode MODE and occupies
3341 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3342 If MODE is VOIDmode, it means that we are storing into a bit-field.
3343
3344 If VALUE_MODE is VOIDmode, return nothing in particular.
3345 UNSIGNEDP is not used in this case.
3346
3347 Otherwise, return an rtx for the value stored. This rtx
3348 has mode VALUE_MODE if that is convenient to do.
3349 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3350
3351 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3352 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3353
3354 static rtx
3355 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3356 unsignedp, align, total_size)
3357 rtx target;
3358 int bitsize, bitpos;
3359 enum machine_mode mode;
3360 tree exp;
3361 enum machine_mode value_mode;
3362 int unsignedp;
3363 int align;
3364 int total_size;
3365 {
3366 HOST_WIDE_INT width_mask = 0;
3367
3368 if (bitsize < HOST_BITS_PER_WIDE_INT)
3369 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3370
3371 /* If we are storing into an unaligned field of an aligned union that is
3372 in a register, we may have the mode of TARGET being an integer mode but
3373 MODE == BLKmode. In that case, get an aligned object whose size and
3374 alignment are the same as TARGET and store TARGET into it (we can avoid
3375 the store if the field being stored is the entire width of TARGET). Then
3376 call ourselves recursively to store the field into a BLKmode version of
3377 that object. Finally, load from the object into TARGET. This is not
3378 very efficient in general, but should only be slightly more expensive
3379 than the otherwise-required unaligned accesses. Perhaps this can be
3380 cleaned up later. */
3381
3382 if (mode == BLKmode
3383 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3384 {
3385 rtx object = assign_stack_temp (GET_MODE (target),
3386 GET_MODE_SIZE (GET_MODE (target)), 0);
3387 rtx blk_object = copy_rtx (object);
3388
3389 MEM_IN_STRUCT_P (object) = 1;
3390 MEM_IN_STRUCT_P (blk_object) = 1;
3391 PUT_MODE (blk_object, BLKmode);
3392
3393 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3394 emit_move_insn (object, target);
3395
3396 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3397 align, total_size);
3398
3399 /* Even though we aren't returning target, we need to
3400 give it the updated value. */
3401 emit_move_insn (target, object);
3402
3403 return blk_object;
3404 }
3405
3406 /* If the structure is in a register or if the component
3407 is a bit field, we cannot use addressing to access it.
3408 Use bit-field techniques or SUBREG to store in it. */
3409
3410 if (mode == VOIDmode
3411 || (mode != BLKmode && ! direct_store[(int) mode])
3412 || GET_CODE (target) == REG
3413 || GET_CODE (target) == SUBREG
3414 /* If the field isn't aligned enough to store as an ordinary memref,
3415 store it as a bit field. */
3416 || (SLOW_UNALIGNED_ACCESS
3417 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3418 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3419 {
3420 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3421
3422 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3423 MODE. */
3424 if (mode != VOIDmode && mode != BLKmode
3425 && mode != TYPE_MODE (TREE_TYPE (exp)))
3426 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3427
3428 /* Store the value in the bitfield. */
3429 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3430 if (value_mode != VOIDmode)
3431 {
3432 /* The caller wants an rtx for the value. */
3433 /* If possible, avoid refetching from the bitfield itself. */
3434 if (width_mask != 0
3435 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3436 {
3437 tree count;
3438 enum machine_mode tmode;
3439
3440 if (unsignedp)
3441 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3442 tmode = GET_MODE (temp);
3443 if (tmode == VOIDmode)
3444 tmode = value_mode;
3445 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3446 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3447 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3448 }
3449 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3450 NULL_RTX, value_mode, 0, align,
3451 total_size);
3452 }
3453 return const0_rtx;
3454 }
3455 else
3456 {
3457 rtx addr = XEXP (target, 0);
3458 rtx to_rtx;
3459
3460 /* If a value is wanted, it must be the lhs;
3461 so make the address stable for multiple use. */
3462
3463 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3464 && ! CONSTANT_ADDRESS_P (addr)
3465 /* A frame-pointer reference is already stable. */
3466 && ! (GET_CODE (addr) == PLUS
3467 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3468 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3469 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3470 addr = copy_to_reg (addr);
3471
3472 /* Now build a reference to just the desired component. */
3473
3474 to_rtx = change_address (target, mode,
3475 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3476 MEM_IN_STRUCT_P (to_rtx) = 1;
3477
3478 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3479 }
3480 }
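
/* A worked example of the bit-field refetch avoidance above: for a
   signed 5-bit field and a 32-bit TMODE, COUNT is 32 - 5 = 27, so
   shifting TEMP left by 27 and arithmetically back right by 27
   sign-extends the stored value in place.  For an unsigned field we
   instead mask with WIDTH_MASK = (1 << 5) - 1 = 0x1f.  */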
3481 \f
3482 /* Return true if any object containing the innermost array is an unaligned
3483 packed structure field. */
3484
3485 static int
3486 get_inner_unaligned_p (exp)
3487 tree exp;
3488 {
3489 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3490
3491 while (1)
3492 {
3493 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3494 {
3495 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3496 < needed_alignment)
3497 return 1;
3498 }
3499 else if (TREE_CODE (exp) != ARRAY_REF
3500 && TREE_CODE (exp) != NON_LVALUE_EXPR
3501 && ! ((TREE_CODE (exp) == NOP_EXPR
3502 || TREE_CODE (exp) == CONVERT_EXPR)
3503 && (TYPE_MODE (TREE_TYPE (exp))
3504 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3505 break;
3506
3507 exp = TREE_OPERAND (exp, 0);
3508 }
3509
3510 return 0;
3511 }
3512
3513 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3514 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3515 ARRAY_REFs and find the ultimate containing object, which we return.
3516
3517 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3518 bit position, and *PUNSIGNEDP to the signedness of the field.
3519 If the position of the field is variable, we store a tree
3520 giving the variable offset (in units) in *POFFSET.
3521 This offset is in addition to the bit position.
3522 If the position is not variable, we store 0 in *POFFSET.
3523
3524 If any of the extraction expressions is volatile,
3525 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3526
3527 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3528 is a mode that can be used to access the field. In that case, *PBITSIZE
3529 is redundant.
3530
3531 If the field describes a variable-sized object, *PMODE is set to
3532 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3533 this case, but the address of the object can be found. */
3534
3535 tree
3536 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3537 punsignedp, pvolatilep)
3538 tree exp;
3539 int *pbitsize;
3540 int *pbitpos;
3541 tree *poffset;
3542 enum machine_mode *pmode;
3543 int *punsignedp;
3544 int *pvolatilep;
3545 {
3546 tree orig_exp = exp;
3547 tree size_tree = 0;
3548 enum machine_mode mode = VOIDmode;
3549 tree offset = integer_zero_node;
3550
3551 if (TREE_CODE (exp) == COMPONENT_REF)
3552 {
3553 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3554 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3555 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3556 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3557 }
3558 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3559 {
3560 size_tree = TREE_OPERAND (exp, 1);
3561 *punsignedp = TREE_UNSIGNED (exp);
3562 }
3563 else
3564 {
3565 mode = TYPE_MODE (TREE_TYPE (exp));
3566 *pbitsize = GET_MODE_BITSIZE (mode);
3567 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3568 }
3569
3570 if (size_tree)
3571 {
3572 if (TREE_CODE (size_tree) != INTEGER_CST)
3573 mode = BLKmode, *pbitsize = -1;
3574 else
3575 *pbitsize = TREE_INT_CST_LOW (size_tree);
3576 }
3577
3578 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3579 and find the ultimate containing object. */
3580
3581 *pbitpos = 0;
3582
3583 while (1)
3584 {
3585 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3586 {
3587 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3588 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3589 : TREE_OPERAND (exp, 2));
3590 tree constant = integer_zero_node, var = pos;
3591
3592 /* If this field hasn't been filled in yet, don't go
3593 past it. This should only happen when folding expressions
3594 made during type construction. */
3595 if (pos == 0)
3596 break;
3597
3598 /* Assume here that the offset is a multiple of a unit.
3599 If not, there should be an explicitly added constant. */
3600 if (TREE_CODE (pos) == PLUS_EXPR
3601 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3602 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
3603 else if (TREE_CODE (pos) == INTEGER_CST)
3604 constant = pos, var = integer_zero_node;
3605
3606 *pbitpos += TREE_INT_CST_LOW (constant);
3607
3608 if (var)
3609 offset = size_binop (PLUS_EXPR, offset,
3610 size_binop (EXACT_DIV_EXPR, var,
3611 size_int (BITS_PER_UNIT)));
3612 }
3613
3614 else if (TREE_CODE (exp) == ARRAY_REF)
3615 {
3616 /* This code is based on the code in case ARRAY_REF in expand_expr
3617 below. We assume here that the size of an array element is
3618 always an integral multiple of BITS_PER_UNIT. */
3619
3620 tree index = TREE_OPERAND (exp, 1);
3621 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3622 tree low_bound
3623 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3624 tree index_type = TREE_TYPE (index);
3625
3626 if (! integer_zerop (low_bound))
3627 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3628
3629 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
3630 {
3631 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
3632 index);
3633 index_type = TREE_TYPE (index);
3634 }
3635
3636 index = fold (build (MULT_EXPR, index_type, index,
3637 TYPE_SIZE (TREE_TYPE (exp))));
3638
3639 if (TREE_CODE (index) == INTEGER_CST
3640 && TREE_INT_CST_HIGH (index) == 0)
3641 *pbitpos += TREE_INT_CST_LOW (index);
3642 else
3643 offset = size_binop (PLUS_EXPR, offset,
3644 size_binop (FLOOR_DIV_EXPR, index,
3645 size_int (BITS_PER_UNIT)));
3646 }
3647 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3648 && ! ((TREE_CODE (exp) == NOP_EXPR
3649 || TREE_CODE (exp) == CONVERT_EXPR)
3650 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3651 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
3652 != UNION_TYPE))
3653 && (TYPE_MODE (TREE_TYPE (exp))
3654 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3655 break;
3656
3657 /* If any reference in the chain is volatile, the effect is volatile. */
3658 if (TREE_THIS_VOLATILE (exp))
3659 *pvolatilep = 1;
3660 exp = TREE_OPERAND (exp, 0);
3661 }
3662
3663 /* If this was a bit-field, see if there is a mode that allows direct
3664 access in case EXP is in memory. */
3665 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3666 {
3667 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3668 if (mode == BLKmode)
3669 mode = VOIDmode;
3670 }
3671
3672 if (integer_zerop (offset))
3673 offset = 0;
3674
3675 if (offset != 0 && contains_placeholder_p (offset))
3676 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3677
3678 *pmode = mode;
3679 *poffset = offset;
3680 return exp;
3681 }
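
/* Editorial sketch, not part of GNU CC: an example of what
   get_inner_reference reports. Suppose a front end built EXP for
   the access `s.b' in the hypothetical

       struct S { int a; unsigned int b : 3; } s;

   on a target with 32-bit int. A caller would write

       int bitsize, bitpos, unsignedp;
       int volatilep = 0;
       tree offset;
       enum machine_mode mode1;
       tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                         &mode1, &unsignedp, &volatilep);

   and get back the VAR_DECL for `s' as INNER, with BITSIZE == 3,
   BITPOS == 32, OFFSET == 0 (the position is constant), UNSIGNEDP == 1,
   and MODE1 == VOIDmode because a bit-field cannot be fetched in a
   mode of its own. The COMPONENT_REF case of expand_expr below then
   extracts the field with extract_bit_field.  */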
3682 \f
3683 /* Given an rtx VALUE that may contain additions and multiplications,
3684 return an equivalent value that just refers to a register or memory.
3685 This is done by generating instructions to perform the arithmetic
3686 and returning a pseudo-register containing the value.
3687
3688 The returned value may be a REG, SUBREG, MEM or constant. */
3689
3690 rtx
3691 force_operand (value, target)
3692 rtx value, target;
3693 {
3694 register optab binoptab = 0;
3695 /* Use a temporary to force order of execution of calls to
3696 `force_operand'. */
3697 rtx tmp;
3698 register rtx op2;
3699 /* Use subtarget as the target for operand 0 of a binary operation. */
3700 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3701
3702 if (GET_CODE (value) == PLUS)
3703 binoptab = add_optab;
3704 else if (GET_CODE (value) == MINUS)
3705 binoptab = sub_optab;
3706 else if (GET_CODE (value) == MULT)
3707 {
3708 op2 = XEXP (value, 1);
3709 if (!CONSTANT_P (op2)
3710 && !(GET_CODE (op2) == REG && op2 != subtarget))
3711 subtarget = 0;
3712 tmp = force_operand (XEXP (value, 0), subtarget);
3713 return expand_mult (GET_MODE (value), tmp,
3714 force_operand (op2, NULL_RTX),
3715 target, 0);
3716 }
3717
3718 if (binoptab)
3719 {
3720 op2 = XEXP (value, 1);
3721 if (!CONSTANT_P (op2)
3722 && !(GET_CODE (op2) == REG && op2 != subtarget))
3723 subtarget = 0;
3724 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3725 {
3726 binoptab = add_optab;
3727 op2 = negate_rtx (GET_MODE (value), op2);
3728 }
3729
3730 /* Check for an addition with OP2 a constant integer and our first
3731 operand a PLUS of a virtual register and something else. In that
3732 case, we want to emit the sum of the virtual register and the
3733 constant first and then add the other value. This allows virtual
3734 register instantiation to simply modify the constant rather than
3735 creating another one around this addition. */
3736 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3737 && GET_CODE (XEXP (value, 0)) == PLUS
3738 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3739 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3740 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3741 {
3742 rtx temp = expand_binop (GET_MODE (value), binoptab,
3743 XEXP (XEXP (value, 0), 0), op2,
3744 subtarget, 0, OPTAB_LIB_WIDEN);
3745 return expand_binop (GET_MODE (value), binoptab, temp,
3746 force_operand (XEXP (XEXP (value, 0), 1), 0),
3747 target, 0, OPTAB_LIB_WIDEN);
3748 }
3749
3750 tmp = force_operand (XEXP (value, 0), subtarget);
3751 return expand_binop (GET_MODE (value), binoptab, tmp,
3752 force_operand (op2, NULL_RTX),
3753 target, 0, OPTAB_LIB_WIDEN);
3754 /* We give UNSIGNEDP = 0 to expand_binop
3755 because the only operations we are expanding here are signed ones. */
3756 }
3757 return value;
3758 }
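
/* Editorial sketch, not part of GNU CC: force_operand is what turns
   the symbolic sums that EXPAND_SUM mode may return into executable
   insns. Given a hypothetical address value

       (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (const_int 32))

   a caller can do

       rtx addr = force_operand (value, NULL_RTX);

   and receive a pseudo register computed by an explicit multiply
   (through expand_mult, which may emit a shift) followed by an add;
   a constant second operand of MINUS is folded into an addition of
   its negation, as coded above.  */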
3759 \f
3760 /* Subroutine of expand_expr:
3761 save the non-copied parts (LIST) of an expr (LHS), and return a list
3762 which can restore these values to their previous values,
3763 should something modify their storage. */
3764
3765 static tree
3766 save_noncopied_parts (lhs, list)
3767 tree lhs;
3768 tree list;
3769 {
3770 tree tail;
3771 tree parts = 0;
3772
3773 for (tail = list; tail; tail = TREE_CHAIN (tail))
3774 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3775 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3776 else
3777 {
3778 tree part = TREE_VALUE (tail);
3779 tree part_type = TREE_TYPE (part);
3780 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3781 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3782 int_size_in_bytes (part_type), 0);
3783 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (part_type);
3784 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3785 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3786 parts = tree_cons (to_be_saved,
3787 build (RTL_EXPR, part_type, NULL_TREE,
3788 (tree) target),
3789 parts);
3790 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3791 }
3792 return parts;
3793 }
3794
3795 /* Subroutine of expand_expr:
3796 record the non-copied parts (LIST) of an expr (LHS), and return a list
3797 which specifies the initial values of these parts. */
3798
3799 static tree
3800 init_noncopied_parts (lhs, list)
3801 tree lhs;
3802 tree list;
3803 {
3804 tree tail;
3805 tree parts = 0;
3806
3807 for (tail = list; tail; tail = TREE_CHAIN (tail))
3808 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3809 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3810 else
3811 {
3812 tree part = TREE_VALUE (tail);
3813 tree part_type = TREE_TYPE (part);
3814 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3815 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3816 }
3817 return parts;
3818 }
3819
3820 /* Subroutine of expand_expr: return nonzero iff there is no way that
3821 EXP can reference X, which is being modified. */
3822
3823 static int
3824 safe_from_p (x, exp)
3825 rtx x;
3826 tree exp;
3827 {
3828 rtx exp_rtl = 0;
3829 int i, nops;
3830
3831 if (x == 0
3832 /* If EXP has varying size, we MUST use a target since we currently
3833 have no way of allocating temporaries of variable size. So we
3834 assume here that something at a higher level has prevented a
3835 clash. This is somewhat bogus, but the best we can do. Only
3836 do this when X is BLKmode. */
3837 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3838 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
3839 && GET_MODE (x) == BLKmode))
3840 return 1;
3841
3842 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3843 find the underlying pseudo. */
3844 if (GET_CODE (x) == SUBREG)
3845 {
3846 x = SUBREG_REG (x);
3847 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3848 return 0;
3849 }
3850
3851 /* If X is a location in the outgoing argument area, it is always safe. */
3852 if (GET_CODE (x) == MEM
3853 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3854 || (GET_CODE (XEXP (x, 0)) == PLUS
3855 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3856 return 1;
3857
3858 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3859 {
3860 case 'd':
3861 exp_rtl = DECL_RTL (exp);
3862 break;
3863
3864 case 'c':
3865 return 1;
3866
3867 case 'x':
3868 if (TREE_CODE (exp) == TREE_LIST)
3869 return ((TREE_VALUE (exp) == 0
3870 || safe_from_p (x, TREE_VALUE (exp)))
3871 && (TREE_CHAIN (exp) == 0
3872 || safe_from_p (x, TREE_CHAIN (exp))));
3873 else
3874 return 0;
3875
3876 case '1':
3877 return safe_from_p (x, TREE_OPERAND (exp, 0));
3878
3879 case '2':
3880 case '<':
3881 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3882 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3883
3884 case 'e':
3885 case 'r':
3886 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3887 the expression. If it is set, we conflict iff we are that rtx or
3888 both are in memory. Otherwise, we check all operands of the
3889 expression recursively. */
3890
3891 switch (TREE_CODE (exp))
3892 {
3893 case ADDR_EXPR:
3894 return (staticp (TREE_OPERAND (exp, 0))
3895 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3896
3897 case INDIRECT_REF:
3898 if (GET_CODE (x) == MEM)
3899 return 0;
3900 break;
3901
3902 case CALL_EXPR:
3903 exp_rtl = CALL_EXPR_RTL (exp);
3904 if (exp_rtl == 0)
3905 {
3906 /* Assume that the call will clobber all hard registers and
3907 all of memory. */
3908 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3909 || GET_CODE (x) == MEM)
3910 return 0;
3911 }
3912
3913 break;
3914
3915 case RTL_EXPR:
3916 /* If a sequence exists, we would have to scan every instruction
3917 in the sequence to see if it was safe. This is probably not
3918 worthwhile. */
3919 if (RTL_EXPR_SEQUENCE (exp))
3920 return 0;
3921
3922 exp_rtl = RTL_EXPR_RTL (exp);
3923 break;
3924
3925 case WITH_CLEANUP_EXPR:
3926 exp_rtl = RTL_EXPR_RTL (exp);
3927 break;
3928
3929 case CLEANUP_POINT_EXPR:
3930 return safe_from_p (x, TREE_OPERAND (exp, 0));
3931
3932 case SAVE_EXPR:
3933 exp_rtl = SAVE_EXPR_RTL (exp);
3934 break;
3935
3936 case BIND_EXPR:
3937 /* The only operand we look at is operand 1. The rest aren't
3938 part of the expression. */
3939 return safe_from_p (x, TREE_OPERAND (exp, 1));
3940
3941 case METHOD_CALL_EXPR:
3942 /* This takes a rtx argument, but shouldn't appear here. */
3943 abort ();
3944 }
3945
3946 /* If we have an rtx, we do not need to scan our operands. */
3947 if (exp_rtl)
3948 break;
3949
3950 nops = tree_code_length[(int) TREE_CODE (exp)];
3951 for (i = 0; i < nops; i++)
3952 if (TREE_OPERAND (exp, i) != 0
3953 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3954 return 0;
3955 }
3956
3957 /* If we have an rtx, find any enclosed object. Then see if we conflict
3958 with it. */
3959 if (exp_rtl)
3960 {
3961 if (GET_CODE (exp_rtl) == SUBREG)
3962 {
3963 exp_rtl = SUBREG_REG (exp_rtl);
3964 if (GET_CODE (exp_rtl) == REG
3965 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3966 return 0;
3967 }
3968
3969 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3970 are memory and EXP is not readonly. */
3971 return ! (rtx_equal_p (x, exp_rtl)
3972 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3973 && ! TREE_READONLY (exp)));
3974 }
3975
3976 /* If we reach here, it is safe. */
3977 return 1;
3978 }
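
/* Editorial sketch, not part of GNU CC: safe_from_p answers "may EXP
   be evaluated directly into X without clobbering something EXP still
   needs?". For a hypothetical assignment `a = b + a', expanding the
   right-hand side straight into a's location would be wrong, and

       safe_from_p (DECL_RTL (a_decl), rhs)

   returns 0: the recursive walk reaches the VAR_DECL for `a', whose
   DECL_RTL is rtx_equal_p to X. For `a = b + c' with distinct
   pseudos it returns 1, so the caller may pass a's rtx as the
   expansion target.  */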
3979
3980 /* Subroutine of expand_expr: return nonzero iff EXP is an
3981 expression whose type is statically determinable. */
3982
3983 static int
3984 fixed_type_p (exp)
3985 tree exp;
3986 {
3987 if (TREE_CODE (exp) == PARM_DECL
3988 || TREE_CODE (exp) == VAR_DECL
3989 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3990 || TREE_CODE (exp) == COMPONENT_REF
3991 || TREE_CODE (exp) == ARRAY_REF)
3992 return 1;
3993 return 0;
3994 }
3995 \f
3996 /* expand_expr: generate code for computing expression EXP.
3997 An rtx for the computed value is returned. The value is never null.
3998 In the case of a void EXP, const0_rtx is returned.
3999
4000 The value may be stored in TARGET if TARGET is nonzero.
4001 TARGET is just a suggestion; callers must assume that
4002 the rtx returned may not be the same as TARGET.
4003
4004 If TARGET is CONST0_RTX, it means that the value will be ignored.
4005
4006 If TMODE is not VOIDmode, it suggests generating the
4007 result in mode TMODE. But this is done only when convenient.
4008 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4009 TMODE is just a suggestion; callers must assume that
4010 the rtx returned may not have mode TMODE.
4011
4012 Note that TARGET's mode may be neither TMODE nor MODE. In that case, it
4013 probably will not be used.
4014
4015 If MODIFIER is EXPAND_SUM then when EXP is an addition
4016 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4017 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4018 products as above, or REG or MEM, or constant.
4019 Ordinarily in such cases we would output mul or add instructions
4020 and then return a pseudo reg containing the sum.
4021
4022 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4023 it also marks a label as absolutely required (it can't be dead).
4024 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4025 This is used for outputting expressions used in initializers.
4026
4027 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4028 with a constant address even if that address is not normally legitimate.
4029 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
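
/* Editorial sketch, not part of GNU CC: what the modifier changes,
   for a hypothetical source expression `p + 4' with `p' living in
   pseudo register 60:

       expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
           => emits an add insn and returns a pseudo, e.g. (reg:SI 61)

       expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);
           => emits nothing and returns
              (plus:SI (reg:SI 60) (const_int 4))

   The EXPAND_SUM form is useful only to callers that will hand the
   result to memory_address or force_operand.  */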
4030
4031 rtx
4032 expand_expr (exp, target, tmode, modifier)
4033 register tree exp;
4034 rtx target;
4035 enum machine_mode tmode;
4036 enum expand_modifier modifier;
4037 {
4038 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4039 This is static so it will be accessible to our recursive callees. */
4040 static tree placeholder_list = 0;
4041 register rtx op0, op1, temp;
4042 tree type = TREE_TYPE (exp);
4043 int unsignedp = TREE_UNSIGNED (type);
4044 register enum machine_mode mode = TYPE_MODE (type);
4045 register enum tree_code code = TREE_CODE (exp);
4046 optab this_optab;
4047 /* Use subtarget as the target for operand 0 of a binary operation. */
4048 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4049 rtx original_target = target;
4050 /* Maybe defer this until we are sure we are not doing bytecode? */
4051 int ignore = (target == const0_rtx
4052 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4053 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4054 || code == COND_EXPR)
4055 && TREE_CODE (type) == VOID_TYPE));
4056 tree context;
4057
4058
4059 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4060 {
4061 bc_expand_expr (exp);
4062 return NULL;
4063 }
4064
4065 /* Don't use hard regs as subtargets, because the combiner
4066 can only handle pseudo regs. */
4067 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4068 subtarget = 0;
4069 /* Avoid subtargets inside loops,
4070 since they hide some invariant expressions. */
4071 if (preserve_subexpressions_p ())
4072 subtarget = 0;
4073
4074 /* If we are going to ignore this result, we need only do something
4075 if there is a side-effect somewhere in the expression. If there
4076 is, short-circuit the most common cases here. Note that we must
4077 not call expand_expr with anything but const0_rtx in case this
4078 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4079
4080 if (ignore)
4081 {
4082 if (! TREE_SIDE_EFFECTS (exp))
4083 return const0_rtx;
4084
4085 /* Ensure we reference a volatile object even if value is ignored. */
4086 if (TREE_THIS_VOLATILE (exp)
4087 && TREE_CODE (exp) != FUNCTION_DECL
4088 && mode != VOIDmode && mode != BLKmode)
4089 {
4090 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4091 if (GET_CODE (temp) == MEM)
4092 temp = copy_to_reg (temp);
4093 return const0_rtx;
4094 }
4095
4096 if (TREE_CODE_CLASS (code) == '1')
4097 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4098 VOIDmode, modifier);
4099 else if (TREE_CODE_CLASS (code) == '2'
4100 || TREE_CODE_CLASS (code) == '<')
4101 {
4102 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4103 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4104 return const0_rtx;
4105 }
4106 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4107 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4108 /* If the second operand has no side effects, just evaluate
4109 the first. */
4110 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4111 VOIDmode, modifier);
4112
4113 target = 0;
4114 }
4115
4116 /* If we will do cse, generate all results into pseudo registers
4117 since 1) that allows cse to find more things
4118 and 2) otherwise cse could produce an insn the machine
4119 cannot support. */
4120
4121 if (! cse_not_expected && mode != BLKmode && target
4122 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4123 target = subtarget;
4124
4125 switch (code)
4126 {
4127 case LABEL_DECL:
4128 {
4129 tree function = decl_function_context (exp);
4130 /* Handle using a label in a containing function. */
4131 if (function != current_function_decl && function != 0)
4132 {
4133 struct function *p = find_function_data (function);
4134 /* Allocate in the memory associated with the function
4135 that the label is in. */
4136 push_obstacks (p->function_obstack,
4137 p->function_maybepermanent_obstack);
4138
4139 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4140 label_rtx (exp), p->forced_labels);
4141 pop_obstacks ();
4142 }
4143 else if (modifier == EXPAND_INITIALIZER)
4144 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4145 label_rtx (exp), forced_labels);
4146 temp = gen_rtx (MEM, FUNCTION_MODE,
4147 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4148 if (function != current_function_decl && function != 0)
4149 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4150 return temp;
4151 }
4152
4153 case PARM_DECL:
4154 if (DECL_RTL (exp) == 0)
4155 {
4156 error_with_decl (exp, "prior parameter's size depends on `%s'");
4157 return CONST0_RTX (mode);
4158 }
4159
4160 /* ... fall through ... */
4161
4162 case VAR_DECL:
4163 /* If a static var's type was incomplete when the decl was written,
4164 but the type is complete now, lay out the decl now. */
4165 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4166 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4167 {
4168 push_obstacks_nochange ();
4169 end_temporary_allocation ();
4170 layout_decl (exp, 0);
4171 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4172 pop_obstacks ();
4173 }
4174
4175 /* ... fall through ... */
4176
4177 case FUNCTION_DECL:
4178 case RESULT_DECL:
4179 if (DECL_RTL (exp) == 0)
4180 abort ();
4181
4182 /* Ensure the variable is marked as used even if it doesn't go through
4183 a parser. If it hasn't been used yet, write out an external
4184 definition. */
4185 if (! TREE_USED (exp))
4186 {
4187 assemble_external (exp);
4188 TREE_USED (exp) = 1;
4189 }
4190
4191 /* Handle variables inherited from containing functions. */
4192 context = decl_function_context (exp);
4193
4194 /* We treat inline_function_decl as an alias for the current function
4195 because that is the inline function whose vars, types, etc.
4196 are being merged into the current function.
4197 See expand_inline_function. */
4198
4199 if (context != 0 && context != current_function_decl
4200 && context != inline_function_decl
4201 /* If var is static, we don't need a static chain to access it. */
4202 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4203 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4204 {
4205 rtx addr;
4206
4207 /* Mark as non-local and addressable. */
4208 DECL_NONLOCAL (exp) = 1;
4209 mark_addressable (exp);
4210 if (GET_CODE (DECL_RTL (exp)) != MEM)
4211 abort ();
4212 addr = XEXP (DECL_RTL (exp), 0);
4213 if (GET_CODE (addr) == MEM)
4214 addr = gen_rtx (MEM, Pmode,
4215 fix_lexical_addr (XEXP (addr, 0), exp));
4216 else
4217 addr = fix_lexical_addr (addr, exp);
4218 return change_address (DECL_RTL (exp), mode, addr);
4219 }
4220
4221 /* This is the case of an array whose size is to be determined
4222 from its initializer, while the initializer is still being parsed.
4223 See expand_decl. */
4224
4225 if (GET_CODE (DECL_RTL (exp)) == MEM
4226 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4227 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4228 XEXP (DECL_RTL (exp), 0));
4229
4230 /* If DECL_RTL is memory, we are in the normal case and either
4231 the address is not valid or it is not a register and -fforce-addr
4232 is specified, get the address into a register. */
4233
4234 if (GET_CODE (DECL_RTL (exp)) == MEM
4235 && modifier != EXPAND_CONST_ADDRESS
4236 && modifier != EXPAND_SUM
4237 && modifier != EXPAND_INITIALIZER
4238 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
4239 || (flag_force_addr
4240 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4241 return change_address (DECL_RTL (exp), VOIDmode,
4242 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4243
4244 /* If the mode of DECL_RTL does not match that of the decl, it
4245 must be a promoted value. We return a SUBREG of the wanted mode,
4246 but mark it so that we know that it was already extended. */
4247
4248 if (GET_CODE (DECL_RTL (exp)) == REG
4249 && GET_MODE (DECL_RTL (exp)) != mode)
4250 {
4251 /* Get the signedness used for this variable. Ensure we get the
4252 same mode we got when the variable was declared. */
4253 if (GET_MODE (DECL_RTL (exp))
4254 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4255 abort ();
4256
4257 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4258 SUBREG_PROMOTED_VAR_P (temp) = 1;
4259 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4260 return temp;
4261 }
4262
4263 return DECL_RTL (exp);
4264
4265 case INTEGER_CST:
4266 return immed_double_const (TREE_INT_CST_LOW (exp),
4267 TREE_INT_CST_HIGH (exp),
4268 mode);
4269
4270 case CONST_DECL:
4271 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4272
4273 case REAL_CST:
4274 /* If optimized, generate immediate CONST_DOUBLE
4275 which will be turned into memory by reload if necessary.
4276
4277 We used to force a register so that loop.c could see it. But
4278 this does not allow gen_* patterns to perform optimizations with
4279 the constants. It also produces two insns in cases like "x = 1.0;".
4280 On most machines, floating-point constants are not permitted in
4281 many insns, so we'd end up copying it to a register in any case.
4282
4283 Now, we do the copying in expand_binop, if appropriate. */
4284 return immed_real_const (exp);
4285
4286 case COMPLEX_CST:
4287 case STRING_CST:
4288 if (! TREE_CST_RTL (exp))
4289 output_constant_def (exp);
4290
4291 /* TREE_CST_RTL probably contains a constant address.
4292 On RISC machines where a constant address isn't valid,
4293 make some insns to get that address into a register. */
4294 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4295 && modifier != EXPAND_CONST_ADDRESS
4296 && modifier != EXPAND_INITIALIZER
4297 && modifier != EXPAND_SUM
4298 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4299 || (flag_force_addr
4300 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4301 return change_address (TREE_CST_RTL (exp), VOIDmode,
4302 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4303 return TREE_CST_RTL (exp);
4304
4305 case SAVE_EXPR:
4306 context = decl_function_context (exp);
4307
4308 /* We treat inline_function_decl as an alias for the current function
4309 because that is the inline function whose vars, types, etc.
4310 are being merged into the current function.
4311 See expand_inline_function. */
4312 if (context == current_function_decl || context == inline_function_decl)
4313 context = 0;
4314
4315 /* If this is non-local, handle it. */
4316 if (context)
4317 {
4318 temp = SAVE_EXPR_RTL (exp);
4319 if (temp && GET_CODE (temp) == REG)
4320 {
4321 put_var_into_stack (exp);
4322 temp = SAVE_EXPR_RTL (exp);
4323 }
4324 if (temp == 0 || GET_CODE (temp) != MEM)
4325 abort ();
4326 return change_address (temp, mode,
4327 fix_lexical_addr (XEXP (temp, 0), exp));
4328 }
4329 if (SAVE_EXPR_RTL (exp) == 0)
4330 {
4331 if (mode == BLKmode)
4332 {
4333 temp
4334 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4335 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
4336 }
4337 else if (mode == VOIDmode)
4338 temp = const0_rtx;
4339 else
4340 temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));
4341
4342 SAVE_EXPR_RTL (exp) = temp;
4343 if (!optimize && GET_CODE (temp) == REG)
4344 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4345 save_expr_regs);
4346
4347 /* If the mode of TEMP does not match that of the expression, it
4348 must be a promoted value. We pass store_expr a SUBREG of the
4349 wanted mode but mark it so that we know that it was already
4350 extended. Note that `unsignedp' was modified above in
4351 this case. */
4352
4353 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4354 {
4355 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4356 SUBREG_PROMOTED_VAR_P (temp) = 1;
4357 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4358 }
4359
4360 if (temp == const0_rtx)
4361 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4362 else
4363 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4364 }
4365
4366 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4367 must be a promoted value. We return a SUBREG of the wanted mode,
4368 but mark it so that we know that it was already extended. */
4369
4370 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4371 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4372 {
4373 /* Compute the signedness and make the proper SUBREG. */
4374 promote_mode (type, mode, &unsignedp, 0);
4375 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4376 SUBREG_PROMOTED_VAR_P (temp) = 1;
4377 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4378 return temp;
4379 }
4380
4381 return SAVE_EXPR_RTL (exp);
4382
4383 case PLACEHOLDER_EXPR:
4384 /* If there is an object on the head of the placeholder list,
4385 see if some object in its references is of type TYPE. For
4386 further information, see tree.def. */
4387 if (placeholder_list)
4388 {
4389 tree object;
4390 tree old_list = placeholder_list;
4391
4392 for (object = TREE_PURPOSE (placeholder_list);
4393 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4394 != TYPE_MAIN_VARIANT (type))
4395 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4396 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4397 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4398 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4399 object = TREE_OPERAND (object, 0))
4400 ;
4401
4402 if (object != 0
4403 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4404 == TYPE_MAIN_VARIANT (type)))
4405 {
4406 /* Expand this object skipping the list entries before
4407 it was found in case it is also a PLACEHOLDER_EXPR.
4408 In that case, we want to translate it using subsequent
4409 entries. */
4410 placeholder_list = TREE_CHAIN (placeholder_list);
4411 temp = expand_expr (object, original_target, tmode, modifier);
4412 placeholder_list = old_list;
4413 return temp;
4414 }
4415 }
4416
4417 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4418 abort ();
4419
4420 case WITH_RECORD_EXPR:
4421 /* Put the object on the placeholder list, expand our first operand,
4422 and pop the list. */
4423 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4424 placeholder_list);
4425 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4426 tmode, modifier);
4427 placeholder_list = TREE_CHAIN (placeholder_list);
4428 return target;
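
/* Editorial sketch, not part of GNU CC: how the two codes above
   cooperate. For a record whose field positions depend on the object
   itself (e.g. a variable-sized Ada record), the size and offset
   expressions stored in the type contain PLACEHOLDER_EXPR nodes.
   Wrapping a use of such an expression as

       build (WITH_RECORD_EXPR, sizetype, size_expr, the_record)

   pushes THE_RECORD onto placeholder_list while SIZE_EXPR is
   expanded, so every PLACEHOLDER_EXPR of the record's type inside
   SIZE_EXPR expands as a reference to THE_RECORD itself.  */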
4429
4430 case EXIT_EXPR:
4431 expand_exit_loop_if_false (NULL_PTR,
4432 invert_truthvalue (TREE_OPERAND (exp, 0)));
4433 return const0_rtx;
4434
4435 case LOOP_EXPR:
4436 push_temp_slots ();
4437 expand_start_loop (1);
4438 expand_expr_stmt (TREE_OPERAND (exp, 0));
4439 expand_end_loop ();
4440 pop_temp_slots ();
4441
4442 return const0_rtx;
4443
4444 case BIND_EXPR:
4445 {
4446 tree vars = TREE_OPERAND (exp, 0);
4447 int vars_need_expansion = 0;
4448
4449 /* Need to open a binding contour here because
4450 if there are any cleanups they must be contained here. */
4451 expand_start_bindings (0);
4452
4453 /* Mark the corresponding BLOCK for output in its proper place. */
4454 if (TREE_OPERAND (exp, 2) != 0
4455 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4456 insert_block (TREE_OPERAND (exp, 2));
4457
4458 /* If VARS have not yet been expanded, expand them now. */
4459 while (vars)
4460 {
4461 if (DECL_RTL (vars) == 0)
4462 {
4463 vars_need_expansion = 1;
4464 expand_decl (vars);
4465 }
4466 expand_decl_init (vars);
4467 vars = TREE_CHAIN (vars);
4468 }
4469
4470 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4471
4472 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4473
4474 return temp;
4475 }
4476
4477 case RTL_EXPR:
4478 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4479 abort ();
4480 emit_insns (RTL_EXPR_SEQUENCE (exp));
4481 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4482 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4483 free_temps_for_rtl_expr (exp);
4484 return RTL_EXPR_RTL (exp);
4485
4486 case CONSTRUCTOR:
4487 /* If we don't need the result, just ensure we evaluate any
4488 subexpressions. */
4489 if (ignore)
4490 {
4491 tree elt;
4492 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4493 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4494 return const0_rtx;
4495 }
4496
4497 /* All elts simple constants => refer to a constant in memory. But
4498 if this is a non-BLKmode mode, let it store a field at a time
4499 since that should make a CONST_INT or CONST_DOUBLE when we
4500 fold. Likewise, if we have a target we can use, it is best to
4501 store directly into the target unless the type is large enough
4502 that memcpy will be used. If we are making an initializer and
4503 all operands are constant, put it in memory as well. */
4504 else if ((TREE_STATIC (exp)
4505 && ((mode == BLKmode
4506 && ! (target != 0 && safe_from_p (target, exp)))
4507 || TREE_ADDRESSABLE (exp)
4508 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4509 && (move_by_pieces_ninsns
4510 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
4511 TYPE_ALIGN (type) / BITS_PER_UNIT)
4512 > MOVE_RATIO))))
4513 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4514 {
4515 rtx constructor = output_constant_def (exp);
4516 if (modifier != EXPAND_CONST_ADDRESS
4517 && modifier != EXPAND_INITIALIZER
4518 && modifier != EXPAND_SUM
4519 && (! memory_address_p (GET_MODE (constructor),
4520 XEXP (constructor, 0))
4521 || (flag_force_addr
4522 && GET_CODE (XEXP (constructor, 0)) != REG)))
4523 constructor = change_address (constructor, VOIDmode,
4524 XEXP (constructor, 0));
4525 return constructor;
4526 }
4527
4528 else
4529 {
4530 if (target == 0 || ! safe_from_p (target, exp))
4531 {
4532 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4533 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4534 else
4535 {
4536 target
4537 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4538 if (AGGREGATE_TYPE_P (type))
4539 MEM_IN_STRUCT_P (target) = 1;
4540 }
4541 }
4542
4543 if (TREE_READONLY (exp))
4544 {
4545 target = copy_rtx (target);
4546 RTX_UNCHANGING_P (target) = 1;
4547 }
4548
4549 store_constructor (exp, target);
4550 return target;
4551 }
4552
4553 case INDIRECT_REF:
4554 {
4555 tree exp1 = TREE_OPERAND (exp, 0);
4556 tree exp2;
4557
4558 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
4559 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4560 This code has the same general effect as simply doing
4561 expand_expr on the save expr, except that the expression PTR
4562 is computed for use as a memory address. This means different
4563 code, suitable for indexing, may be generated. */
4564 if (TREE_CODE (exp1) == SAVE_EXPR
4565 && SAVE_EXPR_RTL (exp1) == 0
4566 && TYPE_MODE (TREE_TYPE (exp1)) == ptr_mode)
4567 {
4568 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4569 VOIDmode, EXPAND_SUM);
4570 op0 = memory_address (mode, temp);
4571 op0 = copy_all_regs (op0);
4572 SAVE_EXPR_RTL (exp1) = op0;
4573 }
4574 else
4575 {
4576 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4577 op0 = memory_address (mode, op0);
4578 }
4579
4580 temp = gen_rtx (MEM, mode, op0);
4581 /* If address was computed by addition,
4582 mark this as an element of an aggregate. */
4583 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4584 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4585 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4586 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
4587 || (TREE_CODE (exp1) == ADDR_EXPR
4588 && (exp2 = TREE_OPERAND (exp1, 0))
4589 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
4590 MEM_IN_STRUCT_P (temp) = 1;
4591 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4592
4593 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
4594 here, because, in C and C++, the fact that a location is accessed
4595 through a pointer to const does not mean that the value there can
4596 never change. Languages where it can never change should
4597 also set TREE_STATIC. */
4598 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
4599 return temp;
4600 }
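
/* Editorial sketch, not part of GNU CC: the SAVE_EXPR special case
   above matters for compound assignments. For a hypothetical
   `*p += 1', the front end wraps `p' in a SAVE_EXPR so that the load
   and the store share one address computation, roughly

       (indirect_ref (save_expr p)) = (indirect_ref (save_expr p)) + 1

   The first INDIRECT_REF expanded computes the address in EXPAND_SUM
   mode, legitimizes it with memory_address, and caches the result in
   SAVE_EXPR_RTL; the second reference reuses that rtx instead of
   recomputing `p'.  */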
4601
4602 case ARRAY_REF:
4603 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4604 abort ();
4605
4606 {
4607 tree array = TREE_OPERAND (exp, 0);
4608 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4609 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4610 tree index = TREE_OPERAND (exp, 1);
4611 tree index_type = TREE_TYPE (index);
4612 int i;
4613
4614 if (TREE_CODE (low_bound) != INTEGER_CST
4615 && contains_placeholder_p (low_bound))
4616 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4617
4618 /* Optimize the special-case of a zero lower bound.
4619
4620 We convert the low_bound to sizetype to avoid some problems
4621 with constant folding. (E.g. suppose the lower bound is 1,
4622 and its mode is QI. Without the conversion, (ARRAY
4623 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4624 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4625
4626 But sizetype isn't quite right either (especially if
4627 the lowbound is negative). FIXME */
4628
4629 if (! integer_zerop (low_bound))
4630 index = fold (build (MINUS_EXPR, index_type, index,
4631 convert (sizetype, low_bound)));
4632
4633 if ((TREE_CODE (index) != INTEGER_CST
4634 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4635 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
4636 {
4637 /* Nonconstant array index or nonconstant element size, and
4638 not an array in an unaligned (packed) structure field.
4639 Generate the tree for *(&array+index) and expand that,
4640 except do it in a language-independent way
4641 and don't complain about non-lvalue arrays.
4642 `mark_addressable' should already have been called
4643 for any array for which this case will be reached. */
4644
4645 /* Don't forget the const or volatile flag from the array
4646 element. */
4647 tree variant_type = build_type_variant (type,
4648 TREE_READONLY (exp),
4649 TREE_THIS_VOLATILE (exp));
4650 tree array_adr = build1 (ADDR_EXPR,
4651 build_pointer_type (variant_type), array);
4652 tree elt;
4653 tree size = size_in_bytes (type);
4654
4655 /* Convert the integer argument to a type the same size as sizetype
4656 so the multiply won't overflow spuriously. */
4657 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4658 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4659 index);
4660
4661 if (TREE_CODE (size) != INTEGER_CST
4662 && contains_placeholder_p (size))
4663 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4664
4665 /* Don't think the address has side effects
4666 just because the array does.
4667 (In some cases the address might have side effects,
4668 and we fail to record that fact here. However, it should not
4669 matter, since expand_expr should not care.) */
4670 TREE_SIDE_EFFECTS (array_adr) = 0;
4671
4672 elt
4673 = build1
4674 (INDIRECT_REF, type,
4675 fold (build (PLUS_EXPR,
4676 TYPE_POINTER_TO (variant_type),
4677 array_adr,
4678 fold
4679 (build1
4680 (NOP_EXPR,
4681 TYPE_POINTER_TO (variant_type),
4682 fold (build (MULT_EXPR, TREE_TYPE (index),
4683 index,
4684 convert (TREE_TYPE (index),
4685 size))))))));
4686
4687 /* Volatility, etc., of new expression is same as old
4688 expression. */
4689 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4690 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4691 TREE_READONLY (elt) = TREE_READONLY (exp);
4692
4693 return expand_expr (elt, target, tmode, modifier);
4694 }
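
/* Editorial sketch, not part of GNU CC: the rewrite performed by the
   branch above. For a hypothetical `int a[n]' indexed by a variable
   `i', it builds and expands the equivalent of

       *(int *) ((char *) &a + i * sizeof (int))

   that is, (indirect_ref (plus (addr_expr a) (nop_expr (mult i 4)))),
   so the ordinary INDIRECT_REF and PLUS_EXPR machinery generates the
   addressing arithmetic.  */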
4695
4696 /* Fold an expression like: "foo"[2].
4697 This is not done in fold so it won't happen inside &.
4698 Don't fold if this is for wide characters since it's too
4699 difficult to do correctly and this is a very rare case. */
4700
4701 if (TREE_CODE (array) == STRING_CST
4702 && TREE_CODE (index) == INTEGER_CST
4703 && !TREE_INT_CST_HIGH (index)
4704 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4705 && GET_MODE_CLASS (mode) == MODE_INT
4706 && GET_MODE_SIZE (mode) == 1)
4707 return GEN_INT (TREE_STRING_POINTER (array)[i]);
4708
4709 /* If this is a constant index into a constant array,
4710 just get the value from the array. Handle both the cases when
4711 we have an explicit constructor and when our operand is a variable
4712 that was declared const. */
4713
4714 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4715 {
4716 if (TREE_CODE (index) == INTEGER_CST
4717 && TREE_INT_CST_HIGH (index) == 0)
4718 {
4719 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4720
4721 i = TREE_INT_CST_LOW (index);
4722 while (elem && i--)
4723 elem = TREE_CHAIN (elem);
4724 if (elem)
4725 return expand_expr (fold (TREE_VALUE (elem)), target,
4726 tmode, modifier);
4727 }
4728 }
4729
4730 else if (optimize >= 1
4731 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4732 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4733 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4734 {
4735 if (TREE_CODE (index) == INTEGER_CST
4736 && TREE_INT_CST_HIGH (index) == 0)
4737 {
4738 tree init = DECL_INITIAL (array);
4739
4740 i = TREE_INT_CST_LOW (index);
4741 if (TREE_CODE (init) == CONSTRUCTOR)
4742 {
4743 tree elem = CONSTRUCTOR_ELTS (init);
4744
4745 while (elem
4746 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4747 elem = TREE_CHAIN (elem);
4748 if (elem)
4749 return expand_expr (fold (TREE_VALUE (elem)), target,
4750 tmode, modifier);
4751 }
4752 else if (TREE_CODE (init) == STRING_CST
4753 && i < TREE_STRING_LENGTH (init))
4754 return GEN_INT (TREE_STRING_POINTER (init)[i]);
4755 }
4756 }
4757 }
4758
4759 /* Treat array-ref with constant index as a component-ref. */
4760
4761 case COMPONENT_REF:
4762 case BIT_FIELD_REF:
4763 /* If the operand is a CONSTRUCTOR, we can just extract the
4764 appropriate field if it is present. Don't do this if we have
4765 already written the data since we want to refer to that copy
4766 and varasm.c assumes that's what we'll do. */
4767 if (code != ARRAY_REF
4768 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4769 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4770 {
4771 tree elt;
4772
4773 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4774 elt = TREE_CHAIN (elt))
4775 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4776 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4777 }
4778
4779 {
4780 enum machine_mode mode1;
4781 int bitsize;
4782 int bitpos;
4783 tree offset;
4784 int volatilep = 0;
4785 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4786 &mode1, &unsignedp, &volatilep);
4787 int alignment;
4788
4789 /* If we got back the original object, something is wrong. Perhaps
4790 we are evaluating an expression too early. In any event, don't
4791 infinitely recurse. */
4792 if (tem == exp)
4793 abort ();
4794
4795 /* In some cases, we will be offsetting OP0's address by a constant.
4796 So get it as a sum, if possible. If we will be using it
4797 directly in an insn, we validate it.
4798
4799 If TEM's type is a union of variable size, pass TARGET to the inner
4800 computation, since it will need a temporary and TARGET is known
4801 to suffice. This occurs in unchecked conversion in Ada. */
4802
4803 op0 = expand_expr (tem,
4804 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
4805 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
4806 != INTEGER_CST)
4807 ? target : NULL_RTX),
4808 VOIDmode, EXPAND_SUM);
4809
4810 /* If this is a constant, put it into a register if it is a
4811 legitimate constant and memory if it isn't. */
4812 if (CONSTANT_P (op0))
4813 {
4814 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4815 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4816 op0 = force_reg (mode, op0);
4817 else
4818 op0 = validize_mem (force_const_mem (mode, op0));
4819 }
4820
4821 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4822 if (offset != 0)
4823 {
4824 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4825
4826 if (GET_CODE (op0) != MEM)
4827 abort ();
4828 op0 = change_address (op0, VOIDmode,
4829 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
4830 force_reg (ptr_mode, offset_rtx)));
4831 /* If we have a variable offset, the known alignment
4832 is only that of the innermost structure containing the field.
4833 (Actually, we could sometimes do better by using the
4834 size of an element of the innermost array, but no need.) */
4835 if (TREE_CODE (exp) == COMPONENT_REF
4836 || TREE_CODE (exp) == BIT_FIELD_REF)
4837 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4838 / BITS_PER_UNIT);
4839 }
4840
4841 /* Don't forget about volatility even if this is a bitfield. */
4842 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4843 {
4844 op0 = copy_rtx (op0);
4845 MEM_VOLATILE_P (op0) = 1;
4846 }
4847
4848 /* In cases where an aligned union has an unaligned object
4849 as a field, we might be extracting a BLKmode value from
4850 an integer-mode (e.g., SImode) object. Handle this case
4851 by doing the extract into an object as wide as the field
4852 (which we know to be the width of a basic mode), then
4853 storing into memory, and changing the mode to BLKmode. */
4854 if (mode1 == VOIDmode
4855 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4856 || (modifier != EXPAND_CONST_ADDRESS
4857 && modifier != EXPAND_SUM
4858 && modifier != EXPAND_INITIALIZER
4859 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
4860 /* If the field isn't aligned enough to fetch as a memref,
4861 fetch it as a bit field. */
4862 || (SLOW_UNALIGNED_ACCESS
4863 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4864 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
4865 {
4866 enum machine_mode ext_mode = mode;
4867
4868 if (ext_mode == BLKmode)
4869 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4870
4871 if (ext_mode == BLKmode)
4872 abort ();
4873
4874 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4875 unsignedp, target, ext_mode, ext_mode,
4876 alignment,
4877 int_size_in_bytes (TREE_TYPE (tem)));
4878 if (mode == BLKmode)
4879 {
4880 rtx new = assign_stack_temp (ext_mode,
4881 bitsize / BITS_PER_UNIT, 0);
4882
4883 emit_move_insn (new, op0);
4884 op0 = copy_rtx (new);
4885 PUT_MODE (op0, BLKmode);
4886 MEM_IN_STRUCT_P (op0) = 1;
4887 }
4888
4889 return op0;
4890 }
4891
4892 /* If the result is BLKmode, use that to access the object
4893 now as well. */
4894 if (mode == BLKmode)
4895 mode1 = BLKmode;
4896
4897 /* Get a reference to just this component. */
4898 if (modifier == EXPAND_CONST_ADDRESS
4899 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4900 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4901 (bitpos / BITS_PER_UNIT)));
4902 else
4903 op0 = change_address (op0, mode1,
4904 plus_constant (XEXP (op0, 0),
4905 (bitpos / BITS_PER_UNIT)));
4906 MEM_IN_STRUCT_P (op0) = 1;
4907 MEM_VOLATILE_P (op0) |= volatilep;
4908 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4909 return op0;
4910 if (target == 0)
4911 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4912 convert_move (target, op0, unsignedp);
4913 return target;
4914 }
4915
4916 case OFFSET_REF:
4917 {
4918 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4919 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4920 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4921 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4922 MEM_IN_STRUCT_P (temp) = 1;
4923 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4924 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4925 a location is accessed through a pointer to const does not mean
4926 that the value there can never change. */
4927 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4928 #endif
4929 return temp;
4930 }
4931
4932 /* Intended for a reference to a buffer of a file-object in Pascal.
4933 But it's not certain that a special tree code will really be
4934 necessary for these. INDIRECT_REF might work for them. */
4935 case BUFFER_REF:
4936 abort ();
4937
4938 case IN_EXPR:
4939 {
4940 /* Pascal set IN expression.
4941
4942 Algorithm:
4943 rlo = set_low - (set_low%bits_per_word);
4944 the_word = set [ (index - rlo)/bits_per_word ];
4945 bit_index = index % bits_per_word;
4946 bitmask = 1 << bit_index;
4947 return !!(the_word & bitmask); */
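/* Editorial worked example, not part of GNU CC, taking bits_per_word
   to be BITS_PER_UNIT == 8 as the code below does: for a set whose
   domain is 3..20 and index == 13,

       rlo = 3 - (3 % 8) = 0
       the_word = set[(13 - 0) / 8] = set[1]
       bit_index = 13 % 8 = 5
       bitmask = 1 << 5 = 32

   so the result is bit 5 of the second byte of the set.  */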
4948
4949 tree set = TREE_OPERAND (exp, 0);
4950 tree index = TREE_OPERAND (exp, 1);
4951 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
4952 tree set_type = TREE_TYPE (set);
4953 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4954 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4955 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4956 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4957 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4958 rtx setval = expand_expr (set, 0, VOIDmode, 0);
4959 rtx setaddr = XEXP (setval, 0);
4960 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4961 rtx rlow;
4962 rtx diff, quo, rem, addr, bit, result;
4963
4964 preexpand_calls (exp);
4965
4966 /* If domain is empty, answer is no. Likewise if index is constant
4967 and out of bounds. */
4968 if (((TREE_CODE (set_high_bound) == INTEGER_CST
4969 && TREE_CODE (set_low_bound) == INTEGER_CST
4970 && tree_int_cst_lt (set_high_bound, set_low_bound))
4971 || (TREE_CODE (index) == INTEGER_CST
4972 && TREE_CODE (set_low_bound) == INTEGER_CST
4973 && tree_int_cst_lt (index, set_low_bound))
4974 || (TREE_CODE (set_high_bound) == INTEGER_CST
4975 && TREE_CODE (index) == INTEGER_CST
4976 && tree_int_cst_lt (set_high_bound, index))))
4977 return const0_rtx;
4978
4979 if (target == 0)
4980 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4981
4982 /* If we get here, we have to generate the code for both cases
4983 (in range and out of range). */
4984
4985 op0 = gen_label_rtx ();
4986 op1 = gen_label_rtx ();
4987
4988 if (! (GET_CODE (index_val) == CONST_INT
4989 && GET_CODE (lo_r) == CONST_INT))
4990 {
4991 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4992 GET_MODE (index_val), iunsignedp, 0);
4993 emit_jump_insn (gen_blt (op1));
4994 }
4995
4996 if (! (GET_CODE (index_val) == CONST_INT
4997 && GET_CODE (hi_r) == CONST_INT))
4998 {
4999 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5000 GET_MODE (index_val), iunsignedp, 0);
5001 emit_jump_insn (gen_bgt (op1));
5002 }
5003
5004 /* Calculate the element number of bit zero in the first word
5005 of the set. */
5006 if (GET_CODE (lo_r) == CONST_INT)
5007 rlow = GEN_INT (INTVAL (lo_r)
5008 & ~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1));
5009 else
5010 rlow = expand_binop (index_mode, and_optab, lo_r,
5011 GEN_INT (~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1)),
5012 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5012 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5013
5014 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5015 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5016
5017 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5018 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5019 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5020 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5021
5022 addr = memory_address (byte_mode,
5023 expand_binop (index_mode, add_optab, diff,
5024 setaddr, NULL_RTX, iunsignedp,
5025 OPTAB_LIB_WIDEN));
5026
5027 /* Extract the bit we want to examine. */
5028 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5029 gen_rtx (MEM, byte_mode, addr),
5030 make_tree (TREE_TYPE (index), rem),
5031 NULL_RTX, 1);
5032 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5033 GET_MODE (target) == byte_mode ? target : 0,
5034 1, OPTAB_LIB_WIDEN);
5035
5036 if (result != target)
5037 convert_move (target, result, 1);
5038
5039 /* Output the code to handle the out-of-range case. */
5040 emit_jump (op0);
5041 emit_label (op1);
5042 emit_move_insn (target, const0_rtx);
5043 emit_label (op0);
5044 return target;
5045 }
5046
5047 case WITH_CLEANUP_EXPR:
5048 if (RTL_EXPR_RTL (exp) == 0)
5049 {
5050 RTL_EXPR_RTL (exp)
5051 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5052 cleanups_this_call
5053 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5054 /* That's it for this cleanup. */
5055 TREE_OPERAND (exp, 2) = 0;
5056 (*interim_eh_hook) (NULL_TREE);
5057 }
5058 return RTL_EXPR_RTL (exp);
5059
5060 case CLEANUP_POINT_EXPR:
5061 {
5062 extern int temp_slot_level;
5063 tree old_cleanups = cleanups_this_call;
5064 int old_temp_level = target_temp_slot_level;
5065 push_temp_slots ();
5066 target_temp_slot_level = temp_slot_level;
5067 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5068 /* If we're going to use this value, load it up now. */
5069 if (! ignore)
5070 op0 = force_not_mem (op0);
5071 expand_cleanups_to (old_cleanups);
5072 preserve_temp_slots (op0);
5073 free_temp_slots ();
5074 pop_temp_slots ();
5075 target_temp_slot_level = old_temp_level;
5076 }
5077 return op0;
5078
5079 case CALL_EXPR:
5080 /* Check for a built-in function. */
5081 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5082 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5083 == FUNCTION_DECL)
5084 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5085 return expand_builtin (exp, target, subtarget, tmode, ignore);
5086
5087 /* If this call was expanded already by preexpand_calls,
5088 just return the result we got. */
5089 if (CALL_EXPR_RTL (exp) != 0)
5090 return CALL_EXPR_RTL (exp);
5091
5092 return expand_call (exp, target, ignore);
5093
5094 case NON_LVALUE_EXPR:
5095 case NOP_EXPR:
5096 case CONVERT_EXPR:
5097 case REFERENCE_EXPR:
5098 if (TREE_CODE (type) == UNION_TYPE)
5099 {
5100 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5101 if (target == 0)
5102 {
5103 if (mode == BLKmode)
5104 {
5105 if (TYPE_SIZE (type) == 0
5106 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5107 abort ();
5108 target = assign_stack_temp (BLKmode,
5109 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5110 + BITS_PER_UNIT - 1)
5111 / BITS_PER_UNIT, 0);
5112 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
5113 }
5114 else
5115 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5116 }
5117
5118 if (GET_CODE (target) == MEM)
5119 /* Store data into beginning of memory target. */
5120 store_expr (TREE_OPERAND (exp, 0),
5121 change_address (target, TYPE_MODE (valtype), 0), 0);
5122
5123 else if (GET_CODE (target) == REG)
5124 /* Store this field into a union of the proper type. */
5125 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5126 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5127 VOIDmode, 0, 1,
5128 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5129 else
5130 abort ();
5131
5132 /* Return the entire union. */
5133 return target;
5134 }
5135
5136 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5137 {
5138 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5139 modifier);
5140
5141 /* If the signedness of the conversion differs and OP0 is
5142 a promoted SUBREG, clear that indication since we now
5143 have to do the proper extension. */
5144 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5145 && GET_CODE (op0) == SUBREG)
5146 SUBREG_PROMOTED_VAR_P (op0) = 0;
5147
5148 return op0;
5149 }
5150
5151 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5152 if (GET_MODE (op0) == mode)
5153 return op0;
5154
5155 /* If OP0 is a constant, just convert it into the proper mode. */
5156 if (CONSTANT_P (op0))
5157 return
5158 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5159 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5160
5161 if (modifier == EXPAND_INITIALIZER)
5162 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5163
5164 if (flag_force_mem && GET_CODE (op0) == MEM)
5165 op0 = copy_to_reg (op0);
5166
5167 if (target == 0)
5168 return
5169 convert_to_mode (mode, op0,
5170 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5171 else
5172 convert_move (target, op0,
5173 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5174 return target;
5175
5176 case PLUS_EXPR:
5177 /* We come here from MINUS_EXPR when the second operand is a constant. */
5178 plus_expr:
5179 this_optab = add_optab;
5180
5181 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5182 something else, make sure we add the register to the constant and
5183 then to the other thing. This case can occur during strength
5184 reduction and doing it this way will produce better code if the
5185 frame pointer or argument pointer is eliminated.
5186
5187 fold-const.c will ensure that the constant is always in the inner
5188 PLUS_EXPR, so the only case we need to do anything about is if
5189 sp, ap, or fp is our second argument, in which case we must swap
5190 the innermost first argument and our second argument. */
5191
5192 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5193 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5194 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5195 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5196 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5197 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5198 {
5199 tree t = TREE_OPERAND (exp, 1);
5200
5201 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5202 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5203 }
5204
5205 /* If the result is to be ptr_mode and we are adding an integer to
5206 something, we might be forming a constant. So try to use
5207 plus_constant. If it produces a sum and we can't accept it,
5208 use force_operand. This allows P = &ARR[const] to generate
5209 efficient code on machines where a SYMBOL_REF is not a valid
5210 address.
5211
5212 If this is an EXPAND_SUM call, always return the sum. */
5213 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5214 || mode == ptr_mode)
5215 {
5216 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5217 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5218 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5219 {
5220 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5221 EXPAND_SUM);
5222 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5223 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5224 op1 = force_operand (op1, target);
5225 return op1;
5226 }
5227
5228 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5229 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5230 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5231 {
5232 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5233 EXPAND_SUM);
5234 if (! CONSTANT_P (op0))
5235 {
5236 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5237 VOIDmode, modifier);
5238 /* Don't go to both_summands if modifier
5239 says it's not right to return a PLUS. */
5240 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5241 goto binop2;
5242 goto both_summands;
5243 }
5244 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5245 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5246 op0 = force_operand (op0, target);
5247 return op0;
5248 }
5249 }
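
/* Editorial sketch, not part of GNU CC: the plus_constant path above
   is what makes a hypothetical `p = &arr[5]' cheap. With `arr' an
   int array whose address is a SYMBOL_REF, the inner expansion
   yields (symbol_ref "arr"), plus_constant folds the scaled index
   into it, and the caller receives

       (const:SI (plus:SI (symbol_ref "arr") (const_int 20)))

   instead of an explicit add insn; force_operand, called just above
   when the modifier does not permit a bare sum, recovers insns if
   the sum is not usable directly.  */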
5250
5251 /* No sense saving up arithmetic to be done
5252 if it's all in the wrong mode to form part of an address.
5253 And force_operand won't know whether to sign-extend or
5254 zero-extend. */
5255 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5256 || mode != ptr_mode)
5257 goto binop;
5258
5259 preexpand_calls (exp);
5260 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5261 subtarget = 0;
5262
5263 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5264 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5265
5266 both_summands:
5267 /* Make sure any term that's a sum with a constant comes last. */
5268 if (GET_CODE (op0) == PLUS
5269 && CONSTANT_P (XEXP (op0, 1)))
5270 {
5271 temp = op0;
5272 op0 = op1;
5273 op1 = temp;
5274 }
5275 /* If adding to a sum including a constant,
5276 associate it to put the constant outside. */
5277 if (GET_CODE (op1) == PLUS
5278 && CONSTANT_P (XEXP (op1, 1)))
5279 {
5280 rtx constant_term = const0_rtx;
5281
5282 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5283 if (temp != 0)
5284 op0 = temp;
5285 /* Ensure that MULT comes first if there is one. */
5286 else if (GET_CODE (op0) == MULT)
5287 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5288 else
5289 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5290
5291 /* Let's also eliminate constants from op0 if possible. */
5292 op0 = eliminate_constant_term (op0, &constant_term);
5293
5294 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5295 their sum should be a constant. Form it into OP1, since the
5296 result we want will then be OP0 + OP1. */
5297
5298 temp = simplify_binary_operation (PLUS, mode, constant_term,
5299 XEXP (op1, 1));
5300 if (temp != 0)
5301 op1 = temp;
5302 else
5303 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5304 }
5305
5306 /* Put a constant term last and put a multiplication first. */
5307 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5308 temp = op1, op1 = op0, op0 = temp;
5309
5310 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5311 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5312
5313 case MINUS_EXPR:
5314 /* For initializers, we are allowed to return a MINUS of two
5315 symbolic constants. Here we handle all cases when both operands
5316 are constant. */
5317 /* Handle difference of two symbolic constants,
5318 for the sake of an initializer. */
5319 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5320 && really_constant_p (TREE_OPERAND (exp, 0))
5321 && really_constant_p (TREE_OPERAND (exp, 1)))
5322 {
5323 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5324 VOIDmode, modifier);
5325 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5326 VOIDmode, modifier);
5327
5328 /* If the last operand is a CONST_INT, use plus_constant of
5329 the negated constant. Else make the MINUS. */
5330 if (GET_CODE (op1) == CONST_INT)
5331 return plus_constant (op0, - INTVAL (op1));
5332 else
5333 return gen_rtx (MINUS, mode, op0, op1);
5334 }
5335 /* Convert A - const to A + (-const). */
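/* For example, x - 3 becomes x + (-3) and is handled by the PLUS_EXPR
   code above.  When TYPE is unsigned, the negation is instead done in the
   corresponding signed type (same machine mode, so no extra rtl results),
   because the constant cannot safely be negated in the unsigned TYPE.  */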
5336 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5337 {
5338 tree negated = fold (build1 (NEGATE_EXPR, type,
5339 TREE_OPERAND (exp, 1)));
5340
5341 /* Deal with the case where we can't negate the constant
5342 in TYPE. */
5343 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5344 {
5345 tree newtype = signed_type (type);
5346 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5347 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5348 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5349
5350 if (! TREE_OVERFLOW (newneg))
5351 return expand_expr (convert (type,
5352 build (PLUS_EXPR, newtype,
5353 newop0, newneg)),
5354 target, tmode, modifier);
5355 }
5356 else
5357 {
5358 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5359 goto plus_expr;
5360 }
5361 }
5362 this_optab = sub_optab;
5363 goto binop;
5364
5365 case MULT_EXPR:
5366 preexpand_calls (exp);
5367 /* If first operand is constant, swap them.
5368 Thus the following special case checks need only
5369 check the second operand. */
5370 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5371 {
5372 register tree t1 = TREE_OPERAND (exp, 0);
5373 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5374 TREE_OPERAND (exp, 1) = t1;
5375 }
5376
5377 /* Attempt to return something suitable for generating an
5378 indexed address, for machines that support that. */
5379
5380 if (modifier == EXPAND_SUM && mode == ptr_mode
5381 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5382 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5383 {
5384 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
5385
5386 /* Apply distributive law if OP0 is x+c. */
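/* For example, with OP0 = (plus x (const_int 4)) and a constant
   multiplier of 3, this returns (plus (mult x (const_int 3))
   (const_int 12)), keeping the result in a form usable as an indexed
   address.  */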
5387 if (GET_CODE (op0) == PLUS
5388 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5389 return gen_rtx (PLUS, mode,
5390 gen_rtx (MULT, mode, XEXP (op0, 0),
5391 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5392 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5393 * INTVAL (XEXP (op0, 1))));
5394
5395 if (GET_CODE (op0) != REG)
5396 op0 = force_operand (op0, NULL_RTX);
5397 if (GET_CODE (op0) != REG)
5398 op0 = copy_to_mode_reg (mode, op0);
5399
5400 return gen_rtx (MULT, mode, op0,
5401 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
5402 }
5403
5404 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5405 subtarget = 0;
5406
5407 /* Check for multiplying things that have been extended
5408 from a narrower type. If this machine supports multiplying
5409 in that narrower type with a result in the desired type,
5410 do it that way, and avoid the explicit type-conversion. */
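/* For example (illustrative), (int) (short) a * (int) (short) b on a
   machine with a 16x16->32 widening multiply pattern can apply
   smul_widen_optab to the narrow operands directly, rather than
   extending both to int and doing a full-width multiply.  */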
5411 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5412 && TREE_CODE (type) == INTEGER_TYPE
5413 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5414 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5415 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5416 && int_fits_type_p (TREE_OPERAND (exp, 1),
5417 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5418 /* Don't use a widening multiply if a shift will do. */
5419 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5420 > HOST_BITS_PER_WIDE_INT)
5421 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5422 ||
5423 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5424 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5425 ==
5426 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5427 /* If both operands are extended, they must either both
5428 be zero-extended or both be sign-extended. */
5429 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5430 ==
5431 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5432 {
5433 enum machine_mode innermode
5434 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5435 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5436 ? umul_widen_optab : smul_widen_optab);
5437 if (mode == GET_MODE_WIDER_MODE (innermode)
5438 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5439 {
5440 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5441 NULL_RTX, VOIDmode, 0);
5442 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5443 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5444 VOIDmode, 0);
5445 else
5446 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5447 NULL_RTX, VOIDmode, 0);
5448 goto binop2;
5449 }
5450 }
5451 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5452 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5453 return expand_mult (mode, op0, op1, target, unsignedp);
5454
5455 case TRUNC_DIV_EXPR:
5456 case FLOOR_DIV_EXPR:
5457 case CEIL_DIV_EXPR:
5458 case ROUND_DIV_EXPR:
5459 case EXACT_DIV_EXPR:
5460 preexpand_calls (exp);
5461 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5462 subtarget = 0;
5463 /* Possible optimization: compute the dividend with EXPAND_SUM;
5464 then, if the divisor is constant, we can optimize the case
5465 where some terms of the dividend have coefficients divisible by it. */
5466 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5467 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5468 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5469
5470 case RDIV_EXPR:
5471 this_optab = flodiv_optab;
5472 goto binop;
5473
5474 case TRUNC_MOD_EXPR:
5475 case FLOOR_MOD_EXPR:
5476 case CEIL_MOD_EXPR:
5477 case ROUND_MOD_EXPR:
5478 preexpand_calls (exp);
5479 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5480 subtarget = 0;
5481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5482 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5483 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5484
5485 case FIX_ROUND_EXPR:
5486 case FIX_FLOOR_EXPR:
5487 case FIX_CEIL_EXPR:
5488 abort (); /* Not used for C. */
5489
5490 case FIX_TRUNC_EXPR:
5491 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5492 if (target == 0)
5493 target = gen_reg_rtx (mode);
5494 expand_fix (target, op0, unsignedp);
5495 return target;
5496
5497 case FLOAT_EXPR:
5498 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5499 if (target == 0)
5500 target = gen_reg_rtx (mode);
5501 /* expand_float can't figure out what to do if FROM has VOIDmode.
5502 So give it the correct mode. With -O, cse will optimize this. */
5503 if (GET_MODE (op0) == VOIDmode)
5504 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5505 op0);
5506 expand_float (target, op0,
5507 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5508 return target;
5509
5510 case NEGATE_EXPR:
5511 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5512 temp = expand_unop (mode, neg_optab, op0, target, 0);
5513 if (temp == 0)
5514 abort ();
5515 return temp;
5516
5517 case ABS_EXPR:
5518 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5519
5520 /* Handle complex values specially. */
5521 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5522 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5523 return expand_complex_abs (mode, op0, target, unsignedp);
5524
5525 /* Unsigned abs is simply the operand. Testing here means we don't
5526 risk generating incorrect code below. */
5527 if (TREE_UNSIGNED (type))
5528 return op0;
5529
5530 return expand_abs (mode, op0, target, unsignedp,
5531 safe_from_p (target, TREE_OPERAND (exp, 0)));
5532
5533 case MAX_EXPR:
5534 case MIN_EXPR:
5535 target = original_target;
5536 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5537 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5538 || GET_MODE (target) != mode
5539 || (GET_CODE (target) == REG
5540 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5541 target = gen_reg_rtx (mode);
5542 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5543 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5544
5545 /* First try to do it with a special MIN or MAX instruction.
5546 If that does not win, use a conditional jump to select the proper
5547 value. */
5548 this_optab = (TREE_UNSIGNED (type)
5549 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5550 : (code == MIN_EXPR ? smin_optab : smax_optab));
5551
5552 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5553 OPTAB_WIDEN);
5554 if (temp != 0)
5555 return temp;
5556
5557 /* At this point, a MEM target is no longer useful; we will get better
5558 code without it. */
5559
5560 if (GET_CODE (target) == MEM)
5561 target = gen_reg_rtx (mode);
5562
5563 if (target != op0)
5564 emit_move_insn (target, op0);
5565
5566 op0 = gen_label_rtx ();
5567
5568 /* If this mode is an integer too wide to compare properly,
5569 compare word by word. Rely on cse to optimize constant cases. */
5570 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
5571 {
5572 if (code == MAX_EXPR)
5573 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5574 target, op1, NULL_RTX, op0);
5575 else
5576 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5577 op1, target, NULL_RTX, op0);
5578 emit_move_insn (target, op1);
5579 }
5580 else
5581 {
5582 if (code == MAX_EXPR)
5583 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5584 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5585 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5586 else
5587 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5588 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5589 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5590 if (temp == const0_rtx)
5591 emit_move_insn (target, op1);
5592 else if (temp != const_true_rtx)
5593 {
5594 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5595 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5596 else
5597 abort ();
5598 emit_move_insn (target, op1);
5599 }
5600 }
5601 emit_label (op0);
5602 return target;
5603
5604 case BIT_NOT_EXPR:
5605 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5606 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5607 if (temp == 0)
5608 abort ();
5609 return temp;
5610
5611 case FFS_EXPR:
5612 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5613 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5614 if (temp == 0)
5615 abort ();
5616 return temp;
5617
5618 /* ??? Can optimize bitwise operations with one arg constant.
5619 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5620 and (a bitwise1 b) bitwise2 b (etc)
5621 but that is probably not worthwhile. */
5622
5623 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
5624 boolean values when we want in all cases to compute both of them. In
5625 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5626 as actual zero-or-1 values and then bitwise anding. In cases where
5627 there cannot be any side effects, better code would be made by
5628 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5629 how to recognize those cases. */
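/* For example, for boolean-valued A and B without side effects,
   TRUTH_AND_EXPR computes both operands as 0-or-1 values and ANDs them
   in a single operation, whereas TRUTH_ANDIF_EXPR (A && B) must branch
   around the evaluation of B.  */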
5630
5631 case TRUTH_AND_EXPR:
5632 case BIT_AND_EXPR:
5633 this_optab = and_optab;
5634 goto binop;
5635
5636 case TRUTH_OR_EXPR:
5637 case BIT_IOR_EXPR:
5638 this_optab = ior_optab;
5639 goto binop;
5640
5641 case TRUTH_XOR_EXPR:
5642 case BIT_XOR_EXPR:
5643 this_optab = xor_optab;
5644 goto binop;
5645
5646 case LSHIFT_EXPR:
5647 case RSHIFT_EXPR:
5648 case LROTATE_EXPR:
5649 case RROTATE_EXPR:
5650 preexpand_calls (exp);
5651 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5652 subtarget = 0;
5653 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5654 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5655 unsignedp);
5656
5657 /* Could determine the answer when only additive constants differ. Also,
5658 the addition of one can be handled by changing the condition. */
5659 case LT_EXPR:
5660 case LE_EXPR:
5661 case GT_EXPR:
5662 case GE_EXPR:
5663 case EQ_EXPR:
5664 case NE_EXPR:
5665 preexpand_calls (exp);
5666 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5667 if (temp != 0)
5668 return temp;
5669
5670 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5671 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5672 && original_target
5673 && GET_CODE (original_target) == REG
5674 && (GET_MODE (original_target)
5675 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5676 {
5677 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
5678 VOIDmode, 0);
5679
5680 if (temp != original_target)
5681 temp = copy_to_reg (temp);
5682
5683 op1 = gen_label_rtx ();
5684 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5685 GET_MODE (temp), unsignedp, 0);
5686 emit_jump_insn (gen_beq (op1));
5687 emit_move_insn (temp, const1_rtx);
5688 emit_label (op1);
5689 return temp;
5690 }
5691
5692 /* If no set-flag instruction, must generate a conditional
5693 store into a temporary variable. Drop through
5694 and handle this like && and ||. */
5695
5696 case TRUTH_ANDIF_EXPR:
5697 case TRUTH_ORIF_EXPR:
5698 if (! ignore
5699 && (target == 0 || ! safe_from_p (target, exp)
5700 /* Make sure we don't have a hard reg (such as function's return
5701 value) live across basic blocks, if not optimizing. */
5702 || (!optimize && GET_CODE (target) == REG
5703 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5704 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5705
5706 if (target)
5707 emit_clr_insn (target);
5708
5709 op1 = gen_label_rtx ();
5710 jumpifnot (exp, op1);
5711
5712 if (target)
5713 emit_0_to_1_insn (target);
5714
5715 emit_label (op1);
5716 return ignore ? const0_rtx : target;
5717
5718 case TRUTH_NOT_EXPR:
5719 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5720 /* The parser is careful to generate TRUTH_NOT_EXPR
5721 only with operands that are always zero or one. */
5722 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5723 target, 1, OPTAB_LIB_WIDEN);
5724 if (temp == 0)
5725 abort ();
5726 return temp;
5727
5728 case COMPOUND_EXPR:
5729 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5730 emit_queue ();
5731 return expand_expr (TREE_OPERAND (exp, 1),
5732 (ignore ? const0_rtx : target),
5733 VOIDmode, 0);
5734
5735 case COND_EXPR:
5736 {
5737 rtx flag = NULL_RTX;
5738 tree left_cleanups = NULL_TREE;
5739 tree right_cleanups = NULL_TREE;
5740
5741 /* Used to save a pointer to the place to put the setting of
5742 the flag that indicates if this side of the conditional was
5743 taken. We backpatch the code, if we find out later that we
5744 have any conditional cleanups that need to be performed. */
5745 rtx dest_right_flag = NULL_RTX;
5746 rtx dest_left_flag = NULL_RTX;
5747
5748 /* Note that COND_EXPRs whose type is a structure or union
5749 are required to be constructed to contain assignments of
5750 a temporary variable, so that we can evaluate them here
5751 for side effect only. If type is void, we must do likewise. */
5752
5753 /* If an arm of the branch requires a cleanup,
5754 only that cleanup is performed. */
5755
5756 tree singleton = 0;
5757 tree binary_op = 0, unary_op = 0;
5758 tree old_cleanups = cleanups_this_call;
5759
5760 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5761 convert it to our mode, if necessary. */
5762 if (integer_onep (TREE_OPERAND (exp, 1))
5763 && integer_zerop (TREE_OPERAND (exp, 2))
5764 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5765 {
5766 if (ignore)
5767 {
5768 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5769 modifier);
5770 return const0_rtx;
5771 }
5772
5773 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5774 if (GET_MODE (op0) == mode)
5775 return op0;
5776
5777 if (target == 0)
5778 target = gen_reg_rtx (mode);
5779 convert_move (target, op0, unsignedp);
5780 return target;
5781 }
5782
5783 /* If we are not to produce a result, we have no target. Otherwise,
5784 if a target was specified use it; it will not be used as an
5785 intermediate target unless it is safe. If no target, use a
5786 temporary. */
5787
5788 if (ignore)
5789 temp = 0;
5790 else if (original_target
5791 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
5792 && GET_MODE (original_target) == mode
5793 && ! (GET_CODE (original_target) == MEM
5794 && MEM_VOLATILE_P (original_target)))
5795 temp = original_target;
5796 else if (mode == BLKmode)
5797 {
5798 if (TYPE_SIZE (type) == 0
5799 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5800 abort ();
5801
5802 temp = assign_stack_temp (BLKmode,
5803 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5804 + BITS_PER_UNIT - 1)
5805 / BITS_PER_UNIT, 0);
5806 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
5807 }
5808 else
5809 temp = gen_reg_rtx (mode);
5810
5811 /* Check for X ? A + B : A. If we have this, we can copy
5812 A to the output and conditionally add B. Similarly for unary
5813 operations. Don't do this if X has side-effects because
5814 those side effects might affect A or B and the "?" operation is
5815 a sequence point in ANSI. (We test for side effects later.) */
5816
5817 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5818 && operand_equal_p (TREE_OPERAND (exp, 2),
5819 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5820 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5821 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5822 && operand_equal_p (TREE_OPERAND (exp, 1),
5823 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5824 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5825 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5826 && operand_equal_p (TREE_OPERAND (exp, 2),
5827 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5828 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5829 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5830 && operand_equal_p (TREE_OPERAND (exp, 1),
5831 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5832 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5833
5834 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5835 operation, do this as A + (X != 0). Similarly for other simple
5836 binary operators. */
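/* For example, x ? a + 1 : a becomes a + (x != 0): do_store_flag below
   computes x != 0 as a 0-or-1 value and expand_binop adds it to a,
   avoiding any branch.  The mirror case x ? a : a + 1 is handled by
   inverting the condition first, giving a + (x == 0).  */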
5837 if (temp && singleton && binary_op
5838 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5839 && (TREE_CODE (binary_op) == PLUS_EXPR
5840 || TREE_CODE (binary_op) == MINUS_EXPR
5841 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5842 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
5843 && integer_onep (TREE_OPERAND (binary_op, 1))
5844 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5845 {
5846 rtx result;
5847 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5848 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5849 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5850 : xor_optab);
5851
5852 /* If we had X ? A : A + 1, do this as A + (X == 0).
5853
5854 We have to invert the truth value here and then put it
5855 back later if do_store_flag fails. We cannot simply copy
5856 TREE_OPERAND (exp, 0) to another variable and modify that
5857 because invert_truthvalue can modify the tree pointed to
5858 by its argument. */
5859 if (singleton == TREE_OPERAND (exp, 1))
5860 TREE_OPERAND (exp, 0)
5861 = invert_truthvalue (TREE_OPERAND (exp, 0));
5862
5863 result = do_store_flag (TREE_OPERAND (exp, 0),
5864 (safe_from_p (temp, singleton)
5865 ? temp : NULL_RTX),
5866 mode, BRANCH_COST <= 1);
5867
5868 if (result)
5869 {
5870 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5871 return expand_binop (mode, boptab, op1, result, temp,
5872 unsignedp, OPTAB_LIB_WIDEN);
5873 }
5874 else if (singleton == TREE_OPERAND (exp, 1))
5875 TREE_OPERAND (exp, 0)
5876 = invert_truthvalue (TREE_OPERAND (exp, 0));
5877 }
5878
5879 do_pending_stack_adjust ();
5880 NO_DEFER_POP;
5881 op0 = gen_label_rtx ();
5882
5883 flag = gen_reg_rtx (word_mode);
5884 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5885 {
5886 if (temp != 0)
5887 {
5888 /* If the target conflicts with the other operand of the
5889 binary op, we can't use it. Also, we can't use the target
5890 if it is a hard register, because evaluating the condition
5891 might clobber it. */
5892 if ((binary_op
5893 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5894 || (GET_CODE (temp) == REG
5895 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5896 temp = gen_reg_rtx (mode);
5897 store_expr (singleton, temp, 0);
5898 }
5899 else
5900 expand_expr (singleton,
5901 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5902 dest_left_flag = get_last_insn ();
5903 if (singleton == TREE_OPERAND (exp, 1))
5904 jumpif (TREE_OPERAND (exp, 0), op0);
5905 else
5906 jumpifnot (TREE_OPERAND (exp, 0), op0);
5907
5908 /* Allows cleanups up to here. */
5909 old_cleanups = cleanups_this_call;
5910 if (binary_op && temp == 0)
5911 /* Just touch the other operand. */
5912 expand_expr (TREE_OPERAND (binary_op, 1),
5913 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5914 else if (binary_op)
5915 store_expr (build (TREE_CODE (binary_op), type,
5916 make_tree (type, temp),
5917 TREE_OPERAND (binary_op, 1)),
5918 temp, 0);
5919 else
5920 store_expr (build1 (TREE_CODE (unary_op), type,
5921 make_tree (type, temp)),
5922 temp, 0);
5923 op1 = op0;
5924 dest_right_flag = get_last_insn ();
5925 }
5926 #if 0
5927 /* This is now done in jump.c and is better done there because it
5928 produces shorter register lifetimes. */
5929
5930 /* Check for both possibilities either constants or variables
5931 in registers (but not the same as the target!). If so, can
5932 save branches by assigning one, branching, and assigning the
5933 other. */
5934 else if (temp && GET_MODE (temp) != BLKmode
5935 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5936 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5937 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5938 && DECL_RTL (TREE_OPERAND (exp, 1))
5939 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5940 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5941 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5942 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5943 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5944 && DECL_RTL (TREE_OPERAND (exp, 2))
5945 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5946 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5947 {
5948 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5949 temp = gen_reg_rtx (mode);
5950 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5951 dest_left_flag = get_last_insn ();
5952 jumpifnot (TREE_OPERAND (exp, 0), op0);
5953
5954 /* Allows cleanups up to here. */
5955 old_cleanups = cleanups_this_call;
5956 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5957 op1 = op0;
5958 dest_right_flag = get_last_insn ();
5959 }
5960 #endif
5961 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5962 comparison operator. If we have one of these cases, set the
5963 output to A, branch on A (cse will merge these two references),
5964 then set the output to FOO. */
5965 else if (temp
5966 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5967 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5968 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5969 TREE_OPERAND (exp, 1), 0)
5970 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5971 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5972 {
5973 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5974 temp = gen_reg_rtx (mode);
5975 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5976 dest_left_flag = get_last_insn ();
5977 jumpif (TREE_OPERAND (exp, 0), op0);
5978
5979 /* Allows cleanups up to here. */
5980 old_cleanups = cleanups_this_call;
5981 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5982 op1 = op0;
5983 dest_right_flag = get_last_insn ();
5984 }
5985 else if (temp
5986 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5987 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5988 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5989 TREE_OPERAND (exp, 2), 0)
5990 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5991 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5992 {
5993 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5994 temp = gen_reg_rtx (mode);
5995 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5996 dest_left_flag = get_last_insn ();
5997 jumpifnot (TREE_OPERAND (exp, 0), op0);
5998
5999 /* Allows cleanups up to here. */
6000 old_cleanups = cleanups_this_call;
6001 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6002 op1 = op0;
6003 dest_right_flag = get_last_insn ();
6004 }
6005 else
6006 {
6007 op1 = gen_label_rtx ();
6008 jumpifnot (TREE_OPERAND (exp, 0), op0);
6009
6010 /* Allows cleanups up to here. */
6011 old_cleanups = cleanups_this_call;
6012 if (temp != 0)
6013 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6014 else
6015 expand_expr (TREE_OPERAND (exp, 1),
6016 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6017 dest_left_flag = get_last_insn ();
6018
6019 /* Handle conditional cleanups, if any. */
6020 left_cleanups = defer_cleanups_to (old_cleanups);
6021
6022 emit_queue ();
6023 emit_jump_insn (gen_jump (op1));
6024 emit_barrier ();
6025 emit_label (op0);
6026 if (temp != 0)
6027 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6028 else
6029 expand_expr (TREE_OPERAND (exp, 2),
6030 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6031 dest_right_flag = get_last_insn ();
6032 }
6033
6034 /* Handle conditional cleanups, if any. */
6035 right_cleanups = defer_cleanups_to (old_cleanups);
6036
6037 emit_queue ();
6038 emit_label (op1);
6039 OK_DEFER_POP;
6040
6041 /* Add back in, any conditional cleanups. */
6042 if (left_cleanups || right_cleanups)
6043 {
6044 tree new_cleanups;
6045 tree cond;
6046 rtx last;
6047
6048 /* Now that we know that a flag is needed, go back and add in the
6049 setting of the flag. */
6050
6051 /* Do the left side flag. */
6052 last = get_last_insn ();
6053 /* Flag left cleanups as needed. */
6054 emit_move_insn (flag, const1_rtx);
6055 /* ??? deprecated, use sequences instead. */
6056 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6057
6058 /* Do the right side flag. */
6059 last = get_last_insn ();
6060 /* Flag right cleanups as needed. */
6061 emit_move_insn (flag, const0_rtx);
6062 /* ??? deprecated, use sequences instead. */
6063 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6064
6065 /* Convert FLAG, which is an rtx, into a tree. */
6066 cond = make_node (RTL_EXPR);
6067 TREE_TYPE (cond) = integer_type_node;
6068 RTL_EXPR_RTL (cond) = flag;
6069 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6070 cond = save_expr (cond);
6071
6072 if (! left_cleanups)
6073 left_cleanups = integer_zero_node;
6074 if (! right_cleanups)
6075 right_cleanups = integer_zero_node;
6076 new_cleanups = build (COND_EXPR, void_type_node,
6077 truthvalue_conversion (cond),
6078 left_cleanups, right_cleanups);
6079 new_cleanups = fold (new_cleanups);
6080
6081 /* Now add in the conditionalized cleanups. */
6082 cleanups_this_call
6083 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6084 (*interim_eh_hook) (NULL_TREE);
6085 }
6086 return temp;
6087 }
6088
6089 case TARGET_EXPR:
6090 {
6091 int need_exception_region = 0;
6092 /* Something needs to be initialized, but we didn't know
6093 where that thing was when building the tree. For example,
6094 it could be the return value of a function, or a parameter
6095 to a function which is laid out on the stack, or a temporary
6096 variable which must be passed by reference.
6097
6098 We guarantee that the expression will either be constructed
6099 or copied into our original target. */
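/* For example (front-end dependent), for a C++ declaration T x = f ();
   the TARGET_EXPR names the slot for x, so that f's return value can be
   constructed directly into it with no temporary and no copy.  */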
6100
6101 tree slot = TREE_OPERAND (exp, 0);
6102 tree exp1;
6103 rtx temp;
6104
6105 if (TREE_CODE (slot) != VAR_DECL)
6106 abort ();
6107
6108 if (! ignore)
6109 target = original_target;
6110
6111 if (target == 0)
6112 {
6113 if (DECL_RTL (slot) != 0)
6114 {
6115 target = DECL_RTL (slot);
6116 /* If we have already expanded the slot, don't do
6117 it again. (mrs) */
6118 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6119 return target;
6120 }
6121 else
6122 {
6123 target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
6124 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
6125 /* All temp slots at this level must not conflict. */
6126 preserve_temp_slots (target);
6127 DECL_RTL (slot) = target;
6128
6129 /* Since SLOT is not known to the called function
6130 to belong to its stack frame, we must build an explicit
6131 cleanup. This case occurs when we must build up a reference
6132 to pass the reference as an argument. In this case,
6133 it is very likely that such a reference need not be
6134 built here. */
6135
6136 if (TREE_OPERAND (exp, 2) == 0)
6137 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6138 if (TREE_OPERAND (exp, 2))
6139 {
6140 cleanups_this_call = tree_cons (NULL_TREE,
6141 TREE_OPERAND (exp, 2),
6142 cleanups_this_call);
6143 need_exception_region = 1;
6144 }
6145 }
6146 }
6147 else
6148 {
6149 /* This case does occur when expanding a parameter which
6150 needs to be constructed on the stack. The target
6151 is the actual stack address that we want to initialize.
6152 The function we call will perform the cleanup in this case. */
6153
6154 /* If we have already assigned it space, use that space,
6155 not the target that we were passed in, as our target
6156 parameter is only a hint. */
6157 if (DECL_RTL (slot) != 0)
6158 {
6159 target = DECL_RTL (slot);
6160 /* If we have already expanded the slot, don't do
6161 it again. (mrs) */
6162 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6163 return target;
6164 }
6165
6166 DECL_RTL (slot) = target;
6167 }
6168
6169 exp1 = TREE_OPERAND (exp, 1);
6170 /* Mark it as expanded. */
6171 TREE_OPERAND (exp, 1) = NULL_TREE;
6172
6173 temp = expand_expr (exp1, target, tmode, modifier);
6174
6175 if (need_exception_region)
6176 (*interim_eh_hook) (NULL_TREE);
6177
6178 return temp;
6179 }
6180
6181 case INIT_EXPR:
6182 {
6183 tree lhs = TREE_OPERAND (exp, 0);
6184 tree rhs = TREE_OPERAND (exp, 1);
6185 tree noncopied_parts = 0;
6186 tree lhs_type = TREE_TYPE (lhs);
6187
6188 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6189 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6190 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6191 TYPE_NONCOPIED_PARTS (lhs_type));
6192 while (noncopied_parts != 0)
6193 {
6194 expand_assignment (TREE_VALUE (noncopied_parts),
6195 TREE_PURPOSE (noncopied_parts), 0, 0);
6196 noncopied_parts = TREE_CHAIN (noncopied_parts);
6197 }
6198 return temp;
6199 }
6200
6201 case MODIFY_EXPR:
6202 {
6203 /* If lhs is complex, expand calls in rhs before computing it.
6204 That's so we don't compute a pointer and save it over a call.
6205 If lhs is simple, compute it first so we can give it as a
6206 target if the rhs is just a call. This avoids an extra temp and copy
6207 and that prevents a partial-subsumption which makes bad code.
6208 Actually we could treat component_ref's of vars like vars. */
6209
6210 tree lhs = TREE_OPERAND (exp, 0);
6211 tree rhs = TREE_OPERAND (exp, 1);
6212 tree noncopied_parts = 0;
6213 tree lhs_type = TREE_TYPE (lhs);
6214
6215 temp = 0;
6216
6217 if (TREE_CODE (lhs) != VAR_DECL
6218 && TREE_CODE (lhs) != RESULT_DECL
6219 && TREE_CODE (lhs) != PARM_DECL)
6220 preexpand_calls (exp);
6221
6222 /* Check for |= or &= of a bitfield of size one into another bitfield
6223 of size 1. In this case, (unless we need the result of the
6224 assignment) we can do this more efficiently with a
6225 test followed by an assignment, if necessary.
6226
6227 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6228 things change so we do, this code should be enhanced to
6229 support it. */
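/* For example, with one-bit bitfields and the result ignored, s.a |= s.b
   expands to roughly

	if (s.b) s.a = 1;

   a jump on one bitfield and a store of a constant into the other,
   instead of a load, OR, and store-back of bitfield values.  */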
6230 if (ignore
6231 && TREE_CODE (lhs) == COMPONENT_REF
6232 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6233 || TREE_CODE (rhs) == BIT_AND_EXPR)
6234 && TREE_OPERAND (rhs, 0) == lhs
6235 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6236 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6237 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6238 {
6239 rtx label = gen_label_rtx ();
6240
6241 do_jump (TREE_OPERAND (rhs, 1),
6242 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6243 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6244 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6245 (TREE_CODE (rhs) == BIT_IOR_EXPR
6246 ? integer_one_node
6247 : integer_zero_node)),
6248 0, 0);
6249 do_pending_stack_adjust ();
6250 emit_label (label);
6251 return const0_rtx;
6252 }
6253
6254 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6255 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6256 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6257 TYPE_NONCOPIED_PARTS (lhs_type));
6258
6259 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6260 while (noncopied_parts != 0)
6261 {
6262 expand_assignment (TREE_PURPOSE (noncopied_parts),
6263 TREE_VALUE (noncopied_parts), 0, 0);
6264 noncopied_parts = TREE_CHAIN (noncopied_parts);
6265 }
6266 return temp;
6267 }
6268
6269 case PREINCREMENT_EXPR:
6270 case PREDECREMENT_EXPR:
6271 return expand_increment (exp, 0);
6272
6273 case POSTINCREMENT_EXPR:
6274 case POSTDECREMENT_EXPR:
6275 /* Faster to treat as pre-increment if result is not used. */
6276 return expand_increment (exp, ! ignore);
6277
6278 case ADDR_EXPR:
6279 /* If nonzero, TEMP will be set to the address of something that might
6280 be a MEM corresponding to a stack slot. */
6281 temp = 0;
6282
6283 /* Are we taking the address of a nested function? */
6284 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6285 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
6286 {
6287 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6288 op0 = force_operand (op0, target);
6289 }
6290 /* If we are taking the address of something erroneous, just
6291 return a zero. */
6292 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6293 return const0_rtx;
6294 else
6295 {
6296 /* We make sure to pass const0_rtx down if we came in with
6297 ignore set, to avoid doing the cleanups twice. */
6298 op0 = expand_expr (TREE_OPERAND (exp, 0),
6299 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6300 (modifier == EXPAND_INITIALIZER
6301 ? modifier : EXPAND_CONST_ADDRESS));
6302
6303 /* If we are going to ignore the result, OP0 will have been set
6304 to const0_rtx, so just return it. Don't get confused and
6305 think we are taking the address of the constant. */
6306 if (ignore)
6307 return op0;
6308
6309 /* We would like the object in memory. If it is a constant,
6310 we can have it be statically allocated into memory. For
6311 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6312 memory and store the value into it. */
6313
6314 if (CONSTANT_P (op0))
6315 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6316 op0);
6317 else if (GET_CODE (op0) == MEM)
6318 {
6319 mark_temp_addr_taken (op0);
6320 temp = XEXP (op0, 0);
6321 }
6322
6323 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6324 || GET_CODE (op0) == CONCAT)
6325 {
6326 /* If this object is in a register, it must not
6327 be BLKmode. */
6328 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6329 enum machine_mode inner_mode = TYPE_MODE (inner_type);
6330 rtx memloc
6331 = assign_stack_temp (inner_mode,
6332 int_size_in_bytes (inner_type), 1);
6333 MEM_IN_STRUCT_P (memloc) = AGGREGATE_TYPE_P (inner_type);
6334
6335 mark_temp_addr_taken (memloc);
6336 emit_move_insn (memloc, op0);
6337 op0 = memloc;
6338 }
6339
6340 if (GET_CODE (op0) != MEM)
6341 abort ();
6342
6343 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6344 {
6345 temp = XEXP (op0, 0);
6346 #ifdef POINTERS_EXTEND_UNSIGNED
6347 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6348 && mode == ptr_mode)
6349 temp = convert_memory_address (ptr_mode, temp);
6350 #endif
6351 return temp;
6352 }
6353
6354 op0 = force_operand (XEXP (op0, 0), target);
6355 }
6356
6357 if (flag_force_addr && GET_CODE (op0) != REG)
6358 op0 = force_reg (Pmode, op0);
6359
6360 if (GET_CODE (op0) == REG)
6361 mark_reg_pointer (op0);
6362
6363 /* If we might have had a temp slot, add an equivalent address
6364 for it. */
6365 if (temp != 0)
6366 update_temp_slot_address (temp, op0);
6367
6368 #ifdef POINTERS_EXTEND_UNSIGNED
6369 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
6370 && mode == ptr_mode)
6371 op0 = convert_memory_address (ptr_mode, op0);
6372 #endif
6373
6374 return op0;
6375
6376 case ENTRY_VALUE_EXPR:
6377 abort ();
6378
6379 /* COMPLEX type for Extended Pascal & Fortran */
6380 case COMPLEX_EXPR:
6381 {
6382 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6383 rtx insns;
6384
6385 /* Get the rtx code of the operands. */
6386 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6387 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6388
6389 if (! target)
6390 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6391
6392 start_sequence ();
6393
6394 /* Move the real (op0) and imaginary (op1) parts to their location. */
6395 emit_move_insn (gen_realpart (mode, target), op0);
6396 emit_move_insn (gen_imagpart (mode, target), op1);
6397
6398 insns = get_insns ();
6399 end_sequence ();
6400
6401 /* Complex construction should appear as a single unit. */
6402 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6403 each with a separate pseudo as destination.
6404 It's not correct for flow to treat them as a unit. */
6405 if (GET_CODE (target) != CONCAT)
6406 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6407 else
6408 emit_insns (insns);
6409
6410 return target;
6411 }
6412
6413 case REALPART_EXPR:
6414 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6415 return gen_realpart (mode, op0);
6416
6417 case IMAGPART_EXPR:
6418 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6419 return gen_imagpart (mode, op0);
6420
6421 case CONJ_EXPR:
6422 {
6423 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6424 rtx imag_t;
6425 rtx insns;
6426
6427 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6428
6429 if (! target)
6430 target = gen_reg_rtx (mode);
6431
6432 start_sequence ();
6433
6434 /* Store the realpart and the negated imagpart to target. */
6435 emit_move_insn (gen_realpart (partmode, target),
6436 gen_realpart (partmode, op0));
6437
6438 imag_t = gen_imagpart (partmode, target);
6439 temp = expand_unop (partmode, neg_optab,
6440 gen_imagpart (partmode, op0), imag_t, 0);
6441 if (temp != imag_t)
6442 emit_move_insn (imag_t, temp);
6443
6444 insns = get_insns ();
6445 end_sequence ();
6446
6447 /* Conjugate should appear as a single unit.
6448 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6449 each with a separate pseudo as destination.
6450 It's not correct for flow to treat them as a unit. */
6451 if (GET_CODE (target) != CONCAT)
6452 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6453 else
6454 emit_insns (insns);
6455
6456 return target;
6457 }
6458
6459 case ERROR_MARK:
6460 op0 = CONST0_RTX (tmode);
6461 if (op0 != 0)
6462 return op0;
6463 return const0_rtx;
6464
6465 default:
6466 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6467 }
6468
6469 /* Here to do an ordinary binary operator, generating an instruction
6470 from the optab already placed in `this_optab'. */
6471 binop:
6472 preexpand_calls (exp);
6473 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6474 subtarget = 0;
6475 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6476 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6477 binop2:
6478 temp = expand_binop (mode, this_optab, op0, op1, target,
6479 unsignedp, OPTAB_LIB_WIDEN);
6480 if (temp == 0)
6481 abort ();
6482 return temp;
6483 }
6484
6485
6486 /* Emit bytecode to evaluate the given expression EXP to the stack. */
6487 void
6488 bc_expand_expr (exp)
6489 tree exp;
6490 {
6491 enum tree_code code;
6492 tree type, arg0;
6493 rtx r;
6494 struct binary_operator *binoptab;
6495 struct unary_operator *unoptab;
6496 struct increment_operator *incroptab;
6497 struct bc_label *lab, *lab1;
6498 enum bytecode_opcode opcode;
6499
6500
6501 code = TREE_CODE (exp);
6502
6503 switch (code)
6504 {
6505 case PARM_DECL:
6506
6507 if (DECL_RTL (exp) == 0)
6508 {
6509 error_with_decl (exp, "prior parameter's size depends on `%s'");
6510 return;
6511 }
6512
6513 bc_load_parmaddr (DECL_RTL (exp));
6514 bc_load_memory (TREE_TYPE (exp), exp);
6515
6516 return;
6517
6518 case VAR_DECL:
6519
6520 if (DECL_RTL (exp) == 0)
6521 abort ();
6522
6523 #if 0
6524 if (BYTECODE_LABEL (DECL_RTL (exp)))
6525 bc_load_externaddr (DECL_RTL (exp));
6526 else
6527 bc_load_localaddr (DECL_RTL (exp));
6528 #endif
6529 if (TREE_PUBLIC (exp))
6530 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6531 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
6532 else
6533 bc_load_localaddr (DECL_RTL (exp));
6534
6535 bc_load_memory (TREE_TYPE (exp), exp);
6536 return;
6537
6538 case INTEGER_CST:
6539
6540 #ifdef DEBUG_PRINT_CODE
6541 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6542 #endif
6543 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
6544 ? SImode
6545 : TYPE_MODE (TREE_TYPE (exp)))],
6546 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6547 return;
6548
6549 case REAL_CST:
6550
6551 #if 0
6552 #ifdef DEBUG_PRINT_CODE
6553 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6554 #endif
6555 /* FIX THIS: find a better way to pass real_cst's. -bson */
6556 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6557 (double) TREE_REAL_CST (exp));
6558 #else
6559 abort ();
6560 #endif
6561
6562 return;
6563
6564 case CALL_EXPR:
6565
6566 /* We build a call description vector describing the type of
6567 the return value and of the arguments; this call vector,
6568 together with a pointer to a location for the return value
6569 and the base of the argument list, is passed to the low
6570 level machine dependent call subroutine, which is responsible
6571 for putting the arguments wherever real functions expect
6572 them, as well as getting the return value back. */
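/* Tracing the tree_cons calls below, the finished calldesc vector is
   laid out front to back as

	{ nargs, return type code, return size,
	  arg1 type code, arg1 size, ..., argN type code, argN size }

   with each entry an integer constant.  */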
6573 {
6574 tree calldesc = 0, arg;
6575 int nargs = 0, i;
6576 rtx retval;
6577
6578 /* Push the evaluated args on the evaluation stack in reverse
6579 order. Also make an entry for each arg in the calldesc
6580 vector while we're at it. */
6581
6582 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6583
6584 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6585 {
6586 ++nargs;
6587 bc_expand_expr (TREE_VALUE (arg));
6588
6589 calldesc = tree_cons ((tree) 0,
6590 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6591 calldesc);
6592 calldesc = tree_cons ((tree) 0,
6593 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6594 calldesc);
6595 }
6596
6597 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6598
6599 /* Allocate a location for the return value and push its
6600 address on the evaluation stack. Also make an entry
6601 at the front of the calldesc for the return value type. */
6602
6603 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6604 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6605 bc_load_localaddr (retval);
6606
6607 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6608 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6609
6610 /* Prepend the argument count. */
6611 calldesc = tree_cons ((tree) 0,
6612 build_int_2 (nargs, 0),
6613 calldesc);
6614
6615 /* Push the address of the call description vector on the stack. */
6616 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6617 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6618 build_index_type (build_int_2 (nargs * 2, 0)));
6619 r = output_constant_def (calldesc);
6620 bc_load_externaddr (r);
6621
6622 /* Push the address of the function to be called. */
6623 bc_expand_expr (TREE_OPERAND (exp, 0));
6624
6625 /* Call the function, popping its address and the calldesc vector
6626 address off the evaluation stack in the process. */
6627 bc_emit_instruction (call);
6628
6629 /* Pop the arguments off the stack. */
6630 bc_adjust_stack (nargs);
6631
6632 /* Load the return value onto the stack. */
6633 bc_load_localaddr (retval);
6634 bc_load_memory (type, TREE_OPERAND (exp, 0));
6635 }
6636 return;
6637
6638 case SAVE_EXPR:
6639
6640 if (!SAVE_EXPR_RTL (exp))
6641 {
6642 /* First time around: copy to local variable */
6643 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6644 TYPE_ALIGN (TREE_TYPE(exp)));
6645 bc_expand_expr (TREE_OPERAND (exp, 0));
6646 bc_emit_instruction (duplicate);
6647
6648 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6649 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6650 }
6651 else
6652 {
6653 /* Consecutive reference: use saved copy */
6654 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6655 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6656 }
6657 return;
6658
6659 #if 0
6660 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6661 how are they handled instead? */
6662 case LET_STMT:
6663
6664 TREE_USED (exp) = 1;
6665 bc_expand_expr (STMT_BODY (exp));
6666 return;
6667 #endif
6668
6669 case NOP_EXPR:
6670 case CONVERT_EXPR:
6671
6672 bc_expand_expr (TREE_OPERAND (exp, 0));
6673 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6674 return;
6675
6676 case MODIFY_EXPR:
6677
6678 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6679 return;
6680
6681 case ADDR_EXPR:
6682
6683 bc_expand_address (TREE_OPERAND (exp, 0));
6684 return;
6685
6686 case INDIRECT_REF:
6687
6688 bc_expand_expr (TREE_OPERAND (exp, 0));
6689 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6690 return;
6691
6692 case ARRAY_REF:
6693
6694 bc_expand_expr (bc_canonicalize_array_ref (exp));
6695 return;
6696
6697 case COMPONENT_REF:
6698
6699 bc_expand_component_address (exp);
6700
6701 /* If we have a bitfield, generate a proper load */
6702 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6703 return;
6704
6705 case COMPOUND_EXPR:
6706
6707 bc_expand_expr (TREE_OPERAND (exp, 0));
6708 bc_emit_instruction (drop);
6709 bc_expand_expr (TREE_OPERAND (exp, 1));
6710 return;
6711
6712 case COND_EXPR:
6713
6714 bc_expand_expr (TREE_OPERAND (exp, 0));
6715 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6716 lab = bc_get_bytecode_label ();
6717 bc_emit_bytecode (xjumpifnot);
6718 bc_emit_bytecode_labelref (lab);
6719
6720 #ifdef DEBUG_PRINT_CODE
6721 fputc ('\n', stderr);
6722 #endif
6723 bc_expand_expr (TREE_OPERAND (exp, 1));
6724 lab1 = bc_get_bytecode_label ();
6725 bc_emit_bytecode (jump);
6726 bc_emit_bytecode_labelref (lab1);
6727
6728 #ifdef DEBUG_PRINT_CODE
6729 fputc ('\n', stderr);
6730 #endif
6731
6732 bc_emit_bytecode_labeldef (lab);
6733 bc_expand_expr (TREE_OPERAND (exp, 2));
6734 bc_emit_bytecode_labeldef (lab1);
6735 return;
6736
6737 case TRUTH_ANDIF_EXPR:
6738
6739 opcode = xjumpifnot;
6740 goto andorif;
6741
6742 case TRUTH_ORIF_EXPR:
6743
6744 opcode = xjumpif;
6745 goto andorif;
6746
6747 case PLUS_EXPR:
6748
6749 binoptab = optab_plus_expr;
6750 goto binop;
6751
6752 case MINUS_EXPR:
6753
6754 binoptab = optab_minus_expr;
6755 goto binop;
6756
6757 case MULT_EXPR:
6758
6759 binoptab = optab_mult_expr;
6760 goto binop;
6761
6762 case TRUNC_DIV_EXPR:
6763 case FLOOR_DIV_EXPR:
6764 case CEIL_DIV_EXPR:
6765 case ROUND_DIV_EXPR:
6766 case EXACT_DIV_EXPR:
6767
6768 binoptab = optab_trunc_div_expr;
6769 goto binop;
6770
6771 case TRUNC_MOD_EXPR:
6772 case FLOOR_MOD_EXPR:
6773 case CEIL_MOD_EXPR:
6774 case ROUND_MOD_EXPR:
6775
6776 binoptab = optab_trunc_mod_expr;
6777 goto binop;
6778
6779 case FIX_ROUND_EXPR:
6780 case FIX_FLOOR_EXPR:
6781 case FIX_CEIL_EXPR:
6782 abort (); /* Not used for C. */
6783
6784 case FIX_TRUNC_EXPR:
6785 case FLOAT_EXPR:
6786 case MAX_EXPR:
6787 case MIN_EXPR:
6788 case FFS_EXPR:
6789 case LROTATE_EXPR:
6790 case RROTATE_EXPR:
6791 abort (); /* FIXME */
6792
6793 case RDIV_EXPR:
6794
6795 binoptab = optab_rdiv_expr;
6796 goto binop;
6797
6798 case BIT_AND_EXPR:
6799
6800 binoptab = optab_bit_and_expr;
6801 goto binop;
6802
6803 case BIT_IOR_EXPR:
6804
6805 binoptab = optab_bit_ior_expr;
6806 goto binop;
6807
6808 case BIT_XOR_EXPR:
6809
6810 binoptab = optab_bit_xor_expr;
6811 goto binop;
6812
6813 case LSHIFT_EXPR:
6814
6815 binoptab = optab_lshift_expr;
6816 goto binop;
6817
6818 case RSHIFT_EXPR:
6819
6820 binoptab = optab_rshift_expr;
6821 goto binop;
6822
6823 case TRUTH_AND_EXPR:
6824
6825 binoptab = optab_truth_and_expr;
6826 goto binop;
6827
6828 case TRUTH_OR_EXPR:
6829
6830 binoptab = optab_truth_or_expr;
6831 goto binop;
6832
6833 case LT_EXPR:
6834
6835 binoptab = optab_lt_expr;
6836 goto binop;
6837
6838 case LE_EXPR:
6839
6840 binoptab = optab_le_expr;
6841 goto binop;
6842
6843 case GE_EXPR:
6844
6845 binoptab = optab_ge_expr;
6846 goto binop;
6847
6848 case GT_EXPR:
6849
6850 binoptab = optab_gt_expr;
6851 goto binop;
6852
6853 case EQ_EXPR:
6854
6855 binoptab = optab_eq_expr;
6856 goto binop;
6857
6858 case NE_EXPR:
6859
6860 binoptab = optab_ne_expr;
6861 goto binop;
6862
6863 case NEGATE_EXPR:
6864
6865 unoptab = optab_negate_expr;
6866 goto unop;
6867
6868 case BIT_NOT_EXPR:
6869
6870 unoptab = optab_bit_not_expr;
6871 goto unop;
6872
6873 case TRUTH_NOT_EXPR:
6874
6875 unoptab = optab_truth_not_expr;
6876 goto unop;
6877
6878 case PREDECREMENT_EXPR:
6879
6880 incroptab = optab_predecrement_expr;
6881 goto increment;
6882
6883 case PREINCREMENT_EXPR:
6884
6885 incroptab = optab_preincrement_expr;
6886 goto increment;
6887
6888 case POSTDECREMENT_EXPR:
6889
6890 incroptab = optab_postdecrement_expr;
6891 goto increment;
6892
6893 case POSTINCREMENT_EXPR:
6894
6895 incroptab = optab_postincrement_expr;
6896 goto increment;
6897
6898 case CONSTRUCTOR:
6899
6900 bc_expand_constructor (exp);
6901 return;
6902
6903 case ERROR_MARK:
6904 case RTL_EXPR:
6905
6906 return;
6907
6908 case BIND_EXPR:
6909 {
6910 tree vars = TREE_OPERAND (exp, 0);
6911 int vars_need_expansion = 0;
6912
6913 /* Need to open a binding contour here because
6914 if there are any cleanups they must be contained here. */
6915 expand_start_bindings (0);
6916
6917 /* Mark the corresponding BLOCK for output. */
6918 if (TREE_OPERAND (exp, 2) != 0)
6919 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6920
6921 /* If VARS have not yet been expanded, expand them now. */
6922 while (vars)
6923 {
6924 if (DECL_RTL (vars) == 0)
6925 {
6926 vars_need_expansion = 1;
6927 expand_decl (vars);
6928 }
6929 expand_decl_init (vars);
6930 vars = TREE_CHAIN (vars);
6931 }
6932
6933 bc_expand_expr (TREE_OPERAND (exp, 1));
6934
6935 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6936
6937 return;
6938 }
6939 }
6940
6941 abort ();
6942
6943 binop:
6944
6945 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6946 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6947 return;
6948
6949
6950 unop:
6951
6952 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6953 return;
6954
6955
6956 andorif:
6957
6958 bc_expand_expr (TREE_OPERAND (exp, 0));
6959 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6960 lab = bc_get_bytecode_label ();
6961
6962 bc_emit_instruction (duplicate);
6963 bc_emit_bytecode (opcode);
6964 bc_emit_bytecode_labelref (lab);
6965
6966 #ifdef DEBUG_PRINT_CODE
6967 fputc ('\n', stderr);
6968 #endif
6969
6970 bc_emit_instruction (drop);
6971
6972 bc_expand_expr (TREE_OPERAND (exp, 1));
6973 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6974 bc_emit_bytecode_labeldef (lab);
6975 return;
6976
6977
6978 increment:
6979
6980 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6981
6982 /* Push the quantum. */
6983 bc_expand_expr (TREE_OPERAND (exp, 1));
6984
6985 /* Convert it to the lvalue's type. */
6986 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6987
6988 /* Push the address of the lvalue */
6989 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6990
6991 /* Perform actual increment */
6992 bc_expand_increment (incroptab, type);
6993 return;
6994 }
6995 \f
6996 /* Return the alignment in bits of EXP, a pointer valued expression.
6997 But don't return more than MAX_ALIGN no matter what.
6998 The alignment returned is, by default, the alignment of the thing that
6999 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7000
7001 Otherwise, look at the expression to see if we can do better, i.e., if the
7002 expression is actually pointing at an object whose alignment is tighter. */
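/* For example, for a pointer P to 32-bit-aligned data, the expression
   P + 2 (a 2-byte offset) can be assumed only 16-bit alignment; the
   PLUS_EXPR case below halves MAX_ALIGN until the byte offset is a
   multiple of it.  */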
7003
7004 static int
7005 get_pointer_alignment (exp, max_align)
7006 tree exp;
7007 unsigned max_align;
7008 {
7009 unsigned align, inner;
7010
7011 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7012 return 0;
7013
7014 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7015 align = MIN (align, max_align);
7016
7017 while (1)
7018 {
7019 switch (TREE_CODE (exp))
7020 {
7021 case NOP_EXPR:
7022 case CONVERT_EXPR:
7023 case NON_LVALUE_EXPR:
7024 exp = TREE_OPERAND (exp, 0);
7025 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7026 return align;
7027 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7028 align = MIN (inner, max_align);
7029 break;
7030
7031 case PLUS_EXPR:
7032 /* If sum of pointer + int, restrict our maximum alignment to that
7033 imposed by the integer. If not, we can't do any better than
7034 ALIGN. */
7035 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7036 return align;
7037
7038 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7039 & (max_align - 1))
7040 != 0)
7041 max_align >>= 1;
7042
7043 exp = TREE_OPERAND (exp, 0);
7044 break;
7045
7046 case ADDR_EXPR:
7047 /* See what we are pointing at and look at its alignment. */
7048 exp = TREE_OPERAND (exp, 0);
7049 if (TREE_CODE (exp) == FUNCTION_DECL)
7050 align = FUNCTION_BOUNDARY;
7051 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7052 align = DECL_ALIGN (exp);
7053 #ifdef CONSTANT_ALIGNMENT
7054 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7055 align = CONSTANT_ALIGNMENT (exp, align);
7056 #endif
7057 return MIN (align, max_align);
7058
7059 default:
7060 return align;
7061 }
7062 }
7063 }
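/* For illustration (hypothetical source): for `&buf' with `int buf[16];'
   the ADDR_EXPR case returns DECL_ALIGN (buf), while for `p + 3' with P a
   `char *' the PLUS_EXPR case sees the constant byte offset 3 and can
   promise no more than byte alignment.  */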
7064 \f
7065 /* Return the tree node and offset if a given argument corresponds to
7066 a string constant. */
7067
7068 static tree
7069 string_constant (arg, ptr_offset)
7070 tree arg;
7071 tree *ptr_offset;
7072 {
7073 STRIP_NOPS (arg);
7074
7075 if (TREE_CODE (arg) == ADDR_EXPR
7076 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7077 {
7078 *ptr_offset = integer_zero_node;
7079 return TREE_OPERAND (arg, 0);
7080 }
7081 else if (TREE_CODE (arg) == PLUS_EXPR)
7082 {
7083 tree arg0 = TREE_OPERAND (arg, 0);
7084 tree arg1 = TREE_OPERAND (arg, 1);
7085
7086 STRIP_NOPS (arg0);
7087 STRIP_NOPS (arg1);
7088
7089 if (TREE_CODE (arg0) == ADDR_EXPR
7090 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7091 {
7092 *ptr_offset = arg1;
7093 return TREE_OPERAND (arg0, 0);
7094 }
7095 else if (TREE_CODE (arg1) == ADDR_EXPR
7096 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7097 {
7098 *ptr_offset = arg0;
7099 return TREE_OPERAND (arg1, 0);
7100 }
7101 }
7102
7103 return 0;
7104 }
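/* For illustration, both of these argument forms are recognized
   (hypothetical source):

       "hello"          => returns the STRING_CST, *PTR_OFFSET = 0
       "hello" + 2      => returns the STRING_CST, *PTR_OFFSET = 2

   Anything else, such as a pointer variable, yields 0.  */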
7105
7106 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7107 answer, because the array could contain a zero byte in the middle.
7108 TREE_STRING_LENGTH is the size of the character array, not the string.
7109
7110 Unfortunately, string_constant can't access the values of const char
7111 arrays with initializers, so neither can we here. */
7112
7113 static tree
7114 c_strlen (src)
7115 tree src;
7116 {
7117 tree offset_node;
7118 int offset, max;
7119 char *ptr;
7120
7121 src = string_constant (src, &offset_node);
7122 if (src == 0)
7123 return 0;
7124 max = TREE_STRING_LENGTH (src);
7125 ptr = TREE_STRING_POINTER (src);
7126 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7127 {
7128 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7129 compute the offset to the following null if we don't know where to
7130 start searching for it. */
7131 int i;
7132 for (i = 0; i < max; i++)
7133 if (ptr[i] == 0)
7134 return 0;
7135 /* We don't know the starting offset, but we do know that the string
7136 has no internal zero bytes. We can assume that the offset falls
7137 within the bounds of the string; otherwise, the programmer deserves
7138 what he gets. Subtract the offset from the length of the string,
7139 and return that. */
7140 /* This would perhaps not be valid if we were dealing with named
7141 arrays in addition to literal string constants. */
7142 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7143 }
7144
7145 /* We have a known offset into the string. Start searching there for
7146 a null character. */
7147 if (offset_node == 0)
7148 offset = 0;
7149 else
7150 {
7151 /* Did we get a long long offset? If so, punt. */
7152 if (TREE_INT_CST_HIGH (offset_node) != 0)
7153 return 0;
7154 offset = TREE_INT_CST_LOW (offset_node);
7155 }
7156 /* If the offset is known to be out of bounds, warn, and call strlen at
7157 runtime. */
7158 if (offset < 0 || offset > max)
7159 {
7160 warning ("offset outside bounds of constant string");
7161 return 0;
7162 }
7163 /* Use strlen to search for the first zero byte. Since any strings
7164 constructed with build_string will have nulls appended, we win even
7165 if we get handed something like (char[4])"abcd".
7166
7167 Since OFFSET is our starting index into the string, no further
7168 calculation is needed. */
7169 return size_int (strlen (ptr + offset));
7170 }
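/* For illustration (hypothetical source):

       strlen ("foobar" + 2)     => folded to 4 here
       strlen ("foo\0bar" + n)   => 0 returned; expanded as a real call,

   since a non-constant offset into a string with an embedded null cannot
   be resolved at compile time.  */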
7171
7172 rtx
7173 expand_builtin_return_addr (fndecl_code, count, tem)
7174 enum built_in_function fndecl_code;
7175 int count;
7176 rtx tem;
7177 {
7178 int i;
7179
7180 /* Some machines need special handling before we can access
7181 arbitrary frames. For example, on the sparc, we must first flush
7182 all register windows to the stack. */
7183 #ifdef SETUP_FRAME_ADDRESSES
7184 SETUP_FRAME_ADDRESSES ();
7185 #endif
7186
7187 /* On the sparc, the return address is not in the frame; it is in a
7188 register. There is no way to access it off of the current frame
7189 pointer, but it can be accessed off the previous frame pointer by
7190 reading the value from the register window save area. */
7191 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7192 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7193 count--;
7194 #endif
7195
7196 /* Scan back COUNT frames to the specified frame. */
7197 for (i = 0; i < count; i++)
7198 {
7199 /* Assume the dynamic chain pointer is in the word that the
7200 frame address points to, unless otherwise specified. */
7201 #ifdef DYNAMIC_CHAIN_ADDRESS
7202 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7203 #endif
7204 tem = memory_address (Pmode, tem);
7205 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7206 }
7207
7208 /* For __builtin_frame_address, return what we've got. */
7209 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7210 return tem;
7211
7212 /* For __builtin_return_address, get the return address from that
7213 frame. */
7214 #ifdef RETURN_ADDR_RTX
7215 tem = RETURN_ADDR_RTX (count, tem);
7216 #else
7217 tem = memory_address (Pmode,
7218 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7219 tem = gen_rtx (MEM, Pmode, tem);
7220 #endif
7221 return tem;
7222 }
7223 \f
7224 /* Expand an expression EXP that calls a built-in function,
7225 with result going to TARGET if that's convenient
7226 (and in mode MODE if that's convenient).
7227 SUBTARGET may be used as the target for computing one of EXP's operands.
7228 IGNORE is nonzero if the value is to be ignored. */
7229
7230 #define CALLED_AS_BUILT_IN(NODE) \
7231 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7232
7233 static rtx
7234 expand_builtin (exp, target, subtarget, mode, ignore)
7235 tree exp;
7236 rtx target;
7237 rtx subtarget;
7238 enum machine_mode mode;
7239 int ignore;
7240 {
7241 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7242 tree arglist = TREE_OPERAND (exp, 1);
7243 rtx op0;
7244 rtx lab1, insns;
7245 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7246 optab builtin_optab;
7247
7248 switch (DECL_FUNCTION_CODE (fndecl))
7249 {
7250 case BUILT_IN_ABS:
7251 case BUILT_IN_LABS:
7252 case BUILT_IN_FABS:
7253 /* build_function_call changes these into ABS_EXPR. */
7254 abort ();
7255
7256 case BUILT_IN_SIN:
7257 case BUILT_IN_COS:
7258 /* Treat these like sqrt, but only if the user asks for them. */
7259 if (! flag_fast_math)
7260 break;
7261 case BUILT_IN_FSQRT:
7262 /* If not optimizing, call the library function. */
7263 if (! optimize)
7264 break;
7265
7266 if (arglist == 0
7267 /* Arg could be wrong type if user redeclared this fcn wrong. */
7268 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7269 break;
7270
7271 /* Stabilize and compute the argument. */
7272 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7273 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7274 {
7275 exp = copy_node (exp);
7276 arglist = copy_node (arglist);
7277 TREE_OPERAND (exp, 1) = arglist;
7278 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7279 }
7280 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7281
7282 /* Make a suitable register to place result in. */
7283 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7284
7285 emit_queue ();
7286 start_sequence ();
7287
7288 switch (DECL_FUNCTION_CODE (fndecl))
7289 {
7290 case BUILT_IN_SIN:
7291 builtin_optab = sin_optab; break;
7292 case BUILT_IN_COS:
7293 builtin_optab = cos_optab; break;
7294 case BUILT_IN_FSQRT:
7295 builtin_optab = sqrt_optab; break;
7296 default:
7297 abort ();
7298 }
7299
7300 /* Compute into TARGET.
7301 Set TARGET to wherever the result comes back. */
7302 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7303 builtin_optab, op0, target, 0);
7304
7305 /* If we were unable to expand via the builtin, stop the
7306 sequence (without outputting the insns) and break, causing
7307 a call to the library function. */
7308 if (target == 0)
7309 {
7310 end_sequence ();
7311 break;
7312 }
7313
7314 /* Check the results by default. But if flag_fast_math is turned on,
7315 then assume sqrt will always be called with valid arguments. */
7316
7317 if (! flag_fast_math)
7318 {
7319 /* Don't define the builtin FP instructions
7320 if your machine is not IEEE. */
7321 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7322 abort ();
7323
7324 lab1 = gen_label_rtx ();
7325
7326 /* Test the result; if it is NaN, set errno=EDOM because
7327 the argument was not in the domain. */
7328 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7329 emit_jump_insn (gen_beq (lab1));
7330
7331 #ifdef TARGET_EDOM
7332 {
7333 #ifdef GEN_ERRNO_RTX
7334 rtx errno_rtx = GEN_ERRNO_RTX;
7335 #else
7336 rtx errno_rtx
7337 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7338 #endif
7339
7340 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7341 }
7342 #else
7343 /* We can't set errno=EDOM directly; let the library call do it.
7344 Pop the arguments right away in case the call gets deleted. */
7345 NO_DEFER_POP;
7346 expand_call (exp, target, 0);
7347 OK_DEFER_POP;
7348 #endif
7349
7350 emit_label (lab1);
7351 }
7352
7353 /* Output the entire sequence. */
7354 insns = get_insns ();
7355 end_sequence ();
7356 emit_insns (insns);
7357
7358 return target;
7359
7360 /* __builtin_apply_args returns a block of memory allocated on
7361 the stack into which is stored the arg pointer, structure
7362 value address, static chain, and all the registers that might
7363 possibly be used in performing a function call. The code is
7364 moved to the start of the function so the incoming values are
7365 saved. */
7366 case BUILT_IN_APPLY_ARGS:
7367 /* Don't do __builtin_apply_args more than once in a function.
7368 Save the result of the first call and reuse it. */
7369 if (apply_args_value != 0)
7370 return apply_args_value;
7371 {
7372 /* When this function is called, it means that registers must be
7373 saved on entry to this function. So we migrate the
7374 call to the first insn of this function. */
7375 rtx temp;
7376 rtx seq;
7377
7378 start_sequence ();
7379 temp = expand_builtin_apply_args ();
7380 seq = get_insns ();
7381 end_sequence ();
7382
7383 apply_args_value = temp;
7384
7385 /* Put the sequence after the NOTE that starts the function.
7386 If this is inside a SEQUENCE, make the outer-level insn
7387 chain current, so the code is placed at the start of the
7388 function. */
7389 push_topmost_sequence ();
7390 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7391 pop_topmost_sequence ();
7392 return temp;
7393 }
7394
7395 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7396 FUNCTION with a copy of the parameters described by
7397 ARGUMENTS, and ARGSIZE. It returns a block of memory
7398 allocated on the stack into which is stored all the registers
7399 that might possibly be used for returning the result of a
7400 function. ARGUMENTS is the value returned by
7401 __builtin_apply_args. ARGSIZE is the number of bytes of
7402 arguments that must be copied. ??? How should this value be
7403 computed? We'll also need a safe worst case value for varargs
7404 functions. */
7405 case BUILT_IN_APPLY:
7406 if (arglist == 0
7407 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7408 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7409 || TREE_CHAIN (arglist) == 0
7410 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7411 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7412 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7413 return const0_rtx;
7414 else
7415 {
7416 int i;
7417 tree t;
7418 rtx ops[3];
7419
7420 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7421 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7422
7423 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7424 }
7425
7426 /* __builtin_return (RESULT) causes the function to return the
7427 value described by RESULT. RESULT is address of the block of
7428 memory returned by __builtin_apply. */
7429 case BUILT_IN_RETURN:
7430 if (arglist
7431 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7432 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7433 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7434 NULL_RTX, VOIDmode, 0));
7435 return const0_rtx;
7436
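/* Taken together, __builtin_apply_args, __builtin_apply and
   __builtin_return let user code forward a call wholesale. A minimal
   sketch (hypothetical user code; TARGET_FN is assumed, and 64 stands
   in for a safe upper bound on the argument block size):

       void *forward ()
       {
         void *args = __builtin_apply_args ();
         void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
         __builtin_return (result);
       }
*/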
7437 case BUILT_IN_SAVEREGS:
7438 /* Don't do __builtin_saveregs more than once in a function.
7439 Save the result of the first call and reuse it. */
7440 if (saveregs_value != 0)
7441 return saveregs_value;
7442 {
7443 /* When this function is called, it means that registers must be
7444 saved on entry to this function. So we migrate the
7445 call to the first insn of this function. */
7446 rtx temp;
7447 rtx seq;
7448
7449 /* Now really call the function. `expand_call' does not call
7450 expand_builtin, so there is no danger of infinite recursion here. */
7451 start_sequence ();
7452
7453 #ifdef EXPAND_BUILTIN_SAVEREGS
7454 /* Do whatever the machine needs done in this case. */
7455 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7456 #else
7457 /* The register where the function returns its value
7458 is likely to have something else in it, such as an argument.
7459 So preserve that register around the call. */
7460
7461 if (value_mode != VOIDmode)
7462 {
7463 rtx valreg = hard_libcall_value (value_mode);
7464 rtx saved_valreg = gen_reg_rtx (value_mode);
7465
7466 emit_move_insn (saved_valreg, valreg);
7467 temp = expand_call (exp, target, ignore);
7468 emit_move_insn (valreg, saved_valreg);
7469 }
7470 else
7471 /* Generate the call, putting the value in a pseudo. */
7472 temp = expand_call (exp, target, ignore);
7473 #endif
7474
7475 seq = get_insns ();
7476 end_sequence ();
7477
7478 saveregs_value = temp;
7479
7480 /* Put the sequence after the NOTE that starts the function.
7481 If this is inside a SEQUENCE, make the outer-level insn
7482 chain current, so the code is placed at the start of the
7483 function. */
7484 push_topmost_sequence ();
7485 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7486 pop_topmost_sequence ();
7487 return temp;
7488 }
7489
7490 /* __builtin_args_info (N) returns word N of the arg space info
7491 for the current function. The number and meanings of the words
7492 are controlled by the definition of CUMULATIVE_ARGS. */
7493 case BUILT_IN_ARGS_INFO:
7494 {
7495 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7496 int i;
7497 int *word_ptr = (int *) &current_function_args_info;
7498 tree type, elts, result;
7499
7500 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7501 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7502 __FILE__, __LINE__);
7503
7504 if (arglist != 0)
7505 {
7506 tree arg = TREE_VALUE (arglist);
7507 if (TREE_CODE (arg) != INTEGER_CST)
7508 error ("argument of `__builtin_args_info' must be constant");
7509 else
7510 {
7511 int wordnum = TREE_INT_CST_LOW (arg);
7512
7513 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
7514 error ("argument of `__builtin_args_info' out of range");
7515 else
7516 return GEN_INT (word_ptr[wordnum]);
7517 }
7518 }
7519 else
7520 error ("missing argument in `__builtin_args_info'");
7521
7522 return const0_rtx;
7523
7524 #if 0
7525 for (elts = NULL_TREE, i = 0; i < nwords; i++)
7526 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
7527
7528 type = build_array_type (integer_type_node,
7529 build_index_type (build_int_2 (nwords, 0)));
7530 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
7531 TREE_CONSTANT (result) = 1;
7532 TREE_STATIC (result) = 1;
7533 result = build (INDIRECT_REF, build_pointer_type (type), result);
7534 TREE_CONSTANT (result) = 1;
7535 return expand_expr (result, NULL_RTX, VOIDmode, 0);
7536 #endif
7537 }
7538
7539 /* Return the address of the first anonymous stack arg. */
7540 case BUILT_IN_NEXT_ARG:
7541 {
7542 tree fntype = TREE_TYPE (current_function_decl);
7543
7544 if ((TYPE_ARG_TYPES (fntype) == 0
7545 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
7546 == void_type_node))
7547 && ! current_function_varargs)
7548 {
7549 error ("`va_start' used in function with fixed args");
7550 return const0_rtx;
7551 }
7552
7553 if (arglist)
7554 {
7555 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
7556 tree arg = TREE_VALUE (arglist);
7557
7558 /* Strip off all nops for the sake of the comparison. This
7559 is not quite the same as STRIP_NOPS. It does more. */
7560 while (TREE_CODE (arg) == NOP_EXPR
7561 || TREE_CODE (arg) == CONVERT_EXPR
7562 || TREE_CODE (arg) == NON_LVALUE_EXPR)
7563 arg = TREE_OPERAND (arg, 0);
7564 if (arg != last_parm)
7565 warning ("second parameter of `va_start' not last named argument");
7566 }
7567 else if (! current_function_varargs)
7568 /* Evidently an out-of-date version of <stdarg.h>; we can't validate
7569 va_start's second argument, but we can still work as intended. */
7570 warning ("`__builtin_next_arg' called without an argument");
7571 }
7572
7573 return expand_binop (Pmode, add_optab,
7574 current_function_internal_arg_pointer,
7575 current_function_arg_offset_rtx,
7576 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7577
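/* __builtin_next_arg is what va_start expands to; a sketch of the
   <stdarg.h> definition (illustrative, not the installed header):

       #define va_start(AP, LASTARG) \
         ((AP) = (char *) __builtin_next_arg (LASTARG))
*/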
7578 case BUILT_IN_CLASSIFY_TYPE:
7579 if (arglist != 0)
7580 {
7581 tree type = TREE_TYPE (TREE_VALUE (arglist));
7582 enum tree_code code = TREE_CODE (type);
7583 if (code == VOID_TYPE)
7584 return GEN_INT (void_type_class);
7585 if (code == INTEGER_TYPE)
7586 return GEN_INT (integer_type_class);
7587 if (code == CHAR_TYPE)
7588 return GEN_INT (char_type_class);
7589 if (code == ENUMERAL_TYPE)
7590 return GEN_INT (enumeral_type_class);
7591 if (code == BOOLEAN_TYPE)
7592 return GEN_INT (boolean_type_class);
7593 if (code == POINTER_TYPE)
7594 return GEN_INT (pointer_type_class);
7595 if (code == REFERENCE_TYPE)
7596 return GEN_INT (reference_type_class);
7597 if (code == OFFSET_TYPE)
7598 return GEN_INT (offset_type_class);
7599 if (code == REAL_TYPE)
7600 return GEN_INT (real_type_class);
7601 if (code == COMPLEX_TYPE)
7602 return GEN_INT (complex_type_class);
7603 if (code == FUNCTION_TYPE)
7604 return GEN_INT (function_type_class);
7605 if (code == METHOD_TYPE)
7606 return GEN_INT (method_type_class);
7607 if (code == RECORD_TYPE)
7608 return GEN_INT (record_type_class);
7609 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7610 return GEN_INT (union_type_class);
7611 if (code == ARRAY_TYPE)
7612 {
7613 if (TYPE_STRING_FLAG (type))
7614 return GEN_INT (string_type_class);
7615 else
7616 return GEN_INT (array_type_class);
7617 }
7618 if (code == SET_TYPE)
7619 return GEN_INT (set_type_class);
7620 if (code == FILE_TYPE)
7621 return GEN_INT (file_type_class);
7622 if (code == LANG_TYPE)
7623 return GEN_INT (lang_type_class);
7624 }
7625 return GEN_INT (no_type_class);
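/* For illustration (class values come from typeclass.h; X is a
   hypothetical variable):

       __builtin_classify_type (42)    => integer_type_class
       __builtin_classify_type (3.14)  => real_type_class
       __builtin_classify_type (&x)    => pointer_type_class

   This lets generic argument-forwarding code dispatch on the class of
   an argument's type.  */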
7626
7627 case BUILT_IN_CONSTANT_P:
7628 if (arglist == 0)
7629 return const0_rtx;
7630 else
7631 {
7632 tree arg = TREE_VALUE (arglist);
7633
7634 STRIP_NOPS (arg);
7635 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
7636 || (TREE_CODE (arg) == ADDR_EXPR
7637 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7638 ? const1_rtx : const0_rtx);
7639 }
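/* E.g. `__builtin_constant_p (3)' and `__builtin_constant_p ("abc")'
   expand to 1 here, while `__builtin_constant_p (x)' for a variable X
   expands to 0. Folding has already happened, so an argument written
   as `1 + 2' arrives as the constant 3.  */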
7640
7641 case BUILT_IN_FRAME_ADDRESS:
7642 /* The argument must be a nonnegative integer constant.
7643 It counts the number of frames to scan up the stack.
7644 The value is the address of that frame. */
7645 case BUILT_IN_RETURN_ADDRESS:
7646 /* The argument must be a nonnegative integer constant.
7647 It counts the number of frames to scan up the stack.
7648 The value is the return address saved in that frame. */
7649 if (arglist == 0)
7650 /* Warning about missing arg was already issued. */
7651 return const0_rtx;
7652 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7653 {
7654 error ("invalid arg to `__builtin_return_address'");
7655 return const0_rtx;
7656 }
7657 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7658 {
7659 error ("invalid arg to `__builtin_return_address'");
7660 return const0_rtx;
7661 }
7662 else
7663 {
7664 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
7665 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
7666 hard_frame_pointer_rtx);
7667
7668 /* For __builtin_frame_address, return what we've got. */
7669 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7670 return tem;
7671
7672 if (GET_CODE (tem) != REG)
7673 tem = copy_to_reg (tem);
7674 return tem;
7675 }
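/* Typical uses (illustrative): `__builtin_return_address (0)' gives the
   address the current function will return to, and
   `__builtin_frame_address (0)' gives the current frame; a count of N
   walks N frames up the chain, which is only as reliable as the target's
   frame chain makes it.  */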
7676
7677 case BUILT_IN_ALLOCA:
7678 if (arglist == 0
7679 /* Arg could be non-integer if user redeclared this fcn wrong. */
7680 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7681 break;
7682
7683 /* Compute the argument. */
7684 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7685
7686 /* Allocate the desired space. */
7687 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
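/* So a use such as `char *p = alloca (n);' (hypothetical) becomes an
   in-line stack adjustment in the caller's frame, and the space is
   reclaimed automatically when the function exits.  */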
7688
7689 case BUILT_IN_FFS:
7690 /* If not optimizing, call the library function. */
7691 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7692 break;
7693
7694 if (arglist == 0
7695 /* Arg could be non-integer if user redeclared this fcn wrong. */
7696 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7697 break;
7698
7699 /* Compute the argument. */
7700 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7701 /* Compute ffs, into TARGET if possible.
7702 Set TARGET to wherever the result comes back. */
7703 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7704 ffs_optab, op0, target, 1);
7705 if (target == 0)
7706 abort ();
7707 return target;
7708
7709 case BUILT_IN_STRLEN:
7710 /* If not optimizing, call the library function. */
7711 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7712 break;
7713
7714 if (arglist == 0
7715 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7716 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7717 break;
7718 else
7719 {
7720 tree src = TREE_VALUE (arglist);
7721 tree len = c_strlen (src);
7722
7723 int align
7724 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7725
7726 rtx result, src_rtx, char_rtx;
7727 enum machine_mode insn_mode = value_mode, char_mode;
7728 enum insn_code icode;
7729
7730 /* If the length is known, just return it. */
7731 if (len != 0)
7732 return expand_expr (len, target, mode, 0);
7733
7734 /* If SRC is not a pointer type, don't do this operation inline. */
7735 if (align == 0)
7736 break;
7737
7738 /* Call a function if we can't compute strlen in the right mode. */
7739
7740 while (insn_mode != VOIDmode)
7741 {
7742 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7743 if (icode != CODE_FOR_nothing)
7744 break;
7745
7746 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7747 }
7748 if (insn_mode == VOIDmode)
7749 break;
7750
7751 /* Make a place to write the result of the instruction. */
7752 result = target;
7753 if (! (result != 0
7754 && GET_CODE (result) == REG
7755 && GET_MODE (result) == insn_mode
7756 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7757 result = gen_reg_rtx (insn_mode);
7758
7759 /* Make sure the operands are acceptable to the predicates. */
7760
7761 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7762 result = gen_reg_rtx (insn_mode);
7763
7764 src_rtx = memory_address (BLKmode,
7765 expand_expr (src, NULL_RTX, ptr_mode,
7766 EXPAND_NORMAL));
7767 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7768 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7769
7770 char_rtx = const0_rtx;
7771 char_mode = insn_operand_mode[(int)icode][2];
7772 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7773 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7774
7775 emit_insn (GEN_FCN (icode) (result,
7776 gen_rtx (MEM, BLKmode, src_rtx),
7777 char_rtx, GEN_INT (align)));
7778
7779 /* Return the value in the proper mode for this function. */
7780 if (GET_MODE (result) == value_mode)
7781 return result;
7782 else if (target != 0)
7783 {
7784 convert_move (target, result, 0);
7785 return target;
7786 }
7787 else
7788 return convert_to_mode (value_mode, result, 0);
7789 }
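/* Thus `strlen ("abc")' was already folded to 3 via c_strlen above, while
   `strlen (s)' for an unknown S becomes the target's strlen pattern (a
   scan for a zero byte) when one is available, and a library call
   otherwise.  */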
7790
7791 case BUILT_IN_STRCPY:
7792 /* If not optimizing, call the library function. */
7793 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7794 break;
7795
7796 if (arglist == 0
7797 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7798 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7799 || TREE_CHAIN (arglist) == 0
7800 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7801 break;
7802 else
7803 {
7804 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7805
7806 if (len == 0)
7807 break;
7808
7809 len = size_binop (PLUS_EXPR, len, integer_one_node);
7810
7811 chainon (arglist, build_tree_list (NULL_TREE, len));
7812 }
7813
7814 /* Falls through to the memcpy handling below. */
7815 case BUILT_IN_MEMCPY:
7816 /* If not optimizing, call the library function. */
7817 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7818 break;
7819
7820 if (arglist == 0
7821 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7822 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7823 || TREE_CHAIN (arglist) == 0
7824 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7825 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7826 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7827 break;
7828 else
7829 {
7830 tree dest = TREE_VALUE (arglist);
7831 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7832 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7833 tree type;
7834
7835 int src_align
7836 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7837 int dest_align
7838 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7839 rtx dest_rtx, dest_mem, src_mem;
7840
7841 /* If either SRC or DEST is not a pointer type, don't do
7842 this operation in-line. */
7843 if (src_align == 0 || dest_align == 0)
7844 {
7845 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7846 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7847 break;
7848 }
7849
7850 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
7851 dest_mem = gen_rtx (MEM, BLKmode,
7852 memory_address (BLKmode, dest_rtx));
7853 /* There could be a void* cast on top of the object. */
7854 while (TREE_CODE (dest) == NOP_EXPR)
7855 dest = TREE_OPERAND (dest, 0);
7856 type = TREE_TYPE (TREE_TYPE (dest));
7857 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
7858 src_mem = gen_rtx (MEM, BLKmode,
7859 memory_address (BLKmode,
7860 expand_expr (src, NULL_RTX,
7861 ptr_mode,
7862 EXPAND_SUM)));
7863 /* There could be a void* cast on top of the object. */
7864 while (TREE_CODE (src) == NOP_EXPR)
7865 src = TREE_OPERAND (src, 0);
7866 type = TREE_TYPE (TREE_TYPE (src));
7867 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
7868
7869 /* Copy the block as expediently as the alignment allows. */
7870 emit_block_move (dest_mem, src_mem,
7871 expand_expr (len, NULL_RTX, VOIDmode, 0),
7872 MIN (src_align, dest_align));
7873 return force_operand (dest_rtx, NULL_RTX);
7874 }
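/* Hence `strcpy (buf, "hi")' (BUF a char array, illustrative) acquires a
   third argument of 3 -- strlen ("hi") plus the terminating null -- and
   is then expanded by the memcpy code above as a block move rather than
   a library call.  */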
7875
7876 /* These comparison functions need an instruction that returns an actual
7877 index. An ordinary compare that just sets the condition codes
7878 is not enough. */
7879 #ifdef HAVE_cmpstrsi
7880 case BUILT_IN_STRCMP:
7881 /* If not optimizing, call the library function. */
7882 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7883 break;
7884
7885 if (arglist == 0
7886 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7887 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7888 || TREE_CHAIN (arglist) == 0
7889 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7890 break;
7891 else if (!HAVE_cmpstrsi)
7892 break;
7893 {
7894 tree arg1 = TREE_VALUE (arglist);
7895 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7896 tree offset;
7897 tree len, len2;
7898
7899 len = c_strlen (arg1);
7900 if (len)
7901 len = size_binop (PLUS_EXPR, integer_one_node, len);
7902 len2 = c_strlen (arg2);
7903 if (len2)
7904 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7905
7906 /* If we don't have a constant length for the first, use the length
7907 of the second, if we know it. We don't require a constant for
7908 this case; some cost analysis could be done if both are available
7909 but neither is constant. For now, assume they're equally cheap.
7910
7911 If both strings have constant lengths, use the smaller. This
7912 could arise if optimization results in strcpy being called with
7913 two fixed strings, or if the code was machine-generated. We should
7914 add some code to the `memcmp' handler below to deal with such
7915 situations, someday. */
7916 if (!len || TREE_CODE (len) != INTEGER_CST)
7917 {
7918 if (len2)
7919 len = len2;
7920 else if (len == 0)
7921 break;
7922 }
7923 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7924 {
7925 if (tree_int_cst_lt (len2, len))
7926 len = len2;
7927 }
7928
7929 chainon (arglist, build_tree_list (NULL_TREE, len));
7930 }
7931
7932 /* Falls through to the memcmp handling below. */
7933 case BUILT_IN_MEMCMP:
7934 /* If not optimizing, call the library function. */
7935 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7936 break;
7937
7938 if (arglist == 0
7939 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7940 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7941 || TREE_CHAIN (arglist) == 0
7942 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7943 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7944 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7945 break;
7946 else if (!HAVE_cmpstrsi)
7947 break;
7948 {
7949 tree arg1 = TREE_VALUE (arglist);
7950 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7951 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7952 rtx result;
7953
7954 int arg1_align
7955 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7956 int arg2_align
7957 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7958 enum machine_mode insn_mode
7959 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7960
7961 /* If either argument is not a pointer type, call the library function. */
7962 if (arg1_align == 0 || arg2_align == 0)
7963 {
7964 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7965 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7966 break;
7967 }
7968
7969 /* Make a place to write the result of the instruction. */
7970 result = target;
7971 if (! (result != 0
7972 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7973 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7974 result = gen_reg_rtx (insn_mode);
7975
7976 emit_insn (gen_cmpstrsi (result,
7977 gen_rtx (MEM, BLKmode,
7978 expand_expr (arg1, NULL_RTX,
7979 ptr_mode,
7980 EXPAND_NORMAL)),
7981 gen_rtx (MEM, BLKmode,
7982 expand_expr (arg2, NULL_RTX,
7983 ptr_mode,
7984 EXPAND_NORMAL)),
7985 expand_expr (len, NULL_RTX, VOIDmode, 0),
7986 GEN_INT (MIN (arg1_align, arg2_align))));
7987
7988 /* Return the value in the proper mode for this function. */
7989 mode = TYPE_MODE (TREE_TYPE (exp));
7990 if (GET_MODE (result) == mode)
7991 return result;
7992 else if (target != 0)
7993 {
7994 convert_move (target, result, 0);
7995 return target;
7996 }
7997 else
7998 return convert_to_mode (mode, result, 0);
7999 }
8000 #else
8001 case BUILT_IN_STRCMP:
8002 case BUILT_IN_MEMCMP:
8003 break;
8004 #endif
8005
8006 default: /* just do library call, if unknown builtin */
8007 error ("built-in function `%s' not currently supported",
8008 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8009 }
8010
8011 /* The switch statement above can drop through to cause the function
8012 to be called normally. */
8013
8014 return expand_call (exp, target, ignore);
8015 }
8016 \f
8017 /* Built-in functions to perform an untyped call and return. */
8018
8019 /* For each register that may be used for calling a function, this
8020 gives a mode used to copy the register's value. VOIDmode indicates
8021 the register is not used for calling a function. If the machine
8022 has register windows, this gives only the outbound registers.
8023 INCOMING_REGNO gives the corresponding inbound register. */
8024 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8025
8026 /* For each register that may be used for returning values, this gives
8027 a mode used to copy the register's value. VOIDmode indicates the
8028 register is not used for returning values. If the machine has
8029 register windows, this gives only the outbound registers.
8030 INCOMING_REGNO gives the corresponding inbound register. */
8031 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8032
8033 /* For each register that may be used for calling a function, this
8034 gives the offset of that register into the block returned by
8035 __builtin_apply_args. 0 indicates that the register is not
8036 used for calling a function. */
8037 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8038
8039 /* Return the offset of register REGNO into the block returned by
8040 __builtin_apply_args. This is not declared static, since it is
8041 needed in objc-act.c. */
8042
8043 int
8044 apply_args_register_offset (regno)
8045 int regno;
8046 {
8047 apply_args_size ();
8048
8049 /* Arguments are always put in outgoing registers (in the argument
8050 block) when that makes sense. */
8051 #ifdef OUTGOING_REGNO
8052 regno = OUTGOING_REGNO(regno);
8053 #endif
8054 return apply_args_reg_offset[regno];
8055 }
8056
8057 /* Return the size required for the block returned by __builtin_apply_args,
8058 and initialize apply_args_mode. */
8059
8060 static int
8061 apply_args_size ()
8062 {
8063 static int size = -1;
8064 int align, regno;
8065 enum machine_mode mode;
8066
8067 /* The values computed by this function never change. */
8068 if (size < 0)
8069 {
8070 /* The first value is the incoming arg-pointer. */
8071 size = GET_MODE_SIZE (Pmode);
8072
8073 /* The second value is the structure value address unless this is
8074 passed as an "invisible" first argument. */
8075 if (struct_value_rtx)
8076 size += GET_MODE_SIZE (Pmode);
8077
8078 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8079 if (FUNCTION_ARG_REGNO_P (regno))
8080 {
8081 /* Search for the proper mode for copying this register's
8082 value. I'm not sure this is right, but it works so far. */
8083 enum machine_mode best_mode = VOIDmode;
8084
8085 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8086 mode != VOIDmode;
8087 mode = GET_MODE_WIDER_MODE (mode))
8088 if (HARD_REGNO_MODE_OK (regno, mode)
8089 && HARD_REGNO_NREGS (regno, mode) == 1)
8090 best_mode = mode;
8091
8092 if (best_mode == VOIDmode)
8093 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8094 mode != VOIDmode;
8095 mode = GET_MODE_WIDER_MODE (mode))
8096 if (HARD_REGNO_MODE_OK (regno, mode)
8097 && (mov_optab->handlers[(int) mode].insn_code
8098 != CODE_FOR_nothing))
8099 best_mode = mode;
8100
8101 mode = best_mode;
8102 if (mode == VOIDmode)
8103 abort ();
8104
8105 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8106 if (size % align != 0)
8107 size = CEIL (size, align) * align;
8108 apply_args_reg_offset[regno] = size;
8109 size += GET_MODE_SIZE (mode);
8110 apply_args_mode[regno] = mode;
8111 }
8112 else
8113 {
8114 apply_args_mode[regno] = VOIDmode;
8115 apply_args_reg_offset[regno] = 0;
8116 }
8117 }
8118 return size;
8119 }
8120
8121 /* Return the size required for the block returned by __builtin_apply,
8122 and initialize apply_result_mode. */
8123
8124 static int
8125 apply_result_size ()
8126 {
8127 static int size = -1;
8128 int align, regno;
8129 enum machine_mode mode;
8130
8131 /* The values computed by this function never change. */
8132 if (size < 0)
8133 {
8134 size = 0;
8135
8136 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8137 if (FUNCTION_VALUE_REGNO_P (regno))
8138 {
8139 /* Search for the proper mode for copying this register's
8140 value. I'm not sure this is right, but it works so far. */
8141 enum machine_mode best_mode = VOIDmode;
8142
8143 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8144 mode != TImode;
8145 mode = GET_MODE_WIDER_MODE (mode))
8146 if (HARD_REGNO_MODE_OK (regno, mode))
8147 best_mode = mode;
8148
8149 if (best_mode == VOIDmode)
8150 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8151 mode != VOIDmode;
8152 mode = GET_MODE_WIDER_MODE (mode))
8153 if (HARD_REGNO_MODE_OK (regno, mode)
8154 && (mov_optab->handlers[(int) mode].insn_code
8155 != CODE_FOR_nothing))
8156 best_mode = mode;
8157
8158 mode = best_mode;
8159 if (mode == VOIDmode)
8160 abort ();
8161
8162 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8163 if (size % align != 0)
8164 size = CEIL (size, align) * align;
8165 size += GET_MODE_SIZE (mode);
8166 apply_result_mode[regno] = mode;
8167 }
8168 else
8169 apply_result_mode[regno] = VOIDmode;
8170
8171 /* Allow targets that use untyped_call and untyped_return to override
8172 the size so that machine-specific information can be stored here. */
8173 #ifdef APPLY_RESULT_SIZE
8174 size = APPLY_RESULT_SIZE;
8175 #endif
8176 }
8177 return size;
8178 }
8179
8180 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8181 /* Create a vector describing the result block RESULT. If SAVEP is true,
8182 the result block is used to save the values; otherwise it is used to
8183 restore the values. */
8184
8185 static rtx
8186 result_vector (savep, result)
8187 int savep;
8188 rtx result;
8189 {
8190 int regno, size, align, nelts;
8191 enum machine_mode mode;
8192 rtx reg, mem;
8193 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8194
8195 size = nelts = 0;
8196 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8197 if ((mode = apply_result_mode[regno]) != VOIDmode)
8198 {
8199 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8200 if (size % align != 0)
8201 size = CEIL (size, align) * align;
8202 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8203 mem = change_address (result, mode,
8204 plus_constant (XEXP (result, 0), size));
8205 savevec[nelts++] = (savep
8206 ? gen_rtx (SET, VOIDmode, mem, reg)
8207 : gen_rtx (SET, VOIDmode, reg, mem));
8208 size += GET_MODE_SIZE (mode);
8209 }
8210 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8211 }
8212 #endif /* HAVE_untyped_call or HAVE_untyped_return */
8213
8214 /* Save the state required to perform an untyped call with the same
8215 arguments as were passed to the current function. */
8216
8217 static rtx
8218 expand_builtin_apply_args ()
8219 {
8220 rtx registers;
8221 int size, align, regno;
8222 enum machine_mode mode;
8223
8224 /* Create a block where the arg-pointer, structure value address,
8225 and argument registers can be saved. */
8226 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8227
8228 /* Walk past the arg-pointer and structure value address. */
8229 size = GET_MODE_SIZE (Pmode);
8230 if (struct_value_rtx)
8231 size += GET_MODE_SIZE (Pmode);
8232
8233 /* Save each register used in calling a function to the block. */
8234 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8235 if ((mode = apply_args_mode[regno]) != VOIDmode)
8236 {
8237 rtx tem;
8238
8239 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8240 if (size % align != 0)
8241 size = CEIL (size, align) * align;
8242
8243 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8244
8245 #ifdef STACK_REGS
8246 /* For reg-stack.c's stack register housekeeping.
8247 Compare with a similar piece of code in function.c. */
8248
8249 emit_insn (gen_rtx (USE, mode, tem));
8250 #endif
8251
8252 emit_move_insn (change_address (registers, mode,
8253 plus_constant (XEXP (registers, 0),
8254 size)),
8255 tem);
8256 size += GET_MODE_SIZE (mode);
8257 }
8258
8259 /* Save the arg pointer to the block. */
8260 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8261 copy_to_reg (virtual_incoming_args_rtx));
8262 size = GET_MODE_SIZE (Pmode);
8263
8264 /* Save the structure value address unless this is passed as an
8265 "invisible" first argument. */
8266 if (struct_value_incoming_rtx)
8267 {
8268 emit_move_insn (change_address (registers, Pmode,
8269 plus_constant (XEXP (registers, 0),
8270 size)),
8271 copy_to_reg (struct_value_incoming_rtx));
8272 size += GET_MODE_SIZE (Pmode);
8273 }
8274
8275 /* Return the address of the block. */
8276 return copy_addr_to_reg (XEXP (registers, 0));
8277 }
8278
8279 /* Perform an untyped call and save the state required to perform an
8280 untyped return of whatever value was returned by the given function. */
8281
8282 static rtx
8283 expand_builtin_apply (function, arguments, argsize)
8284 rtx function, arguments, argsize;
8285 {
8286 int size, align, regno;
8287 enum machine_mode mode;
8288 rtx incoming_args, result, reg, dest, call_insn;
8289 rtx old_stack_level = 0;
8290 rtx call_fusage = 0;
8291
8292 /* Create a block where the return registers can be saved. */
8293 result = assign_stack_local (BLKmode, apply_result_size (), -1);
8294
8295 /* ??? The argsize value should be adjusted here. */
8296
8297 /* Fetch the arg pointer from the ARGUMENTS block. */
8298 incoming_args = gen_reg_rtx (Pmode);
8299 emit_move_insn (incoming_args,
8300 gen_rtx (MEM, Pmode, arguments));
8301 #ifndef STACK_GROWS_DOWNWARD
8302 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8303 incoming_args, 0, OPTAB_LIB_WIDEN);
8304 #endif
8305
8306 /* Perform postincrements before actually calling the function. */
8307 emit_queue ();
8308
8309 /* Push a new argument block and copy the arguments. */
8310 do_pending_stack_adjust ();
8311 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
8312
8313 /* Push a block of memory onto the stack to store the memory arguments.
8314 Save the address in a register, and copy the memory arguments. ??? I
8315 haven't figured out how the calling convention macros affect this,
8316 but it's likely that the source and/or destination addresses in
8317 the block copy will need updating in machine specific ways. */
8318 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8319 emit_block_move (gen_rtx (MEM, BLKmode, dest),
8320 gen_rtx (MEM, BLKmode, incoming_args),
8321 argsize,
8322 PARM_BOUNDARY / BITS_PER_UNIT);
8323
8324 /* Refer to the argument block. */
8325 apply_args_size ();
8326 arguments = gen_rtx (MEM, BLKmode, arguments);
8327
8328 /* Walk past the arg-pointer and structure value address. */
8329 size = GET_MODE_SIZE (Pmode);
8330 if (struct_value_rtx)
8331 size += GET_MODE_SIZE (Pmode);
8332
8333 /* Restore each of the registers previously saved. Make USE insns
8334 for each of these registers for use in making the call. */
8335 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8336 if ((mode = apply_args_mode[regno]) != VOIDmode)
8337 {
8338 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8339 if (size % align != 0)
8340 size = CEIL (size, align) * align;
8341 reg = gen_rtx (REG, mode, regno);
8342 emit_move_insn (reg,
8343 change_address (arguments, mode,
8344 plus_constant (XEXP (arguments, 0),
8345 size)));
8346
8347 use_reg (&call_fusage, reg);
8348 size += GET_MODE_SIZE (mode);
8349 }
8350
8351 /* Restore the structure value address unless this is passed as an
8352 "invisible" first argument. */
8353 size = GET_MODE_SIZE (Pmode);
8354 if (struct_value_rtx)
8355 {
8356 rtx value = gen_reg_rtx (Pmode);
8357 emit_move_insn (value,
8358 change_address (arguments, Pmode,
8359 plus_constant (XEXP (arguments, 0),
8360 size)));
8361 emit_move_insn (struct_value_rtx, value);
8362 if (GET_CODE (struct_value_rtx) == REG)
8363 use_reg (&call_fusage, struct_value_rtx);
8364 size += GET_MODE_SIZE (Pmode);
8365 }
8366
8367 /* All arguments and registers used for the call are set up by now! */
8368 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
8369
8370 /* Ensure the address is valid. A SYMBOL_REF is already valid, so that
8371 case needs no work; and we don't load it into a register as an
8372 optimization, because prepare_call_address already did so if appropriate. */
8373 if (GET_CODE (function) != SYMBOL_REF)
8374 function = memory_address (FUNCTION_MODE, function);
8375
8376 /* Generate the actual call instruction and save the return value. */
8377 #ifdef HAVE_untyped_call
8378 if (HAVE_untyped_call)
8379 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
8380 result, result_vector (1, result)));
8381 else
8382 #endif
8383 #ifdef HAVE_call_value
8384 if (HAVE_call_value)
8385 {
8386 rtx valreg = 0;
8387
8388 /* Locate the unique return register. It is not possible to
8389 express a call that sets more than one return register using
8390 call_value; use untyped_call for that. In fact, untyped_call
8391 only needs to save the return registers in the given block. */
8392 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8393 if ((mode = apply_result_mode[regno]) != VOIDmode)
8394 {
8395 if (valreg)
8396 abort (); /* HAVE_untyped_call required. */
8397 valreg = gen_rtx (REG, mode, regno);
8398 }
8399
8400 emit_call_insn (gen_call_value (valreg,
8401 gen_rtx (MEM, FUNCTION_MODE, function),
8402 const0_rtx, NULL_RTX, const0_rtx));
8403
8404 emit_move_insn (change_address (result, GET_MODE (valreg),
8405 XEXP (result, 0)),
8406 valreg);
8407 }
8408 else
8409 #endif
8410 abort ();
8411
8412 /* Find the CALL insn we just emitted. */
8413 for (call_insn = get_last_insn ();
8414 call_insn && GET_CODE (call_insn) != CALL_INSN;
8415 call_insn = PREV_INSN (call_insn))
8416 ;
8417
8418 if (! call_insn)
8419 abort ();
8420
8421 /* Put the register usage information on the CALL. If there is already
8422 some usage information, put ours at the end. */
8423 if (CALL_INSN_FUNCTION_USAGE (call_insn))
8424 {
8425 rtx link;
8426
8427 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
8428 link = XEXP (link, 1))
8429 ;
8430
8431 XEXP (link, 1) = call_fusage;
8432 }
8433 else
8434 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
8435
8436 /* Restore the stack. */
8437 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
8438
8439 /* Return the address of the result block. */
8440 return copy_addr_to_reg (XEXP (result, 0));
8441 }
8442
8443 /* Perform an untyped return. */
8444
8445 static void
8446 expand_builtin_return (result)
8447 rtx result;
8448 {
8449 int size, align, regno;
8450 enum machine_mode mode;
8451 rtx reg;
8452 rtx call_fusage = 0;
8453
8454 apply_result_size ();
8455 result = gen_rtx (MEM, BLKmode, result);
8456
8457 #ifdef HAVE_untyped_return
8458 if (HAVE_untyped_return)
8459 {
8460 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
8461 emit_barrier ();
8462 return;
8463 }
8464 #endif
8465
8466 /* Restore the return value and note that each value is used. */
8467 size = 0;
8468 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8469 if ((mode = apply_result_mode[regno]) != VOIDmode)
8470 {
8471 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8472 if (size % align != 0)
8473 size = CEIL (size, align) * align;
8474 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8475 emit_move_insn (reg,
8476 change_address (result, mode,
8477 plus_constant (XEXP (result, 0),
8478 size)));
8479
8480 push_to_sequence (call_fusage);
8481 emit_insn (gen_rtx (USE, VOIDmode, reg));
8482 call_fusage = get_insns ();
8483 end_sequence ();
8484 size += GET_MODE_SIZE (mode);
8485 }
8486
8487 /* Put the USE insns before the return. */
8488 emit_insns (call_fusage);
8489
8490 /* Return whatever value was restored, by jumping directly to the end
8491 of the function. */
8492 expand_null_return ();
8493 }
8494 \f
8495 /* Expand code for a post- or pre- increment or decrement
8496 and return the RTX for the result.
8497 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8498
8499 static rtx
8500 expand_increment (exp, post)
8501 register tree exp;
8502 int post;
8503 {
8504 register rtx op0, op1;
8505 register rtx temp, value;
8506 register tree incremented = TREE_OPERAND (exp, 0);
8507 optab this_optab = add_optab;
8508 int icode;
8509 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8510 int op0_is_copy = 0;
8511 int single_insn = 0;
8512 /* 1 means we can't store into OP0 directly,
8513 because it is a subreg narrower than a word,
8514 and we don't dare clobber the rest of the word. */
8515 int bad_subreg = 0;
8516
8517 if (output_bytecode)
8518 {
8519 bc_expand_expr (exp);
8520 return NULL_RTX;
8521 }
8522
8523 /* Stabilize any component ref that might need to be
8524 evaluated more than once below. */
8525 if (!post
8526 || TREE_CODE (incremented) == BIT_FIELD_REF
8527 || (TREE_CODE (incremented) == COMPONENT_REF
8528 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8529 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8530 incremented = stabilize_reference (incremented);
8531 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8532 ones into save exprs so that they don't accidentally get evaluated
8533 more than once by the code below. */
8534 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8535 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8536 incremented = save_expr (incremented);
8537
8538 /* Compute the operands as RTX.
8539 Note whether OP0 is the actual lvalue or a copy of it:
8540 I believe it is a copy iff it is a register or subreg
8541 and insns were generated in computing it. */
8542
8543 temp = get_last_insn ();
8544 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8545
8546 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8547 in place but instead must do sign- or zero-extension during assignment,
8548 so we copy it into a new register and let the code below use it as
8549 a copy.
8550
8551 Note that we can safely modify this SUBREG since it is known not to be
8552 shared (it was made by the expand_expr call above). */
8553
8554 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8555 {
8556 if (post)
8557 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8558 else
8559 bad_subreg = 1;
8560 }
8561 else if (GET_CODE (op0) == SUBREG
8562 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8563 {
8564 /* We cannot increment this SUBREG in place. If we are
8565 post-incrementing, get a copy of the old value. Otherwise,
8566 just mark that we cannot increment in place. */
8567 if (post)
8568 op0 = copy_to_reg (op0);
8569 else
8570 bad_subreg = 1;
8571 }
8572
8573 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8574 && temp != get_last_insn ());
8575 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8576
8577 /* Decide whether incrementing or decrementing. */
8578 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8579 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8580 this_optab = sub_optab;
8581
8582 /* Convert decrement by a constant into a negative increment. */
8583 if (this_optab == sub_optab
8584 && GET_CODE (op1) == CONST_INT)
8585 {
8586 op1 = GEN_INT (- INTVAL (op1));
8587 this_optab = add_optab;
8588 }
8589
8590 /* For a preincrement, see if we can do this with a single instruction. */
8591 if (!post)
8592 {
8593 icode = (int) this_optab->handlers[(int) mode].insn_code;
8594 if (icode != (int) CODE_FOR_nothing
8595 /* Make sure that OP0 is valid for operands 0 and 1
8596 of the insn we want to queue. */
8597 && (*insn_operand_predicate[icode][0]) (op0, mode)
8598 && (*insn_operand_predicate[icode][1]) (op0, mode)
8599 && (*insn_operand_predicate[icode][2]) (op1, mode))
8600 single_insn = 1;
8601 }
8602
8603 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8604 then we cannot just increment OP0. We must therefore contrive to
8605 increment the original value. Then, for postincrement, we can return
8606 OP0 since it is a copy of the old value. For preincrement, expand here
8607 unless we can do it with a single insn.
8608
8609 Likewise if storing directly into OP0 would clobber high bits
8610 we need to preserve (bad_subreg). */
8611 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8612 {
8613 /* This is the easiest way to increment the value wherever it is.
8614 Problems with multiple evaluation of INCREMENTED are prevented
8615 because either (1) it is a component_ref or preincrement,
8616 in which case it was stabilized above, or (2) it is an array_ref
8617 with constant index in an array in a register, which is
8618 safe to reevaluate. */
8619 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8620 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8621 ? MINUS_EXPR : PLUS_EXPR),
8622 TREE_TYPE (exp),
8623 incremented,
8624 TREE_OPERAND (exp, 1));
8625
8626 while (TREE_CODE (incremented) == NOP_EXPR
8627 || TREE_CODE (incremented) == CONVERT_EXPR)
8628 {
8629 newexp = convert (TREE_TYPE (incremented), newexp);
8630 incremented = TREE_OPERAND (incremented, 0);
8631 }
8632
8633 temp = expand_assignment (incremented, newexp, ! post, 0);
8634 return post ? op0 : temp;
8635 }
8636
8637 if (post)
8638 {
8639 /* We have a true reference to the value in OP0.
8640 If there is an insn to add or subtract in this mode, queue it.
8641 Queueing the increment insn avoids the register shuffling
8642 that often results if we must increment now and first save
8643 the old value for subsequent use. */
8644
8645 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8646 op0 = stabilize (op0);
8647 #endif
8648
8649 icode = (int) this_optab->handlers[(int) mode].insn_code;
8650 if (icode != (int) CODE_FOR_nothing
8651 /* Make sure that OP0 is valid for operands 0 and 1
8652 of the insn we want to queue. */
8653 && (*insn_operand_predicate[icode][0]) (op0, mode)
8654 && (*insn_operand_predicate[icode][1]) (op0, mode))
8655 {
8656 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8657 op1 = force_reg (mode, op1);
8658
8659 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8660 }
8661 }
8662
8663 /* Preincrement, or we can't increment with one simple insn. */
8664 if (post)
8665 /* Save a copy of the value before inc or dec, to return it later. */
8666 temp = value = copy_to_reg (op0);
8667 else
8668 /* Arrange to return the incremented value. */
8669 /* Copy the rtx because expand_binop will protect from the queue,
8670 and the results of that would be invalid for us to return
8671 if our caller does emit_queue before using our result. */
8672 temp = copy_rtx (value = op0);
8673
8674 /* Increment however we can. */
8675 op1 = expand_binop (mode, this_optab, value, op1, op0,
8676 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8677 /* Make sure the value is stored into OP0. */
8678 if (op1 != op0)
8679 emit_move_insn (op0, op1);
8680
8681 return temp;
8682 }
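/* For illustration: in `y = x++;' (POST nonzero) the value returned is a
   copy of X from before the add, or the add itself is queued, so Y sees
   the old value; in `y = ++x;' the incremented value itself is returned.  */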
8683 \f
8684 /* Expand all function calls contained within EXP, innermost ones first.
8685 But don't look within expressions that have sequence points.
8686 For each CALL_EXPR, record the rtx for its value
8687 in the CALL_EXPR_RTL field. */
8688
8689 static void
8690 preexpand_calls (exp)
8691 tree exp;
8692 {
8693 register int nops, i;
8694 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8695
8696 if (! do_preexpand_calls)
8697 return;
8698
8699 /* Only expressions and references can contain calls. */
8700
8701 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8702 return;
8703
8704 switch (TREE_CODE (exp))
8705 {
8706 case CALL_EXPR:
8707 /* Do nothing if already expanded. */
8708 if (CALL_EXPR_RTL (exp) != 0)
8709 return;
8710
8711 /* Do nothing to built-in functions. */
8712 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8713 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8714 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8715 /* Do nothing if the call returns a variable-sized object. */
8716 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
8717 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8718 return;
8719
8720 case COMPOUND_EXPR:
8721 case COND_EXPR:
8722 case TRUTH_ANDIF_EXPR:
8723 case TRUTH_ORIF_EXPR:
8724 /* If we find one of these, then we can be sure
8725 the adjust will be done for it (since it makes jumps).
8726 Do it now, so that if this is inside an argument
8727 of a function, we don't get the stack adjustment
8728 after some other args have already been pushed. */
8729 do_pending_stack_adjust ();
8730 return;
8731
8732 case BLOCK:
8733 case RTL_EXPR:
8734 case WITH_CLEANUP_EXPR:
8735 case CLEANUP_POINT_EXPR:
8736 return;
8737
8738 case SAVE_EXPR:
8739 if (SAVE_EXPR_RTL (exp) != 0)
8740 return;
8741 }
8742
8743 nops = tree_code_length[(int) TREE_CODE (exp)];
8744 for (i = 0; i < nops; i++)
8745 if (TREE_OPERAND (exp, i) != 0)
8746 {
8747 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8748 if (type == 'e' || type == '<' || type == '1' || type == '2'
8749 || type == 'r')
8750 preexpand_calls (TREE_OPERAND (exp, i));
8751 }
8752 }
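
/* Illustrative sketch of the walk above (CALL_F and CALL_G stand for
   hypothetical CALL_EXPR trees).  Both calls under a PLUS_EXPR are
   expanded up front; a COMPOUND_EXPR has a sequence point, so the
   walk stops there.  */
#if 0
preexpand_calls (build (PLUS_EXPR, integer_type_node, call_f, call_g));
/* expands CALL_F, then CALL_G; each value lands in CALL_EXPR_RTL.  */
preexpand_calls (build (COMPOUND_EXPR, integer_type_node, call_f, call_g));
/* expands neither; only does the pending stack adjust.  */
#endif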
8753 \f
8754 /* At the start of a function, record that we have no previously-pushed
8755 arguments waiting to be popped. */
8756
8757 void
8758 init_pending_stack_adjust ()
8759 {
8760 pending_stack_adjust = 0;
8761 }
8762
8763 /* When exiting from a function, if safe, clear out any pending stack adjust
8764 so the adjustment won't get done. */
8765
8766 void
8767 clear_pending_stack_adjust ()
8768 {
8769 #ifdef EXIT_IGNORE_STACK
8770 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8771 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8772 && ! flag_inline_functions)
8773 pending_stack_adjust = 0;
8774 #endif
8775 }
8776
8777 /* Pop any previously-pushed arguments that have not been popped yet. */
8778
8779 void
8780 do_pending_stack_adjust ()
8781 {
8782 if (inhibit_defer_pop == 0)
8783 {
8784 if (pending_stack_adjust != 0)
8785 adjust_stack (GEN_INT (pending_stack_adjust));
8786 pending_stack_adjust = 0;
8787 }
8788 }
8789
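/* A minimal sketch of how these routines cooperate; the elided code
   is hypothetical.  NO_DEFER_POP and OK_DEFER_POP (from expr.h) bump
   inhibit_defer_pop around code that must not have deferred pops
   flushed into it.  */
#if 0
NO_DEFER_POP;
/* ... emit code during which the pending pops must stay deferred ... */
OK_DEFER_POP;
do_pending_stack_adjust ();	/* one adjust_stack for all returned calls */
#endif
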
8790 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
8791 Returns the cleanups to be performed. */
8792
8793 static tree
8794 defer_cleanups_to (old_cleanups)
8795 tree old_cleanups;
8796 {
8797 tree new_cleanups = NULL_TREE;
8798 tree cleanups = cleanups_this_call;
8799 tree last = NULL_TREE;
8800
8801 while (cleanups_this_call != old_cleanups)
8802 {
8803 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8804 last = cleanups_this_call;
8805 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8806 }
8807
8808 if (last)
8809 {
8810 /* Remove the list from the chain of cleanups. */
8811 TREE_CHAIN (last) = NULL_TREE;
8812
8813 /* Reverse them so that we can build them in the right order. */
8814 cleanups = nreverse (cleanups);
8815
8816 while (cleanups)
8817 {
8818 if (new_cleanups)
8819 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
8820 TREE_VALUE (cleanups), new_cleanups);
8821 else
8822 new_cleanups = TREE_VALUE (cleanups);
8823
8824 cleanups = TREE_CHAIN (cleanups);
8825 }
8826 }
8827
8828 return new_cleanups;
8829 }
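
/* Trace of the rebuild above, assuming three hypothetical cleanups
   C1, C2 and C3 were queued since OLD_CLEANUPS, C3 most recently:
   the detached sublist (C3 C2 C1) is nreversed to (C1 C2 C3) and
   folded into COMPOUND_EXPR (C3, COMPOUND_EXPR (C2, C1)), so the
   newest cleanup still runs first when the result is expanded.  */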
8830
8831 /* Expand all cleanups up to OLD_CLEANUPS.
8832 Needed here, and also for language-dependent calls. */
8833
8834 void
8835 expand_cleanups_to (old_cleanups)
8836 tree old_cleanups;
8837 {
8838 while (cleanups_this_call != old_cleanups)
8839 {
8840 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8841 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
8842 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8843 }
8844 }
8845 \f
8846 /* Expand conditional expressions. */
8847
8848 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8849 LABEL is an rtx of code CODE_LABEL, in this function and all the
8850 functions here. */
8851
8852 void
8853 jumpifnot (exp, label)
8854 tree exp;
8855 rtx label;
8856 {
8857 do_jump (exp, label, NULL_RTX);
8858 }
8859
8860 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8861
8862 void
8863 jumpif (exp, label)
8864 tree exp;
8865 rtx label;
8866 {
8867 do_jump (exp, NULL_RTX, label);
8868 }
8869
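/* Typical use of these wrappers when expanding `if (cond) stmt;';
   COND_TREE below is a hypothetical tree for the condition.  */
#if 0
rtx else_label = gen_label_rtx ();
jumpifnot (cond_tree, else_label);	/* falls through into the THEN code */
/* ... expand STMT ... */
do_pending_stack_adjust ();
emit_label (else_label);
#endif
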
8870 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8871 the result is zero, or IF_TRUE_LABEL if the result is one.
8872 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8873 meaning fall through in that case.
8874
8875 do_jump always does any pending stack adjust except when it does not
8876 actually perform a jump. An example where there is no jump
8877 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8878
8879 This function is responsible for optimizing cases such as
8880 &&, || and comparison operators in EXP. */
8881
8882 void
8883 do_jump (exp, if_false_label, if_true_label)
8884 tree exp;
8885 rtx if_false_label, if_true_label;
8886 {
8887 register enum tree_code code = TREE_CODE (exp);
8888 /* Some cases need to create a label to jump to
8889 in order to properly fall through.
8890 These cases set DROP_THROUGH_LABEL nonzero. */
8891 rtx drop_through_label = 0;
8892 rtx temp;
8893 rtx comparison = 0;
8894 int i;
8895 tree type;
8896 enum machine_mode mode;
8897
8898 emit_queue ();
8899
8900 switch (code)
8901 {
8902 case ERROR_MARK:
8903 break;
8904
8905 case INTEGER_CST:
8906 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8907 if (temp)
8908 emit_jump (temp);
8909 break;
8910
8911 #if 0
8912 /* This is not true with #pragma weak */
8913 case ADDR_EXPR:
8914 /* The address of something can never be zero. */
8915 if (if_true_label)
8916 emit_jump (if_true_label);
8917 break;
8918 #endif
8919
8920 case NOP_EXPR:
8921 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8922 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8923 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8924 goto normal;
8925 case CONVERT_EXPR:
8926 /* If we are narrowing the operand, we have to do the compare in the
8927 narrower mode. */
8928 if ((TYPE_PRECISION (TREE_TYPE (exp))
8929 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8930 goto normal;
8931 case NON_LVALUE_EXPR:
8932 case REFERENCE_EXPR:
8933 case ABS_EXPR:
8934 case NEGATE_EXPR:
8935 case LROTATE_EXPR:
8936 case RROTATE_EXPR:
8937 /* These cannot change zero->non-zero or vice versa. */
8938 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8939 break;
8940
8941 #if 0
8942 /* This is never less insns than evaluating the PLUS_EXPR followed by
8943 a test and can be longer if the test is eliminated. */
8944 case PLUS_EXPR:
8945 /* Reduce to minus. */
8946 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8947 TREE_OPERAND (exp, 0),
8948 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8949 TREE_OPERAND (exp, 1))));
8950 /* Process as MINUS. */
8951 #endif
8952
8953 case MINUS_EXPR:
8954 /* Non-zero iff operands of minus differ. */
8955 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8956 TREE_OPERAND (exp, 0),
8957 TREE_OPERAND (exp, 1)),
8958 NE, NE);
8959 break;
8960
8961 case BIT_AND_EXPR:
8962 /* If we are AND'ing with a small constant, do this comparison in the
8963 smallest type that fits. If the machine doesn't have comparisons
8964 that small, it will be converted back to the wider comparison.
8965 This helps if we are testing the sign bit of a narrower object.
8966 combine can't do this for us because it can't know whether a
8967 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8968
8969 if (! SLOW_BYTE_ACCESS
8970 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8971 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8972 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8973 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8974 && (type = type_for_mode (mode, 1)) != 0
8975 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8976 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8977 != CODE_FOR_nothing))
8978 {
8979 do_jump (convert (type, exp), if_false_label, if_true_label);
8980 break;
8981 }
8982 goto normal;
8983
8984 case TRUTH_NOT_EXPR:
8985 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8986 break;
8987
8988 case TRUTH_ANDIF_EXPR:
8989 {
8990 rtx seq1, seq2;
8991 tree cleanups, old_cleanups;
8992
8993 if (if_false_label == 0)
8994 if_false_label = drop_through_label = gen_label_rtx ();
8995 start_sequence ();
8996 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8997 seq1 = get_insns ();
8998 end_sequence ();
8999
9000 old_cleanups = cleanups_this_call;
9001 start_sequence ();
9002 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9003 seq2 = get_insns ();
9004 end_sequence ();
9005
9006 cleanups = defer_cleanups_to (old_cleanups);
9007 if (cleanups)
9008 {
9009 rtx flag = gen_reg_rtx (word_mode);
9010 tree new_cleanups;
9011 tree cond;
9012
9013 /* Flag cleanups as not needed. */
9014 emit_move_insn (flag, const0_rtx);
9015 emit_insns (seq1);
9016
9017 /* Flag cleanups as needed. */
9018 emit_move_insn (flag, const1_rtx);
9019 emit_insns (seq2);
9020
9021 /* Convert FLAG, which is an rtx, into a tree. */
9022 cond = make_node (RTL_EXPR);
9023 TREE_TYPE (cond) = integer_type_node;
9024 RTL_EXPR_RTL (cond) = flag;
9025 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9026 cond = save_expr (cond);
9027
9028 new_cleanups = build (COND_EXPR, void_type_node,
9029 truthvalue_conversion (cond),
9030 cleanups, integer_zero_node);
9031 new_cleanups = fold (new_cleanups);
9032
9033 /* Now add in the conditionalized cleanups. */
9034 cleanups_this_call
9035 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9036 (*interim_eh_hook) (NULL_TREE);
9037 }
9038 else
9039 {
9040 emit_insns (seq1);
9041 emit_insns (seq2);
9042 }
9043 }
9044 break;
9045
9046 case TRUTH_ORIF_EXPR:
9047 {
9048 rtx seq1, seq2;
9049 tree cleanups, old_cleanups;
9050
9051 if (if_true_label == 0)
9052 if_true_label = drop_through_label = gen_label_rtx ();
9053 start_sequence ();
9054 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9055 seq1 = get_insns ();
9056 end_sequence ();
9057
9058 old_cleanups = cleanups_this_call;
9059 start_sequence ();
9060 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9061 seq2 = get_insns ();
9062 end_sequence ();
9063
9064 cleanups = defer_cleanups_to (old_cleanups);
9065 if (cleanups)
9066 {
9067 rtx flag = gen_reg_rtx (word_mode);
9068 tree new_cleanups;
9069 tree cond;
9070
9071 /* Flag cleanups as not needed. */
9072 emit_move_insn (flag, const0_rtx);
9073 emit_insns (seq1);
9074
9075 /* Flag cleanups as needed. */
9076 emit_move_insn (flag, const1_rtx);
9077 emit_insns (seq2);
9078
9079 /* Convert FLAG, which is an rtx, into a tree. */
9080 cond = make_node (RTL_EXPR);
9081 TREE_TYPE (cond) = integer_type_node;
9082 RTL_EXPR_RTL (cond) = flag;
9083 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9084 cond = save_expr (cond);
9085
9086 new_cleanups = build (COND_EXPR, void_type_node,
9087 truthvalue_conversion (cond),
9088 cleanups, integer_zero_node);
9089 new_cleanups = fold (new_cleanups);
9090
9091 /* Now add in the conditionalized cleanups. */
9092 cleanups_this_call
9093 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9094 (*interim_eh_hook) (NULL_TREE);
9095 }
9096 else
9097 {
9098 emit_insns (seq1);
9099 emit_insns (seq2);
9100 }
9101 }
9102 break;
9103
9104 case COMPOUND_EXPR:
9105 push_temp_slots ();
9106 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9107 free_temp_slots ();
9108 pop_temp_slots ();
9109 emit_queue ();
9110 do_pending_stack_adjust ();
9111 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9112 break;
9113
9114 case COMPONENT_REF:
9115 case BIT_FIELD_REF:
9116 case ARRAY_REF:
9117 {
9118 int bitsize, bitpos, unsignedp;
9119 enum machine_mode mode;
9120 tree type;
9121 tree offset;
9122 int volatilep = 0;
9123
9124 /* Get description of this reference. We don't actually care
9125 about the underlying object here. */
9126 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9127 &mode, &unsignedp, &volatilep);
9128
9129 type = type_for_size (bitsize, unsignedp);
9130 if (! SLOW_BYTE_ACCESS
9131 && type != 0 && bitsize >= 0
9132 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9133 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9134 != CODE_FOR_nothing))
9135 {
9136 do_jump (convert (type, exp), if_false_label, if_true_label);
9137 break;
9138 }
9139 goto normal;
9140 }
9141
9142 case COND_EXPR:
9143 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9144 if (integer_onep (TREE_OPERAND (exp, 1))
9145 && integer_zerop (TREE_OPERAND (exp, 2)))
9146 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9147
9148 else if (integer_zerop (TREE_OPERAND (exp, 1))
9149 && integer_onep (TREE_OPERAND (exp, 2)))
9150 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9151
9152 else
9153 {
9154 register rtx label1 = gen_label_rtx ();
9155 drop_through_label = gen_label_rtx ();
9156 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9157 /* Now the THEN-expression. */
9158 do_jump (TREE_OPERAND (exp, 1),
9159 if_false_label ? if_false_label : drop_through_label,
9160 if_true_label ? if_true_label : drop_through_label);
9161 /* In case the do_jump just above never jumps. */
9162 do_pending_stack_adjust ();
9163 emit_label (label1);
9164 /* Now the ELSE-expression. */
9165 do_jump (TREE_OPERAND (exp, 2),
9166 if_false_label ? if_false_label : drop_through_label,
9167 if_true_label ? if_true_label : drop_through_label);
9168 }
9169 break;
9170
9171 case EQ_EXPR:
9172 {
9173 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9174
9175 if (integer_zerop (TREE_OPERAND (exp, 1)))
9176 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9177 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9178 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9179 do_jump
9180 (fold
9181 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9182 fold (build (EQ_EXPR, TREE_TYPE (exp),
9183 fold (build1 (REALPART_EXPR,
9184 TREE_TYPE (inner_type),
9185 TREE_OPERAND (exp, 0))),
9186 fold (build1 (REALPART_EXPR,
9187 TREE_TYPE (inner_type),
9188 TREE_OPERAND (exp, 1))))),
9189 fold (build (EQ_EXPR, TREE_TYPE (exp),
9190 fold (build1 (IMAGPART_EXPR,
9191 TREE_TYPE (inner_type),
9192 TREE_OPERAND (exp, 0))),
9193 fold (build1 (IMAGPART_EXPR,
9194 TREE_TYPE (inner_type),
9195 TREE_OPERAND (exp, 1))))))),
9196 if_false_label, if_true_label);
9197 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9198 && !can_compare_p (TYPE_MODE (inner_type)))
9199 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9200 else
9201 comparison = compare (exp, EQ, EQ);
9202 break;
9203 }
9204
9205 case NE_EXPR:
9206 {
9207 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9208
9209 if (integer_zerop (TREE_OPERAND (exp, 1)))
9210 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9211 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9212 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9213 do_jump
9214 (fold
9215 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9216 fold (build (NE_EXPR, TREE_TYPE (exp),
9217 fold (build1 (REALPART_EXPR,
9218 TREE_TYPE (inner_type),
9219 TREE_OPERAND (exp, 0))),
9220 fold (build1 (REALPART_EXPR,
9221 TREE_TYPE (inner_type),
9222 TREE_OPERAND (exp, 1))))),
9223 fold (build (NE_EXPR, TREE_TYPE (exp),
9224 fold (build1 (IMAGPART_EXPR,
9225 TREE_TYPE (inner_type),
9226 TREE_OPERAND (exp, 0))),
9227 fold (build1 (IMAGPART_EXPR,
9228 TREE_TYPE (inner_type),
9229 TREE_OPERAND (exp, 1))))))),
9230 if_false_label, if_true_label);
9231 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9232 && !can_compare_p (TYPE_MODE (inner_type)))
9233 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9234 else
9235 comparison = compare (exp, NE, NE);
9236 break;
9237 }
9238
9239 case LT_EXPR:
9240 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9241 == MODE_INT)
9242 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9243 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9244 else
9245 comparison = compare (exp, LT, LTU);
9246 break;
9247
9248 case LE_EXPR:
9249 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9250 == MODE_INT)
9251 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9252 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9253 else
9254 comparison = compare (exp, LE, LEU);
9255 break;
9256
9257 case GT_EXPR:
9258 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9259 == MODE_INT)
9260 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9261 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9262 else
9263 comparison = compare (exp, GT, GTU);
9264 break;
9265
9266 case GE_EXPR:
9267 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9268 == MODE_INT)
9269 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9270 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9271 else
9272 comparison = compare (exp, GE, GEU);
9273 break;
9274
9275 default:
9276 normal:
9277 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9278 #if 0
9279 /* This is not needed any more and causes poor code since it causes
9280 comparisons and tests from non-SI objects to have different code
9281 sequences. */
9282 /* Copy to register to avoid generating bad insns by cse
9283 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9284 if (!cse_not_expected && GET_CODE (temp) == MEM)
9285 temp = copy_to_reg (temp);
9286 #endif
9287 do_pending_stack_adjust ();
9288 if (GET_CODE (temp) == CONST_INT)
9289 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9290 else if (GET_CODE (temp) == LABEL_REF)
9291 comparison = const_true_rtx;
9292 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9293 && !can_compare_p (GET_MODE (temp)))
9294 /* Note swapping the labels gives us not-equal. */
9295 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9296 else if (GET_MODE (temp) != VOIDmode)
9297 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9298 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9299 GET_MODE (temp), NULL_RTX, 0);
9300 else
9301 abort ();
9302 }
9303
9304 /* Do any postincrements in the expression that was tested. */
9305 emit_queue ();
9306
9307 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9308 straight into a conditional jump instruction as the jump condition.
9309 Otherwise, all the work has been done already. */
9310
9311 if (comparison == const_true_rtx)
9312 {
9313 if (if_true_label)
9314 emit_jump (if_true_label);
9315 }
9316 else if (comparison == const0_rtx)
9317 {
9318 if (if_false_label)
9319 emit_jump (if_false_label);
9320 }
9321 else if (comparison)
9322 do_jump_for_compare (comparison, if_false_label, if_true_label);
9323
9324 if (drop_through_label)
9325 {
9326 /* If do_jump produces code that might be jumped around,
9327 do any stack adjusts from that code, before the place
9328 where control merges in. */
9329 do_pending_stack_adjust ();
9330 emit_label (drop_through_label);
9331 }
9332 }
9333 \f
9334 /* Given a comparison expression EXP for values too wide to be compared
9335 with one insn, test the comparison and jump to the appropriate label.
9336 The code of EXP is ignored; we always test GT if SWAP is 0,
9337 and LT if SWAP is 1. */
9338
9339 static void
9340 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9341 tree exp;
9342 int swap;
9343 rtx if_false_label, if_true_label;
9344 {
9345 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9346 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9347 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9348 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9349 rtx drop_through_label = 0;
9350 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9351 int i;
9352
9353 if (! if_true_label || ! if_false_label)
9354 drop_through_label = gen_label_rtx ();
9355 if (! if_true_label)
9356 if_true_label = drop_through_label;
9357 if (! if_false_label)
9358 if_false_label = drop_through_label;
9359
9360 /* Compare a word at a time, high order first. */
9361 for (i = 0; i < nwords; i++)
9362 {
9363 rtx comp;
9364 rtx op0_word, op1_word;
9365
9366 if (WORDS_BIG_ENDIAN)
9367 {
9368 op0_word = operand_subword_force (op0, i, mode);
9369 op1_word = operand_subword_force (op1, i, mode);
9370 }
9371 else
9372 {
9373 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9374 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9375 }
9376
9377 /* All but the high-order word must be compared as unsigned. */
9378 comp = compare_from_rtx (op0_word, op1_word,
9379 (unsignedp || i > 0) ? GTU : GT,
9380 unsignedp, word_mode, NULL_RTX, 0);
9381 if (comp == const_true_rtx)
9382 emit_jump (if_true_label);
9383 else if (comp != const0_rtx)
9384 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9385
9386 /* Consider lower words only if these are equal. */
9387 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9388 NULL_RTX, 0);
9389 if (comp == const_true_rtx)
9390 emit_jump (if_false_label);
9391 else if (comp != const0_rtx)
9392 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9393 }
9394
9395 if (if_false_label)
9396 emit_jump (if_false_label);
9397 if (drop_through_label)
9398 emit_label (drop_through_label);
9399 }
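
/* C rendering of the loop above for a two-word signed comparison
   (illustration only; HI and LO are hypothetical accessors yielding
   the high and low words, LO unsigned):

	if ((signed) HI (a) > (signed) HI (b)) goto if_true;
	if (HI (a) != HI (b))                  goto if_false;
	if (LO (a) > LO (b))                   goto if_true;
	goto if_false;

   Only the high-order word is compared signed; the rest unsigned.  */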
9400
9401 /* Compare OP0 with OP1, word at a time, in mode MODE.
9402 UNSIGNEDP says to do unsigned comparison.
9403 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9404
9405 void
9406 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9407 enum machine_mode mode;
9408 int unsignedp;
9409 rtx op0, op1;
9410 rtx if_false_label, if_true_label;
9411 {
9412 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9413 rtx drop_through_label = 0;
9414 int i;
9415
9416 if (! if_true_label || ! if_false_label)
9417 drop_through_label = gen_label_rtx ();
9418 if (! if_true_label)
9419 if_true_label = drop_through_label;
9420 if (! if_false_label)
9421 if_false_label = drop_through_label;
9422
9423 /* Compare a word at a time, high order first. */
9424 for (i = 0; i < nwords; i++)
9425 {
9426 rtx comp;
9427 rtx op0_word, op1_word;
9428
9429 if (WORDS_BIG_ENDIAN)
9430 {
9431 op0_word = operand_subword_force (op0, i, mode);
9432 op1_word = operand_subword_force (op1, i, mode);
9433 }
9434 else
9435 {
9436 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9437 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9438 }
9439
9440 /* All but the high-order word must be compared as unsigned. */
9441 comp = compare_from_rtx (op0_word, op1_word,
9442 (unsignedp || i > 0) ? GTU : GT,
9443 unsignedp, word_mode, NULL_RTX, 0);
9444 if (comp == const_true_rtx)
9445 emit_jump (if_true_label);
9446 else if (comp != const0_rtx)
9447 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9448
9449 /* Consider lower words only if these are equal. */
9450 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9451 NULL_RTX, 0);
9452 if (comp == const_true_rtx)
9453 emit_jump (if_false_label);
9454 else if (comp != const0_rtx)
9455 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9456 }
9457
9458 if (if_false_label)
9459 emit_jump (if_false_label);
9460 if (drop_through_label)
9461 emit_label (drop_through_label);
9462 }
9463
9464 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9465 with one insn, test the comparison and jump to the appropriate label. */
9466
9467 static void
9468 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9469 tree exp;
9470 rtx if_false_label, if_true_label;
9471 {
9472 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9473 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9474 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9475 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9476 int i;
9477 rtx drop_through_label = 0;
9478
9479 if (! if_false_label)
9480 drop_through_label = if_false_label = gen_label_rtx ();
9481
9482 for (i = 0; i < nwords; i++)
9483 {
9484 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
9485 operand_subword_force (op1, i, mode),
9486 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9487 word_mode, NULL_RTX, 0);
9488 if (comp == const_true_rtx)
9489 emit_jump (if_false_label);
9490 else if (comp != const0_rtx)
9491 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9492 }
9493
9494 if (if_true_label)
9495 emit_jump (if_true_label);
9496 if (drop_through_label)
9497 emit_label (drop_through_label);
9498 }
9499 \f
9500 /* Jump according to whether OP0 is 0.
9501 We assume that OP0 has an integer mode that is too wide
9502 for the available compare insns. */
9503
9504 static void
9505 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9506 rtx op0;
9507 rtx if_false_label, if_true_label;
9508 {
9509 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9510 int i;
9511 rtx drop_through_label = 0;
9512
9513 if (! if_false_label)
9514 drop_through_label = if_false_label = gen_label_rtx ();
9515
9516 for (i = 0; i < nwords; i++)
9517 {
9518 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
9519 GET_MODE (op0)),
9520 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
9521 if (comp == const_true_rtx)
9522 emit_jump (if_false_label);
9523 else if (comp != const0_rtx)
9524 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9525 }
9526
9527 if (if_true_label)
9528 emit_jump (if_true_label);
9529 if (drop_through_label)
9530 emit_label (drop_through_label);
9531 }
9532
9533 /* Given a comparison expression in rtl form, output conditional branches to
9534 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
9535
9536 static void
9537 do_jump_for_compare (comparison, if_false_label, if_true_label)
9538 rtx comparison, if_false_label, if_true_label;
9539 {
9540 if (if_true_label)
9541 {
9542 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9543 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
9544 else
9545 abort ();
9546
9547 if (if_false_label)
9548 emit_jump (if_false_label);
9549 }
9550 else if (if_false_label)
9551 {
9552 rtx insn;
9553 rtx prev = get_last_insn ();
9554 rtx branch = 0;
9555
9556 /* Output the branch with the opposite condition. Then try to invert
9557 what is generated. If more than one insn is a branch, or if the
9558 branch is not the last insn written, abort. If we can't invert
9559 the branch, make a true label, redirect this jump to it,
9560 emit a jump to the false label, and define the true label. */
9561
9562 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9563 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
9564 else
9565 abort ();
9566
9567 /* Here we get the first insn that was just emitted. It used to be the
9568 case that, on some machines, emitting the branch would discard
9569 the previous compare insn and emit a replacement. This isn't
9570 done anymore, but abort if we see that PREV is deleted. */
9571
9572 if (prev == 0)
9573 insn = get_insns ();
9574 else if (INSN_DELETED_P (prev))
9575 abort ();
9576 else
9577 insn = NEXT_INSN (prev);
9578
9579 for (; insn; insn = NEXT_INSN (insn))
9580 if (GET_CODE (insn) == JUMP_INSN)
9581 {
9582 if (branch)
9583 abort ();
9584 branch = insn;
9585 }
9586
9587 if (branch != get_last_insn ())
9588 abort ();
9589
9590 JUMP_LABEL (branch) = if_false_label;
9591 if (! invert_jump (branch, if_false_label))
9592 {
9593 if_true_label = gen_label_rtx ();
9594 redirect_jump (branch, if_true_label);
9595 emit_jump (if_false_label);
9596 emit_label (if_true_label);
9597 }
9598 }
9599 }
9600 \f
9601 /* Generate code for a comparison expression EXP
9602 (including code to compute the values to be compared)
9603 and set (CC0) according to the result.
9604 SIGNED_CODE should be the rtx operation for this comparison for
9605 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9606
9607 We force a stack adjustment unless there are currently
9608 things pushed on the stack that aren't yet used. */
9609
9610 static rtx
9611 compare (exp, signed_code, unsigned_code)
9612 register tree exp;
9613 enum rtx_code signed_code, unsigned_code;
9614 {
9615 register rtx op0
9616 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9617 register rtx op1
9618 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9619 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
9620 register enum machine_mode mode = TYPE_MODE (type);
9621 int unsignedp = TREE_UNSIGNED (type);
9622 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
9623
9624 return compare_from_rtx (op0, op1, code, unsignedp, mode,
9625 ((mode == BLKmode)
9626 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9627 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
9628 }
9629
9630 /* Like compare but expects the values to compare as two rtx's.
9631 The decision as to signed or unsigned comparison must be made by the caller.
9632
9633 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9634 compared.
9635
9636 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9637 size of MODE should be used. */
9638
9639 rtx
9640 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9641 register rtx op0, op1;
9642 enum rtx_code code;
9643 int unsignedp;
9644 enum machine_mode mode;
9645 rtx size;
9646 int align;
9647 {
9648 rtx tem;
9649
9650 /* If one operand is constant, make it the second one. Only do this
9651 if the other operand is not constant as well. */
9652
9653 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9654 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9655 {
9656 tem = op0;
9657 op0 = op1;
9658 op1 = tem;
9659 code = swap_condition (code);
9660 }
9661
9662 if (flag_force_mem)
9663 {
9664 op0 = force_not_mem (op0);
9665 op1 = force_not_mem (op1);
9666 }
9667
9668 do_pending_stack_adjust ();
9669
9670 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9671 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9672 return tem;
9673
9674 #if 0
9675 /* There's no need to do this now that combine.c can eliminate lots of
9676 sign extensions. This can be less efficient in certain cases on other
9677 machines. */
9678
9679 /* If this is a signed equality comparison, we can do it as an
9680 unsigned comparison since zero-extension is cheaper than sign
9681 extension and comparisons with zero are done as unsigned. This is
9682 the case even on machines that can do fast sign extension, since
9683 zero-extension is easier to combine with other operations than
9684 sign-extension is. If we are comparing against a constant, we must
9685 convert it to what it would look like unsigned. */
9686 if ((code == EQ || code == NE) && ! unsignedp
9687 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9688 {
9689 if (GET_CODE (op1) == CONST_INT
9690 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9691 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9692 unsignedp = 1;
9693 }
9694 #endif
9695
9696 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9697
9698 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
9699 }
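
/* Example of the canonicalization at the top of this function: a
   constant first operand is moved second and the condition swapped,
   so `3 < reg' is tested as `reg > 3'.  REG is a hypothetical
   register rtx.  */
#if 0
comparison = compare_from_rtx (GEN_INT (3), reg, LT, 0, SImode,
			       NULL_RTX, 0);
/* emits a compare of (reg, 3) and returns (gt (cc0) (const_int 0)).  */
#endif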
9700 \f
9701 /* Generate code to calculate EXP using a store-flag instruction
9702 and return an rtx for the result. EXP is either a comparison
9703 or a TRUTH_NOT_EXPR whose operand is a comparison.
9704
9705 If TARGET is nonzero, store the result there if convenient.
9706
9707 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9708 cheap.
9709
9710 Return zero if there is no suitable set-flag instruction
9711 available on this machine.
9712
9713 Once expand_expr has been called on the arguments of the comparison,
9714 we are committed to doing the store flag, since it is not safe to
9715 re-evaluate the expression. We emit the store-flag insn by calling
9716 emit_store_flag, but only expand the arguments if we have a reason
9717 to believe that emit_store_flag will be successful. If we think that
9718 it will, but it isn't, we have to simulate the store-flag with a
9719 set/jump/set sequence. */
9720
9721 static rtx
9722 do_store_flag (exp, target, mode, only_cheap)
9723 tree exp;
9724 rtx target;
9725 enum machine_mode mode;
9726 int only_cheap;
9727 {
9728 enum rtx_code code;
9729 tree arg0, arg1, type;
9730 tree tem;
9731 enum machine_mode operand_mode;
9732 int invert = 0;
9733 int unsignedp;
9734 rtx op0, op1;
9735 enum insn_code icode;
9736 rtx subtarget = target;
9737 rtx result, label, pattern, jump_pat;
9738
9739 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9740 result at the end. We can't simply invert the test since it would
9741 have already been inverted if it were valid. This case occurs for
9742 some floating-point comparisons. */
9743
9744 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9745 invert = 1, exp = TREE_OPERAND (exp, 0);
9746
9747 arg0 = TREE_OPERAND (exp, 0);
9748 arg1 = TREE_OPERAND (exp, 1);
9749 type = TREE_TYPE (arg0);
9750 operand_mode = TYPE_MODE (type);
9751 unsignedp = TREE_UNSIGNED (type);
9752
9753 /* We won't bother with BLKmode store-flag operations because it would mean
9754 passing a lot of information to emit_store_flag. */
9755 if (operand_mode == BLKmode)
9756 return 0;
9757
9758 STRIP_NOPS (arg0);
9759 STRIP_NOPS (arg1);
9760
9761 /* Get the rtx comparison code to use. We know that EXP is a comparison
9762 operation of some type. Some comparisons against 1 and -1 can be
9763 converted to comparisons with zero. Do so here so that the tests
9764 below will be aware that we have a comparison with zero. These
9765 tests will not catch constants in the first operand, but constants
9766 are rarely passed as the first operand. */
9767
9768 switch (TREE_CODE (exp))
9769 {
9770 case EQ_EXPR:
9771 code = EQ;
9772 break;
9773 case NE_EXPR:
9774 code = NE;
9775 break;
9776 case LT_EXPR:
9777 if (integer_onep (arg1))
9778 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9779 else
9780 code = unsignedp ? LTU : LT;
9781 break;
9782 case LE_EXPR:
9783 if (! unsignedp && integer_all_onesp (arg1))
9784 arg1 = integer_zero_node, code = LT;
9785 else
9786 code = unsignedp ? LEU : LE;
9787 break;
9788 case GT_EXPR:
9789 if (! unsignedp && integer_all_onesp (arg1))
9790 arg1 = integer_zero_node, code = GE;
9791 else
9792 code = unsignedp ? GTU : GT;
9793 break;
9794 case GE_EXPR:
9795 if (integer_onep (arg1))
9796 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9797 else
9798 code = unsignedp ? GEU : GE;
9799 break;
9800 default:
9801 abort ();
9802 }
9803
9804 /* Put a constant second. */
9805 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9806 {
9807 tem = arg0; arg0 = arg1; arg1 = tem;
9808 code = swap_condition (code);
9809 }
9810
9811 /* If this is an equality or inequality test of a single bit, we can
9812 do this by shifting the bit being tested to the low-order bit and
9813 masking the result with the constant 1. If the condition was EQ,
9814 we xor it with 1. This does not require an scc insn and is faster
9815 than an scc insn even if we have it. */
9816
9817 if ((code == NE || code == EQ)
9818 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9819 && integer_pow2p (TREE_OPERAND (arg0, 1))
9820 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9821 {
9822 tree inner = TREE_OPERAND (arg0, 0);
9823 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9824 NULL_RTX, VOIDmode, 0)));
9825 int ops_unsignedp;
9826
9827 /* If INNER is a right shift of a constant and it plus BITNUM does
9828 not overflow, adjust BITNUM and INNER. */
9829
9830 if (TREE_CODE (inner) == RSHIFT_EXPR
9831 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9832 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9833 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9834 < TYPE_PRECISION (type)))
9835 {
9836 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9837 inner = TREE_OPERAND (inner, 0);
9838 }
9839
9840 /* If we are going to be able to omit the AND below, we must do our
9841 operations as unsigned. If we must use the AND, we have a choice.
9842 Normally unsigned is faster, but for some machines signed is. */
9843 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9844 #ifdef LOAD_EXTEND_OP
9845 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9846 #else
9847 : 1
9848 #endif
9849 );
9850
9851 if (subtarget == 0 || GET_CODE (subtarget) != REG
9852 || GET_MODE (subtarget) != operand_mode
9853 || ! safe_from_p (subtarget, inner))
9854 subtarget = 0;
9855
9856 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9857
9858 if (bitnum != 0)
9859 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9860 size_int (bitnum), subtarget, ops_unsignedp);
9861
9862 if (GET_MODE (op0) != mode)
9863 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9864
9865 if ((code == EQ && ! invert) || (code == NE && invert))
9866 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9867 ops_unsignedp, OPTAB_LIB_WIDEN);
9868
9869 /* Put the AND last so it can combine with more things. */
9870 if (bitnum != TYPE_PRECISION (type) - 1)
9871 op0 = expand_and (op0, const1_rtx, subtarget);
9872
9873 return op0;
9874 }
9875
9876 /* Now see if we are likely to be able to do this. Return if not. */
9877 if (! can_compare_p (operand_mode))
9878 return 0;
9879 icode = setcc_gen_code[(int) code];
9880 if (icode == CODE_FOR_nothing
9881 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9882 {
9883 /* We can only do this if it is one of the special cases that
9884 can be handled without an scc insn. */
9885 if ((code == LT && integer_zerop (arg1))
9886 || (! only_cheap && code == GE && integer_zerop (arg1)))
9887 ;
9888 else if (BRANCH_COST >= 0
9889 && ! only_cheap && (code == NE || code == EQ)
9890 && TREE_CODE (type) != REAL_TYPE
9891 && ((abs_optab->handlers[(int) operand_mode].insn_code
9892 != CODE_FOR_nothing)
9893 || (ffs_optab->handlers[(int) operand_mode].insn_code
9894 != CODE_FOR_nothing)))
9895 ;
9896 else
9897 return 0;
9898 }
9899
9900 preexpand_calls (exp);
9901 if (subtarget == 0 || GET_CODE (subtarget) != REG
9902 || GET_MODE (subtarget) != operand_mode
9903 || ! safe_from_p (subtarget, arg1))
9904 subtarget = 0;
9905
9906 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9907 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9908
9909 if (target == 0)
9910 target = gen_reg_rtx (mode);
9911
9912 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9913 because, if emit_store_flag does anything, it will succeed and
9914 OP0 and OP1 will not be used subsequently. */
9915
9916 result = emit_store_flag (target, code,
9917 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9918 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9919 operand_mode, unsignedp, 1);
9920
9921 if (result)
9922 {
9923 if (invert)
9924 result = expand_binop (mode, xor_optab, result, const1_rtx,
9925 result, 0, OPTAB_LIB_WIDEN);
9926 return result;
9927 }
9928
9929 /* If this failed, we have to do this with set/compare/jump/set code. */
9930 if (target == 0 || GET_CODE (target) != REG
9931 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9932 target = gen_reg_rtx (GET_MODE (target));
9933
9934 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9935 result = compare_from_rtx (op0, op1, code, unsignedp,
9936 operand_mode, NULL_RTX, 0);
9937 if (GET_CODE (result) == CONST_INT)
9938 return (((result == const0_rtx && ! invert)
9939 || (result != const0_rtx && invert))
9940 ? const0_rtx : const1_rtx);
9941
9942 label = gen_label_rtx ();
9943 if (bcc_gen_fctn[(int) code] == 0)
9944 abort ();
9945
9946 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9947 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9948 emit_label (label);
9949
9950 return target;
9951 }
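
/* Worked example of the single-bit shortcut in do_store_flag above:
   for `(x & 8) != 0' the bit number is 3, so no scc insn is needed.
   A sketch in C terms, assuming unsigned X:  */
#if 0
result_ne = (x >> 3) & 1;		/* NE: shift, then mask */
result_eq = ((x >> 3) ^ 1) & 1;		/* EQ: extra XOR, AND kept last */
#endif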
9952 \f
9953 /* Generate a tablejump instruction (used for switch statements). */
9954
9955 #ifdef HAVE_tablejump
9956
9957 /* INDEX is the value being switched on, with the lowest value
9958 in the table already subtracted.
9959 MODE is its expected mode (needed if INDEX is constant).
9960 RANGE is the length of the jump table.
9961 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9962
9963 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9964 index value is out of range. */
9965
9966 void
9967 do_tablejump (index, mode, range, table_label, default_label)
9968 rtx index, range, table_label, default_label;
9969 enum machine_mode mode;
9970 {
9971 register rtx temp, vector;
9972
9973 /* Do an unsigned comparison (in the proper mode) between the index
9974 expression and the value which represents the length of the range.
9975 Since we just finished subtracting the lower bound of the range
9976 from the index expression, this comparison allows us to simultaneously
9977 check that the original index expression value is both greater than
9978 or equal to the minimum value of the range and less than or equal to
9979 the maximum value of the range. */
9980
9981 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9982 emit_jump_insn (gen_bgtu (default_label));
9983
9984 /* If index is in range, it must fit in Pmode.
9985 Convert to Pmode so we can index with it. */
9986 if (mode != Pmode)
9987 index = convert_to_mode (Pmode, index, 1);
9988
9989 /* Don't let a MEM slip through, because then the INDEX that comes
9990 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9991 and break_out_memory_refs will go to work on it and mess it up. */
9992 #ifdef PIC_CASE_VECTOR_ADDRESS
9993 if (flag_pic && GET_CODE (index) != REG)
9994 index = copy_to_mode_reg (Pmode, index);
9995 #endif
9996
9997 /* If flag_force_addr were to affect this address
9998 it could interfere with the tricky assumptions made
9999 about addresses that contain label-refs,
10000 which may be valid only very near the tablejump itself. */
10001 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10002 GET_MODE_SIZE, because this indicates how large insns are. The other
10003 uses should all be Pmode, because they are addresses. This code
10004 could fail if addresses and insns are not the same size. */
10005 index = gen_rtx (PLUS, Pmode,
10006 gen_rtx (MULT, Pmode, index,
10007 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10008 gen_rtx (LABEL_REF, Pmode, table_label));
10009 #ifdef PIC_CASE_VECTOR_ADDRESS
10010 if (flag_pic)
10011 index = PIC_CASE_VECTOR_ADDRESS (index);
10012 else
10013 #endif
10014 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10015 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10016 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
10017 RTX_UNCHANGING_P (vector) = 1;
10018 convert_move (temp, vector, 0);
10019
10020 emit_jump_insn (gen_tablejump (temp, table_label));
10021
10022 #ifndef CASE_VECTOR_PC_RELATIVE
10023 /* If we are generating PIC code or if the table is PC-relative, the
10024 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10025 if (! flag_pic)
10026 emit_barrier ();
10027 #endif
10028 }
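
/* In C terms the dispatch above computes roughly the following,
   assuming 4-byte non-PIC vector entries (TABLE and INDEX are
   illustrative):

	if ((unsigned) index > (unsigned) range)
	  goto default_label;
	pc = *(address *) (table + index * 4);

   One unsigned comparison performs both range checks at once, since
   the lower bound has already been subtracted from INDEX.  */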
10029
10030 #endif /* HAVE_tablejump */
10031
10032
10033 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
10034 to that value is on the top of the stack. The resulting type is TYPE, and
10035 the source declaration is DECL. */
10036
10037 void
10038 bc_load_memory (type, decl)
10039 tree type, decl;
10040 {
10041 enum bytecode_opcode opcode;
10042
10043
10044 /* Bit fields are special. We only know about signed and
10045 unsigned ints, and enums. The latter are treated as
10046 signed integers. */
10047
10048 if (DECL_BIT_FIELD (decl))
10049 if (TREE_CODE (type) == ENUMERAL_TYPE
10050 || TREE_CODE (type) == INTEGER_TYPE)
10051 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
10052 else
10053 abort ();
10054 else
10055 /* See corresponding comment in bc_store_memory(). */
10056 if (TYPE_MODE (type) == BLKmode
10057 || TYPE_MODE (type) == VOIDmode)
10058 return;
10059 else
10060 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
10061
10062 if (opcode == neverneverland)
10063 abort ();
10064
10065 bc_emit_bytecode (opcode);
10066
10067 #ifdef DEBUG_PRINT_CODE
10068 fputc ('\n', stderr);
10069 #endif
10070 }
10071
10072
10073 /* Store the contents of the second stack slot to the address in the
10074 top stack slot. DECL is the declaration of the destination and is used
10075 to determine whether we're dealing with a bitfield. */
10076
10077 void
10078 bc_store_memory (type, decl)
10079 tree type, decl;
10080 {
10081 enum bytecode_opcode opcode;
10082
10083
10084 if (DECL_BIT_FIELD (decl))
10085 {
10086 if (TREE_CODE (type) == ENUMERAL_TYPE
10087 || TREE_CODE (type) == INTEGER_TYPE)
10088 opcode = sstoreBI;
10089 else
10090 abort ();
10091 }
10092 else
10093 if (TYPE_MODE (type) == BLKmode)
10094 {
10095 /* Copy structure. This expands to a block copy instruction, storeBLK.
10096 In addition to the arguments expected by the other store instructions,
10097 it also expects a type size (SImode) on top of the stack, which is the
10098 structure size in size units (usually bytes). The first two arguments
10099 are already on the stack, so we just push the size on top of them. For
10100 some languages the size may be variable; this is why we don't encode it as
10101 a storeBLK literal, but rather treat it as a full-fledged expression. (See the layout sketch after this function.) */
10102
10103 bc_expand_expr (TYPE_SIZE (type));
10104 opcode = storeBLK;
10105 }
10106 else
10107 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
10108
10109 if (opcode == neverneverland)
10110 abort ();
10111
10112 bc_emit_bytecode (opcode);
10113
10114 #ifdef DEBUG_PRINT_CODE
10115 fputc ('\n', stderr);
10116 #endif
10117 }
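
/* Stack layout consumed by the storeBLK case above, top first
   (a sketch of the protocol, not compiled code):

	size in size-units	(pushed by bc_expand_expr (TYPE_SIZE ...))
	destination address
	source block

   The scalar store opcodes expect only the address and the value.  */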
10118
10119
10120 /* Allocate local stack space sufficient to hold a value of the given
10121 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
10122 integral power of 2. A special case is locals of type VOID, which
10123 have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
10124 remapped into the corresponding attribute of SImode. */
10125
10126 rtx
10127 bc_allocate_local (size, alignment)
10128 int size, alignment;
10129 {
10130 rtx retval;
10131 int byte_alignment;
10132
10133 if (size < 0)
10134 abort ();
10135
10136 /* Normalize size and alignment */
10137 if (!size)
10138 size = UNITS_PER_WORD;
10139
10140 if (alignment < BITS_PER_UNIT)
10141 byte_alignment = 1 << (INT_ALIGN - 1);
10142 else
10143 /* Align */
10144 byte_alignment = alignment / BITS_PER_UNIT;
10145
10146 if (local_vars_size & (byte_alignment - 1))
10147 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
10148
10149 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10150 local_vars_size += size;
10151
10152 return retval;
10153 }
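
/* Worked example of the rounding above: with local_vars_size == 13,
   SIZE == 5 and ALIGNMENT == 32 bits, byte_alignment is 4, so 13 is
   rounded up to 16; the local is placed at offset 16 and
   local_vars_size becomes 21.  */
#if 0
rtx slot = bc_allocate_local (5, 32);	/* offset 16 in this scenario */
#endif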
10154
10155
10156 /* Allocate a variable-sized local array. Variable-sized arrays are
10157 represented as pointers to the memory where they are stored. */
10158
10159 rtx
10160 bc_allocate_variable_array (size)
10161 tree size;
10162 {
10163 rtx retval;
10164 const int ptralign = (1 << (PTR_ALIGN - 1));
10165
10166 /* Align pointer */
10167 if (local_vars_size & (ptralign - 1))
10168 local_vars_size += ptralign - (local_vars_size & (ptralign - 1));
10169
10170 /* Note down local space needed: pointer to block; also return
10171 dummy rtx */
10172
10173 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10174 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
10175 return retval;
10176 }
10177
10178
10179 /* Push the machine address for the given external variable offset. */
10180 void
10181 bc_load_externaddr (externaddr)
10182 rtx externaddr;
10183 {
10184 bc_emit_bytecode (constP);
10185 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
10186 BYTECODE_BC_LABEL (externaddr)->offset);
10187
10188 #ifdef DEBUG_PRINT_CODE
10189 fputc ('\n', stderr);
10190 #endif
10191 }
10192
10193
10194 static char *
10195 bc_strdup (s)
10196 char *s;
10197 {
10198 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
10199 strcpy (new, s);
10200 return new;
10201 }
10202
10203
10204 /* Like above, but expects an IDENTIFIER. */
10205 void
10206 bc_load_externaddr_id (id, offset)
10207 tree id;
10208 int offset;
10209 {
10210 if (!IDENTIFIER_POINTER (id))
10211 abort ();
10212
10213 bc_emit_bytecode (constP);
10214 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
10215
10216 #ifdef DEBUG_PRINT_CODE
10217 fputc ('\n', stderr);
10218 #endif
10219 }
10220
10221
10222 /* Push the machine address for the given local variable offset. */
10223 void
10224 bc_load_localaddr (localaddr)
10225 rtx localaddr;
10226 {
10227 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
10228 }
10229
10230
10231 /* Push the machine address for the given parameter offset.
10232 NOTE: offset is in bits. */
10233 void
10234 bc_load_parmaddr (parmaddr)
10235 rtx parmaddr;
10236 {
10237 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
10238 / BITS_PER_UNIT));
10239 }
10240
10241
10242 /* Convert a[i] into *(a + i). */
10243 tree
10244 bc_canonicalize_array_ref (exp)
10245 tree exp;
10246 {
10247 tree type = TREE_TYPE (exp);
10248 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
10249 TREE_OPERAND (exp, 0));
10250 tree index = TREE_OPERAND (exp, 1);
10251
10252
10253 /* Convert the integer argument to a type the same size as a pointer
10254 so the multiply won't overflow spuriously. */
10255
10256 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
10257 index = convert (type_for_size (POINTER_SIZE, 0), index);
10258
10259 /* The array address isn't volatile even if the array is.
10260 (Of course this isn't terribly relevant since the bytecode
10261 translator treats nearly everything as volatile anyway.) */
10262 TREE_THIS_VOLATILE (array_adr) = 0;
10263
10264 return build1 (INDIRECT_REF, type,
10265 fold (build (PLUS_EXPR,
10266 TYPE_POINTER_TO (type),
10267 array_adr,
10268 fold (build (MULT_EXPR,
10269 TYPE_POINTER_TO (type),
10270 index,
10271 size_in_bytes (type))))));
10272 }
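
/* Example of the rewrite above: with `int a[10]' and 4-byte ints,
   `a[i]' becomes, in C terms,

	*(int *) ((char *) &a + (ptr_int) i * 4)

   where PTR_INT stands for an integer type of POINTER_SIZE bits, to
   which the index is widened so the multiply cannot overflow
   spuriously.  */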
10273
10274
10275 /* Load the address of the component referenced by the given
10276 COMPONENT_REF expression.
10277
10278 Returns innermost lvalue. */
10279
10280 tree
10281 bc_expand_component_address (exp)
10282 tree exp;
10283 {
10284 tree tem, chain;
10285 enum machine_mode mode;
10286 int bitpos = 0;
10287 HOST_WIDE_INT SIval;
10288
10289
10290 tem = TREE_OPERAND (exp, 1);
10291 mode = DECL_MODE (tem);
10292
10293
10294 /* Compute cumulative bit offset for nested component refs
10295 and array refs, and find the ultimate containing object. */
10296
10297 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
10298 {
10299 if (TREE_CODE (tem) == COMPONENT_REF)
10300 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
10301 else
10302 if (TREE_CODE (tem) == ARRAY_REF
10303 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10304 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
10305
10306 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
10307 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
10308 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
10309 else
10310 break;
10311 }
10312
10313 bc_expand_expr (tem);
10314
10315
10316 /* For bitfields also push their offset and size */
10317 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
10318 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
10319 else
10320 if (SIval = bitpos / BITS_PER_UNIT)
10321 bc_emit_instruction (addconstPSI, SIval);
10322
10323 return (TREE_OPERAND (exp, 1));
10324 }
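
/* Worked example of the offset accumulation above: for `s.a.b',
   where field `a' starts at bit 32 of `s' and field `b' at bit 8 of
   `a', the loop sums BITPOS = 40, the containing object `s' is
   expanded, and for a non-bitfield the addconstPSI instruction then
   adds 40 / BITS_PER_UNIT == 5 bytes to the address.  */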
10325
10326
10327 /* Emit code to push two SI constants */
10328 void
10329 bc_push_offset_and_size (offset, size)
10330 HOST_WIDE_INT offset, size;
10331 {
10332 bc_emit_instruction (constSI, offset);
10333 bc_emit_instruction (constSI, size);
10334 }
10335
10336
10337 /* Emit byte code to push the address of the given lvalue expression to
10338 the stack. If it's a bit field, we also push offset and size info.
10339
10340 Returns innermost component, which allows us to determine not only
10341 its type, but also whether it's a bitfield. */
10342
10343 tree
10344 bc_expand_address (exp)
10345 tree exp;
10346 {
10347 /* Safeguard */
10348 if (!exp || TREE_CODE (exp) == ERROR_MARK)
10349 return (exp);
10350
10351
10352 switch (TREE_CODE (exp))
10353 {
10354 case ARRAY_REF:
10355
10356 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
10357
10358 case COMPONENT_REF:
10359
10360 return (bc_expand_component_address (exp));
10361
10362 case INDIRECT_REF:
10363
10364 bc_expand_expr (TREE_OPERAND (exp, 0));
10365
10366 /* For variable-sized types: retrieve pointer. Sometimes the
10367 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
10368 also make sure we have an operand, just in case... */
10369
10370 if (TREE_OPERAND (exp, 0)
10371 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
10372 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
10373 bc_emit_instruction (loadP);
10374
10375 /* If packed, also return offset and size */
10376 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
10377
10378 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
10379 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
10380
10381 return (TREE_OPERAND (exp, 0));
10382
10383 case FUNCTION_DECL:
10384
10385 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10386 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
10387 break;
10388
10389 case PARM_DECL:
10390
10391 bc_load_parmaddr (DECL_RTL (exp));
10392
10393 /* For variable-sized types: retrieve pointer */
10394 if (TYPE_SIZE (TREE_TYPE (exp))
10395 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10396 bc_emit_instruction (loadP);
10397
10398 /* If packed, also return offset and size */
10399 if (DECL_BIT_FIELD (exp))
10400 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10401 TREE_INT_CST_LOW (DECL_SIZE (exp)));
10402
10403 break;
10404
10405 case RESULT_DECL:
10406
10407 bc_emit_instruction (returnP);
10408 break;
10409
10410 case VAR_DECL:
10411
10412 #if 0
10413 if (BYTECODE_LABEL (DECL_RTL (exp)))
10414 bc_load_externaddr (DECL_RTL (exp));
10415 #endif
10416
10417 if (DECL_EXTERNAL (exp))
10418 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10419 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
10420 else
10421 bc_load_localaddr (DECL_RTL (exp));
10422
10423 /* For variable-sized types: retrieve pointer */
10424 if (TYPE_SIZE (TREE_TYPE (exp))
10425 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10426 bc_emit_instruction (loadP);
10427
10428 /* If packed, also return offset and size */
10429 if (DECL_BIT_FIELD (exp))
10430 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10431 TREE_INT_CST_LOW (DECL_SIZE (exp)));
10432
10433 break;
10434
10435 case STRING_CST:
10436 {
10437 rtx r;
10438
10439 bc_emit_bytecode (constP);
10440 r = output_constant_def (exp);
10441 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
10442
10443 #ifdef DEBUG_PRINT_CODE
10444 fputc ('\n', stderr);
10445 #endif
10446 }
10447 break;
10448
10449 default:
10450
10451 abort ();
10452 break;
10453 }
10454
10455 /* Most lvalues don't have components. */
10456 return (exp);
10457 }
10458
10459
10460 /* Emit a type code to be used by the runtime support in handling
10461 parameter passing. The type code consists of the machine mode
10462 plus the minimal alignment shifted left 8 bits. */
10463
10464 tree
10465 bc_runtime_type_code (type)
10466 tree type;
10467 {
10468 int val;
10469
10470 switch (TREE_CODE (type))
10471 {
10472 case VOID_TYPE:
10473 case INTEGER_TYPE:
10474 case REAL_TYPE:
10475 case COMPLEX_TYPE:
10476 case ENUMERAL_TYPE:
10477 case POINTER_TYPE:
10478 case RECORD_TYPE:
10479
10480 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
10481 break;
10482
10483 case ERROR_MARK:
10484
10485 val = 0;
10486 break;
10487
10488 default:
10489
10490 abort ();
10491 }
10492 return build_int_2 (val, 0);
10493 }
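
/* Example of the encoding: for a 32-bit `int' with TYPE_MODE SImode
   and TYPE_ALIGN 32, the code is (int) SImode | 32 << 8, i.e. the
   mode in the low byte and the minimal alignment in bits above it.  */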
10494
10495
10496 /* Generate constructor label */
10497 char *
10498 bc_gen_constr_label ()
10499 {
10500 static int label_counter;
10501 static char label[20];
10502
10503 sprintf (label, "*LR%d", label_counter++);
10504
10505 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
10506 }
10507
10508
10509 /* Evaluate constructor CONSTR and return pointer to it on level one. We
10510 expand the constructor data as static data, and push a pointer to it.
10511 The pointer is put in the pointer table and is retrieved by a constP
10512 bytecode instruction. We then loop and store each constructor member in
10513 the corresponding component. Finally, we return the original pointer on
10514 the stack. */
10515
10516 void
10517 bc_expand_constructor (constr)
10518 tree constr;
10519 {
10520 char *l;
10521 HOST_WIDE_INT ptroffs;
10522 rtx constr_rtx;
10523
10524
10525 /* Literal constructors are handled as constants, whereas
10526 non-literals are evaluated and stored element by element
10527 into the data segment. */
10528
10529 /* Allocate space in the proper segment and push a pointer
10530 to the space on the stack. */
10531
10532 l = bc_gen_constr_label ();
10533
10534 if (TREE_CONSTANT (constr))
10535 {
10536 text_section ();
10537
10538 bc_emit_const_labeldef (l);
10539 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
10540 }
10541 else
10542 {
10543 data_section ();
10544
10545 bc_emit_data_labeldef (l);
10546 bc_output_data_constructor (constr);
10547 }
10548
10549
10550 /* Add reference to pointer table and recall pointer to stack;
10551 this code is common for both types of constructors: literals
10552 and non-literals. */
10553
10554 ptroffs = bc_define_pointer (l);
10555 bc_emit_instruction (constP, ptroffs);
10556
10557 /* This is all that has to be done if it's a literal. */
10558 if (TREE_CONSTANT (constr))
10559 return;
10560
10561
10562 /* At this point, we have the pointer to the structure on top of the stack.
10563 Generate sequences of store_memory calls for the constructor. */
10564
10565 /* Constructor type is structure */
10566 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
10567 {
10568 register tree elt;
10569
10570 /* If the constructor has fewer fields than the structure,
10571 clear the whole structure first. */
10572
10573 if (list_length (CONSTRUCTOR_ELTS (constr))
10574 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
10575 {
10576 bc_emit_instruction (duplicate);
10577 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
10578 bc_emit_instruction (clearBLK);
10579 }
10580
10581 /* Store each element of the constructor into the corresponding
10582 field of TARGET. */
10583
10584 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
10585 {
10586 register tree field = TREE_PURPOSE (elt);
10587 register enum machine_mode mode;
10588 int bitsize;
10589 int bitpos;
10590 int unsignedp;
10591
10592 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
10593 mode = DECL_MODE (field);
10594 unsignedp = TREE_UNSIGNED (field);
10595
10596 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
10597
10598 bc_store_field (field, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
10599 /* The alignment of TARGET is
10600 at least what its type requires. */
10601 VOIDmode, 0,
10602 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
10603 int_size_in_bytes (TREE_TYPE (constr)));
10604 }
10605 }
10606 else
10607
10608 /* Constructor type is array */
10609 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
10610 {
10611 register tree elt;
10612 register int i;
10613 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
10614 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
10615 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
10616 tree elttype = TREE_TYPE (TREE_TYPE (constr));
10617
10618 /* If the constructor has fewer elements than the array,
10619 clear the whole array first. */
10620
10621 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
10622 {
10623 bc_emit_instruction (duplicate);
10624 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
10625 bc_emit_instruction (clearBLK);
10626 }
10627
10628
10629 /* Store each element of the constructor into the corresponding
10630 element of TARGET, determined by counting the elements. */
10631
10632 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
10633 elt;
10634 elt = TREE_CHAIN (elt), i++)
10635 {
10636 register enum machine_mode mode;
10637 int bitsize;
10638 int bitpos;
10639 int unsignedp;
10640
10641 mode = TYPE_MODE (elttype);
10642 bitsize = GET_MODE_BITSIZE (mode);
10643 unsignedp = TREE_UNSIGNED (elttype);
10644
10645 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
10646 /* * TYPE_SIZE_UNIT (elttype) */ );
10647
10648 bc_store_field (elt, bitsize, bitpos, mode,
10649 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
10650 /* The alignment of TARGET is
10651 at least what its type requires. */
10652 VOIDmode, 0,
10653 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
10654 int_size_in_bytes (TREE_TYPE (constr)));
10655 }
10656
10657 }
10658 }
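/* Editorial sketch: a condensed trace of what the function above emits
   for a non-literal structure constructor that leaves some fields
   unset; PTROFFS and SIZE stand for the pointer-table offset and the
   int_size_in_bytes of the constructor's type.  */
#if 0
bc_emit_instruction (constP, ptroffs);  /* recall pointer to the space */
bc_emit_instruction (duplicate);        /* keep a copy of the pointer */
bc_emit_instruction (constSI, size);    /* whole-object size */
bc_emit_instruction (clearBLK);         /* zero it before partial stores */
/* ... followed by one bc_store_field per constructor element; the
   original pointer is still on the stack when the function returns.  */
#endif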
10659
10660
10661 /* Store the value of EXP (an expression tree) into member FIELD of the
10662 structure whose address is on the stack. The member has type TYPE and
10663 mode MODE, and occupies BITSIZE bits, starting BITPOS bits from the
10664 beginning of the structure.
10665
10666 ALIGN is the alignment that the structure is known to have, measured
10667 in bytes. TOTAL_SIZE is its size in bytes, or -1 if variable. */
10668
10669 void
10670 bc_store_field (field, bitsize, bitpos, mode, exp, type,
10671 value_mode, unsignedp, align, total_size)
10672 int bitsize, bitpos;
10673 enum machine_mode mode;
10674 tree field, exp, type;
10675 enum machine_mode value_mode;
10676 int unsignedp;
10677 int align;
10678 int total_size;
10679 {
10680
10681 /* Expand expression and copy pointer */
10682 bc_expand_expr (exp);
10683 bc_emit_instruction (over);
10684
10685
10686 /* If the component is a bit field, we cannot use addressing to access
10687 it. Use bit-field techniques to store in it. */
10688
10689 if (DECL_BIT_FIELD (field))
10690 {
10691 bc_store_bit_field (bitpos, bitsize, unsignedp);
10692 return;
10693 }
10694 else
10695 /* Not bit field */
10696 {
10697 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
10698
10699 /* Advance pointer to the desired member */
10700 if (offset)
10701 bc_emit_instruction (addconstPSI, offset);
10702
10703 /* Store */
10704 bc_store_memory (type, field);
10705 }
10706 }
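/* Editorial sketch of the stack discipline above for a non-bit-field
   member at byte offset OFF, with the top of the stack rightmost:

     on entry:         ... target_ptr
     bc_expand_expr:   ... target_ptr value
     over:             ... target_ptr value target_ptr
     addconstPSI OFF:  ... target_ptr value member_ptr
     bc_store_memory:  ... target_ptr

   The target pointer survives each store, which is what lets
   bc_expand_constructor call this function once per element.  */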
10707
10708
10709 /* Store SI/SU in bitfield */
10710 void
10711 bc_store_bit_field (offset, size, unsignedp)
10712 int offset, size, unsignedp;
10713 {
10714 /* Push bitfield offset and size */
10715 bc_push_offset_and_size (offset, size);
10716
10717 /* Store */
10718 bc_emit_instruction (sstoreBI);
10719 }
10720
10721
10722 /* Load SI/SU from bitfield */
10723 void
10724 bc_load_bit_field (offset, size, unsignedp)
10725 int offset, size, unsignedp;
10726 {
10727 /* Push bitfield offset and size */
10728 bc_push_offset_and_size (offset, size);
10729
10730 /* Load: sign-extend if signed, else zero-extend */
10731 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
10732 }
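/* Editorial sketch: the two helpers above are mirror images.  Both
   push the bit offset and width first; the store then emits sstoreBI,
   while the load picks a zero- or sign-extending opcode from
   UNSIGNEDP.  */
#if 0
bc_store_bit_field (bitpos, bitsize, 0);  /* ... addr value => ... */
bc_load_bit_field (bitpos, bitsize, 0);   /* ... addr => ... value, sign-extended */
#endif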
10733
10734
10735 /* Adjust the interpreter stack by NLEVELS. Positive means drop NLEVELS
10736 levels (adjust the stack pointer upwards); negative means push that
10737 many levels (adjust the stack pointer downwards). Only positive values
10738 normally make sense. */
10739
10740 void
10741 bc_adjust_stack (nlevels)
10742 int nlevels;
10743 {
10744 switch (nlevels)
10745 {
10746 case 0:
10747 break;
10748
10749 case 2:
10750 bc_emit_instruction (drop);
10751 /* Fall through to drop a second level. */
10752 case 1:
10753 bc_emit_instruction (drop);
10754 break;
10755
10756 default:
10757
10758 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
10759 stack_depth -= nlevels;
10760 }
10761
10762 #if defined (VALIDATE_STACK_FOR_BC)
10763 VALIDATE_STACK_FOR_BC ();
10764 #endif
10765 }
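/* Editorial sketch: what the function above emits for a few NLEVELS
   values.  One or two levels use the short `drop' form; anything else
   becomes a single parameterized adjstackSI.  */
#if 0
bc_adjust_stack (0);   /* nothing emitted */
bc_adjust_stack (1);   /* drop */
bc_adjust_stack (2);   /* drop; drop (case 2 falls through to case 1) */
bc_adjust_stack (5);   /* adjstackSI 5 */
#endif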