]> gcc.gnu.org Git - gcc.git/blame - gcc/expr.c
(expand_assignment): Remove bogus resetting of alignment to inner alignment.
[gcc.git] / gcc / expr.c
CommitLineData
bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
3d27140a 2 Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
bbf6f052
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
bbf6f052
RK
20
21
22#include "config.h"
ca695ac9 23#include "machmode.h"
bbf6f052
RK
24#include "rtl.h"
25#include "tree.h"
ca695ac9 26#include "obstack.h"
bbf6f052 27#include "flags.h"
bf76bb5a 28#include "regs.h"
4ed67205 29#include "hard-reg-set.h"
3d195391 30#include "except.h"
bbf6f052
RK
31#include "function.h"
32#include "insn-flags.h"
33#include "insn-codes.h"
34#include "expr.h"
35#include "insn-config.h"
36#include "recog.h"
37#include "output.h"
bbf6f052
RK
38#include "typeclass.h"
39
ca695ac9
JB
40#include "bytecode.h"
41#include "bc-opcode.h"
42#include "bc-typecd.h"
43#include "bc-optab.h"
44#include "bc-emit.h"
45
46
bbf6f052
RK
47#define CEIL(x,y) (((x) + (y) - 1) / (y))
48
49/* Decide whether a function's arguments should be processed
bbc8a071
RK
50 from first to last or from last to first.
51
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
bbf6f052 54
bbf6f052 55#ifdef PUSH_ROUNDING
bbc8a071 56
3319a347 57#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
bbf6f052
RK
58#define PUSH_ARGS_REVERSED /* If it's last to first */
59#endif
bbc8a071 60
bbf6f052
RK
61#endif
62
63#ifndef STACK_PUSH_CODE
64#ifdef STACK_GROWS_DOWNWARD
65#define STACK_PUSH_CODE PRE_DEC
66#else
67#define STACK_PUSH_CODE PRE_INC
68#endif
69#endif
70
71/* Like STACK_BOUNDARY but in units of bytes, not bits. */
72#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
73
74/* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
80int cse_not_expected;
81
82/* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85int do_preexpand_calls = 1;
86
87/* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89int pending_stack_adjust;
90
91/* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95int inhibit_defer_pop;
96
97/* A list of all cleanups which belong to the arguments of
98 function calls being expanded by expand_call. */
99tree cleanups_this_call;
100
d93d4205
MS
101/* When temporaries are created by TARGET_EXPRs, they are created at
102 this level of temp_slot_level, so that they can remain allocated
103 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
104 of TARGET_EXPRs. */
105int target_temp_slot_level;
106
bbf6f052
RK
107/* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs
109 returned. */
110static rtx saveregs_value;
111
dcf76fff
TW
112/* Similarly for __builtin_apply_args. */
113static rtx apply_args_value;
114
4969d05d
RK
115/* This structure is used by move_by_pieces to describe the move to
116 be performed. */
117
118struct move_by_pieces
119{
120 rtx to;
121 rtx to_addr;
122 int autinc_to;
123 int explicit_inc_to;
e9cf6a97 124 int to_struct;
4969d05d
RK
125 rtx from;
126 rtx from_addr;
127 int autinc_from;
128 int explicit_inc_from;
e9cf6a97 129 int from_struct;
4969d05d
RK
130 int len;
131 int offset;
132 int reverse;
133};
134
9de08200
RK
135/* This structure is used by clear_by_pieces to describe the clear to
136 be performed. */
137
138struct clear_by_pieces
139{
140 rtx to;
141 rtx to_addr;
142 int autinc_to;
143 int explicit_inc_to;
144 int to_struct;
145 int len;
146 int offset;
147 int reverse;
148};
149
c02bd5d9
JB
150/* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
153
186f92ce 154extern int local_vars_size;
c02bd5d9
JB
155extern int stack_depth;
156extern int max_stack_depth;
292b1216 157extern struct obstack permanent_obstack;
4ed67205 158extern rtx arg_pointer_save_area;
c02bd5d9 159
4969d05d
RK
160static rtx enqueue_insn PROTO((rtx, rtx));
161static int queued_subexp_p PROTO((rtx));
162static void init_queue PROTO((void));
163static void move_by_pieces PROTO((rtx, rtx, int, int));
164static int move_by_pieces_ninsns PROTO((unsigned int, int));
165static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
166 struct move_by_pieces *));
9de08200
RK
167static void clear_by_pieces PROTO((rtx, int, int));
168static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
169 struct clear_by_pieces *));
170static int is_zeros_p PROTO((tree));
171static int mostly_zeros_p PROTO((tree));
e1a43f73 172static void store_constructor PROTO((tree, rtx, int));
4969d05d
RK
173static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
174 enum machine_mode, int, int, int));
6be58303 175static int get_inner_unaligned_p PROTO((tree));
4969d05d
RK
176static tree save_noncopied_parts PROTO((tree, tree));
177static tree init_noncopied_parts PROTO((tree, tree));
178static int safe_from_p PROTO((rtx, tree));
179static int fixed_type_p PROTO((tree));
01c8a7c8 180static rtx var_rtx PROTO((tree));
4969d05d
RK
181static int get_pointer_alignment PROTO((tree, unsigned));
182static tree string_constant PROTO((tree, tree *));
183static tree c_strlen PROTO((tree));
307b821c
RK
184static rtx expand_builtin PROTO((tree, rtx, rtx,
185 enum machine_mode, int));
0006469d
TW
186static int apply_args_size PROTO((void));
187static int apply_result_size PROTO((void));
188static rtx result_vector PROTO((int, rtx));
189static rtx expand_builtin_apply_args PROTO((void));
190static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
191static void expand_builtin_return PROTO((rtx));
7b8b9722 192static rtx expand_increment PROTO((tree, int, int));
0dc09c0f 193void bc_expand_increment PROTO((struct increment_operator *, tree));
ca695ac9
JB
194rtx bc_allocate_local PROTO((int, int));
195void bc_store_memory PROTO((tree, tree));
196tree bc_expand_component_address PROTO((tree));
197tree bc_expand_address PROTO((tree));
198void bc_expand_constructor PROTO((tree));
199void bc_adjust_stack PROTO((int));
200tree bc_canonicalize_array_ref PROTO((tree));
201void bc_load_memory PROTO((tree, tree));
202void bc_load_externaddr PROTO((rtx));
203void bc_load_externaddr_id PROTO((tree, int));
204void bc_load_localaddr PROTO((rtx));
205void bc_load_parmaddr PROTO((rtx));
4969d05d
RK
206static void preexpand_calls PROTO((tree));
207static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
2e5ec6cf 208void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
4969d05d
RK
209static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
210static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
211static void do_jump_for_compare PROTO((rtx, rtx, rtx));
212static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
213static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
5dab5552 214static tree defer_cleanups_to PROTO((tree));
16545b0a 215extern tree truthvalue_conversion PROTO((tree));
bbf6f052 216
4fa52007
RK
217/* Record for each mode whether we can move a register directly to or
218 from an object of that mode in memory. If we can't, we won't try
219 to use that mode directly when accessing a field of that mode. */
220
221static char direct_load[NUM_MACHINE_MODES];
222static char direct_store[NUM_MACHINE_MODES];
223
bbf6f052
RK
224/* MOVE_RATIO is the number of move instructions that is better than
225 a block move. */
226
227#ifndef MOVE_RATIO
266007a7 228#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
bbf6f052
RK
229#define MOVE_RATIO 2
230#else
231/* A value of around 6 would minimize code size; infinity would minimize
232 execution time. */
233#define MOVE_RATIO 15
234#endif
235#endif
e87b4f3f 236
266007a7 237/* This array records the insn_code of insns to perform block moves. */
e6677db3 238enum insn_code movstr_optab[NUM_MACHINE_MODES];
266007a7 239
9de08200
RK
240/* This array records the insn_code of insns to perform block clears. */
241enum insn_code clrstr_optab[NUM_MACHINE_MODES];
242
0f41302f 243/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
e87b4f3f
RS
244
245#ifndef SLOW_UNALIGNED_ACCESS
c7a7ac46 246#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
e87b4f3f 247#endif
0006469d
TW
248
249/* Register mappings for target machines without register windows. */
250#ifndef INCOMING_REGNO
251#define INCOMING_REGNO(OUT) (OUT)
252#endif
253#ifndef OUTGOING_REGNO
254#define OUTGOING_REGNO(IN) (IN)
255#endif
bbf6f052 256\f
0f41302f 257/* Maps used to convert modes to const, load, and store bytecodes. */
ca695ac9
JB
258enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
259enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
260enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
261
262/* Initialize maps used to convert modes to const, load, and store
0f41302f
MS
263 bytecodes. */
264
ca695ac9
JB
265void
266bc_init_mode_to_opcode_maps ()
267{
268 int mode;
269
6bd6178d 270 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
ca695ac9
JB
271 mode_to_const_map[mode] =
272 mode_to_load_map[mode] =
273 mode_to_store_map[mode] = neverneverland;
274
275#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
6bd6178d
RK
276 mode_to_const_map[(int) SYM] = CONST; \
277 mode_to_load_map[(int) SYM] = LOAD; \
278 mode_to_store_map[(int) SYM] = STORE;
ca695ac9
JB
279
280#include "modemap.def"
281#undef DEF_MODEMAP
282}
283\f
4fa52007 284/* This is run once per compilation to set up which modes can be used
266007a7 285 directly in memory and to initialize the block move optab. */
4fa52007
RK
286
287void
288init_expr_once ()
289{
290 rtx insn, pat;
291 enum machine_mode mode;
e2549997
RS
292 /* Try indexing by frame ptr and try by stack ptr.
293 It is known that on the Convex the stack ptr isn't a valid index.
294 With luck, one or the other is valid on any machine. */
4fa52007 295 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
e2549997 296 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
4fa52007
RK
297
298 start_sequence ();
299 insn = emit_insn (gen_rtx (SET, 0, 0));
300 pat = PATTERN (insn);
301
302 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
303 mode = (enum machine_mode) ((int) mode + 1))
304 {
305 int regno;
306 rtx reg;
307 int num_clobbers;
308
309 direct_load[(int) mode] = direct_store[(int) mode] = 0;
310 PUT_MODE (mem, mode);
e2549997 311 PUT_MODE (mem1, mode);
4fa52007 312
e6fe56a4
RK
313 /* See if there is some register that can be used in this mode and
314 directly loaded or stored from memory. */
315
7308a047
RS
316 if (mode != VOIDmode && mode != BLKmode)
317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
318 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
319 regno++)
320 {
321 if (! HARD_REGNO_MODE_OK (regno, mode))
322 continue;
e6fe56a4 323
7308a047 324 reg = gen_rtx (REG, mode, regno);
e6fe56a4 325
7308a047
RS
326 SET_SRC (pat) = mem;
327 SET_DEST (pat) = reg;
328 if (recog (pat, insn, &num_clobbers) >= 0)
329 direct_load[(int) mode] = 1;
e6fe56a4 330
e2549997
RS
331 SET_SRC (pat) = mem1;
332 SET_DEST (pat) = reg;
333 if (recog (pat, insn, &num_clobbers) >= 0)
334 direct_load[(int) mode] = 1;
335
7308a047
RS
336 SET_SRC (pat) = reg;
337 SET_DEST (pat) = mem;
338 if (recog (pat, insn, &num_clobbers) >= 0)
339 direct_store[(int) mode] = 1;
e2549997
RS
340
341 SET_SRC (pat) = reg;
342 SET_DEST (pat) = mem1;
343 if (recog (pat, insn, &num_clobbers) >= 0)
344 direct_store[(int) mode] = 1;
7308a047 345 }
4fa52007
RK
346 }
347
348 end_sequence ();
349}
350
bbf6f052
RK
351/* This is run at the start of compiling a function. */
352
353void
354init_expr ()
355{
356 init_queue ();
357
358 pending_stack_adjust = 0;
359 inhibit_defer_pop = 0;
360 cleanups_this_call = 0;
361 saveregs_value = 0;
0006469d 362 apply_args_value = 0;
e87b4f3f 363 forced_labels = 0;
bbf6f052
RK
364}
365
366/* Save all variables describing the current status into the structure *P.
367 This is used before starting a nested function. */
368
369void
370save_expr_status (p)
371 struct function *p;
372{
373 /* Instead of saving the postincrement queue, empty it. */
374 emit_queue ();
375
376 p->pending_stack_adjust = pending_stack_adjust;
377 p->inhibit_defer_pop = inhibit_defer_pop;
378 p->cleanups_this_call = cleanups_this_call;
379 p->saveregs_value = saveregs_value;
0006469d 380 p->apply_args_value = apply_args_value;
e87b4f3f 381 p->forced_labels = forced_labels;
bbf6f052
RK
382
383 pending_stack_adjust = 0;
384 inhibit_defer_pop = 0;
385 cleanups_this_call = 0;
386 saveregs_value = 0;
0006469d 387 apply_args_value = 0;
e87b4f3f 388 forced_labels = 0;
bbf6f052
RK
389}
390
391/* Restore all variables describing the current status from the structure *P.
392 This is used after a nested function. */
393
394void
395restore_expr_status (p)
396 struct function *p;
397{
398 pending_stack_adjust = p->pending_stack_adjust;
399 inhibit_defer_pop = p->inhibit_defer_pop;
400 cleanups_this_call = p->cleanups_this_call;
401 saveregs_value = p->saveregs_value;
0006469d 402 apply_args_value = p->apply_args_value;
e87b4f3f 403 forced_labels = p->forced_labels;
bbf6f052
RK
404}
405\f
406/* Manage the queue of increment instructions to be output
407 for POSTINCREMENT_EXPR expressions, etc. */
408
409static rtx pending_chain;
410
411/* Queue up to increment (or change) VAR later. BODY says how:
412 BODY should be the same thing you would pass to emit_insn
413 to increment right away. It will go to emit_insn later on.
414
415 The value is a QUEUED expression to be used in place of VAR
416 where you want to guarantee the pre-incrementation value of VAR. */
417
418static rtx
419enqueue_insn (var, body)
420 rtx var, body;
421{
422 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
906c4e36 423 var, NULL_RTX, NULL_RTX, body, pending_chain);
bbf6f052
RK
424 return pending_chain;
425}
426
427/* Use protect_from_queue to convert a QUEUED expression
428 into something that you can put immediately into an instruction.
429 If the queued incrementation has not happened yet,
430 protect_from_queue returns the variable itself.
431 If the incrementation has happened, protect_from_queue returns a temp
432 that contains a copy of the old value of the variable.
433
434 Any time an rtx which might possibly be a QUEUED is to be put
435 into an instruction, it must be passed through protect_from_queue first.
436 QUEUED expressions are not meaningful in instructions.
437
438 Do not pass a value through protect_from_queue and then hold
439 on to it for a while before putting it in an instruction!
440 If the queue is flushed in between, incorrect code will result. */
441
442rtx
443protect_from_queue (x, modify)
444 register rtx x;
445 int modify;
446{
447 register RTX_CODE code = GET_CODE (x);
448
449#if 0 /* A QUEUED can hang around after the queue is forced out. */
450 /* Shortcut for most common case. */
451 if (pending_chain == 0)
452 return x;
453#endif
454
455 if (code != QUEUED)
456 {
e9baa644
RK
457 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
458 use of autoincrement. Make a copy of the contents of the memory
459 location rather than a copy of the address, but not if the value is
460 of mode BLKmode. Don't modify X in place since it might be
461 shared. */
bbf6f052
RK
462 if (code == MEM && GET_MODE (x) != BLKmode
463 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
464 {
465 register rtx y = XEXP (x, 0);
e9baa644
RK
466 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
467
468 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
469 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
470 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
471
bbf6f052
RK
472 if (QUEUED_INSN (y))
473 {
e9baa644
RK
474 register rtx temp = gen_reg_rtx (GET_MODE (new));
475 emit_insn_before (gen_move_insn (temp, new),
bbf6f052
RK
476 QUEUED_INSN (y));
477 return temp;
478 }
e9baa644 479 return new;
bbf6f052
RK
480 }
481 /* Otherwise, recursively protect the subexpressions of all
482 the kinds of rtx's that can contain a QUEUED. */
483 if (code == MEM)
3f15938e
RS
484 {
485 rtx tem = protect_from_queue (XEXP (x, 0), 0);
486 if (tem != XEXP (x, 0))
487 {
488 x = copy_rtx (x);
489 XEXP (x, 0) = tem;
490 }
491 }
bbf6f052
RK
492 else if (code == PLUS || code == MULT)
493 {
3f15938e
RS
494 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
495 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
496 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
497 {
498 x = copy_rtx (x);
499 XEXP (x, 0) = new0;
500 XEXP (x, 1) = new1;
501 }
bbf6f052
RK
502 }
503 return x;
504 }
505 /* If the increment has not happened, use the variable itself. */
506 if (QUEUED_INSN (x) == 0)
507 return QUEUED_VAR (x);
508 /* If the increment has happened and a pre-increment copy exists,
509 use that copy. */
510 if (QUEUED_COPY (x) != 0)
511 return QUEUED_COPY (x);
512 /* The increment has happened but we haven't set up a pre-increment copy.
513 Set one up now, and use it. */
514 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
515 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
516 QUEUED_INSN (x));
517 return QUEUED_COPY (x);
518}
519
520/* Return nonzero if X contains a QUEUED expression:
521 if it contains anything that will be altered by a queued increment.
522 We handle only combinations of MEM, PLUS, MINUS and MULT operators
523 since memory addresses generally contain only those. */
524
525static int
526queued_subexp_p (x)
527 rtx x;
528{
529 register enum rtx_code code = GET_CODE (x);
530 switch (code)
531 {
532 case QUEUED:
533 return 1;
534 case MEM:
535 return queued_subexp_p (XEXP (x, 0));
536 case MULT:
537 case PLUS:
538 case MINUS:
539 return queued_subexp_p (XEXP (x, 0))
540 || queued_subexp_p (XEXP (x, 1));
541 }
542 return 0;
543}
544
545/* Perform all the pending incrementations. */
546
547void
548emit_queue ()
549{
550 register rtx p;
551 while (p = pending_chain)
552 {
553 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
554 pending_chain = QUEUED_NEXT (p);
555 }
556}
557
558static void
559init_queue ()
560{
561 if (pending_chain)
562 abort ();
563}
564\f
565/* Copy data from FROM to TO, where the machine modes are not the same.
566 Both modes may be integer, or both may be floating.
567 UNSIGNEDP should be nonzero if FROM is an unsigned type.
568 This causes zero-extension instead of sign-extension. */
569
570void
571convert_move (to, from, unsignedp)
572 register rtx to, from;
573 int unsignedp;
574{
575 enum machine_mode to_mode = GET_MODE (to);
576 enum machine_mode from_mode = GET_MODE (from);
577 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
578 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
579 enum insn_code code;
580 rtx libcall;
581
582 /* rtx code for making an equivalent value. */
583 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
584
585 to = protect_from_queue (to, 1);
586 from = protect_from_queue (from, 0);
587
588 if (to_real != from_real)
589 abort ();
590
1499e0a8
RK
591 /* If FROM is a SUBREG that indicates that we have already done at least
592 the required extension, strip it. We don't handle such SUBREGs as
593 TO here. */
594
595 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
596 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
597 >= GET_MODE_SIZE (to_mode))
598 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
599 from = gen_lowpart (to_mode, from), from_mode = to_mode;
600
601 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
602 abort ();
603
bbf6f052
RK
604 if (to_mode == from_mode
605 || (from_mode == VOIDmode && CONSTANT_P (from)))
606 {
607 emit_move_insn (to, from);
608 return;
609 }
610
611 if (to_real)
612 {
81d79e2c
RS
613 rtx value;
614
2b01c326 615 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
b424402e 616 {
2b01c326
RK
617 /* Try converting directly if the insn is supported. */
618 if ((code = can_extend_p (to_mode, from_mode, 0))
619 != CODE_FOR_nothing)
620 {
621 emit_unop_insn (code, to, from, UNKNOWN);
622 return;
623 }
bbf6f052 624 }
2b01c326 625
b424402e
RS
626#ifdef HAVE_trunchfqf2
627 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
628 {
629 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
630 return;
631 }
632#endif
633#ifdef HAVE_truncsfqf2
634 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
637 return;
638 }
639#endif
640#ifdef HAVE_truncdfqf2
641 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
644 return;
645 }
646#endif
647#ifdef HAVE_truncxfqf2
648 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
649 {
650 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
651 return;
652 }
653#endif
654#ifdef HAVE_trunctfqf2
655 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
656 {
657 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
658 return;
659 }
660#endif
03747aa3
RK
661
662#ifdef HAVE_trunctqfhf2
663 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
664 {
665 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
666 return;
667 }
668#endif
b424402e
RS
669#ifdef HAVE_truncsfhf2
670 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
671 {
672 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
673 return;
674 }
675#endif
676#ifdef HAVE_truncdfhf2
677 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
678 {
679 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
680 return;
681 }
682#endif
683#ifdef HAVE_truncxfhf2
684 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
685 {
686 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
687 return;
688 }
689#endif
690#ifdef HAVE_trunctfhf2
691 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
692 {
693 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
694 return;
695 }
696#endif
2b01c326
RK
697
698#ifdef HAVE_truncsftqf2
699 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
700 {
701 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
702 return;
703 }
704#endif
705#ifdef HAVE_truncdftqf2
706 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
707 {
708 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
709 return;
710 }
711#endif
712#ifdef HAVE_truncxftqf2
713 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
714 {
715 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
716 return;
717 }
718#endif
719#ifdef HAVE_trunctftqf2
720 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
721 {
722 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
723 return;
724 }
725#endif
726
bbf6f052
RK
727#ifdef HAVE_truncdfsf2
728 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
729 {
730 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
731 return;
732 }
733#endif
b092b471
JW
734#ifdef HAVE_truncxfsf2
735 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
736 {
737 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
738 return;
739 }
740#endif
bbf6f052
RK
741#ifdef HAVE_trunctfsf2
742 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
743 {
744 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
745 return;
746 }
747#endif
b092b471
JW
748#ifdef HAVE_truncxfdf2
749 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
750 {
751 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
752 return;
753 }
754#endif
bbf6f052
RK
755#ifdef HAVE_trunctfdf2
756 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
757 {
758 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
759 return;
760 }
761#endif
762
b092b471
JW
763 libcall = (rtx) 0;
764 switch (from_mode)
765 {
766 case SFmode:
767 switch (to_mode)
768 {
769 case DFmode:
770 libcall = extendsfdf2_libfunc;
771 break;
772
773 case XFmode:
774 libcall = extendsfxf2_libfunc;
775 break;
776
777 case TFmode:
778 libcall = extendsftf2_libfunc;
779 break;
780 }
781 break;
782
783 case DFmode:
784 switch (to_mode)
785 {
786 case SFmode:
787 libcall = truncdfsf2_libfunc;
788 break;
789
790 case XFmode:
791 libcall = extenddfxf2_libfunc;
792 break;
793
794 case TFmode:
795 libcall = extenddftf2_libfunc;
796 break;
797 }
798 break;
799
800 case XFmode:
801 switch (to_mode)
802 {
803 case SFmode:
804 libcall = truncxfsf2_libfunc;
805 break;
806
807 case DFmode:
808 libcall = truncxfdf2_libfunc;
809 break;
810 }
811 break;
812
813 case TFmode:
814 switch (to_mode)
815 {
816 case SFmode:
817 libcall = trunctfsf2_libfunc;
818 break;
819
820 case DFmode:
821 libcall = trunctfdf2_libfunc;
822 break;
823 }
824 break;
825 }
826
827 if (libcall == (rtx) 0)
828 /* This conversion is not implemented yet. */
bbf6f052
RK
829 abort ();
830
81d79e2c
RS
831 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
832 1, from, from_mode);
833 emit_move_insn (to, value);
bbf6f052
RK
834 return;
835 }
836
837 /* Now both modes are integers. */
838
839 /* Handle expanding beyond a word. */
840 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
841 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
842 {
843 rtx insns;
844 rtx lowpart;
845 rtx fill_value;
846 rtx lowfrom;
847 int i;
848 enum machine_mode lowpart_mode;
849 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
850
851 /* Try converting directly if the insn is supported. */
852 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
853 != CODE_FOR_nothing)
854 {
cd1b4b44
RK
855 /* If FROM is a SUBREG, put it into a register. Do this
856 so that we always generate the same set of insns for
857 better cse'ing; if an intermediate assignment occurred,
858 we won't be doing the operation directly on the SUBREG. */
859 if (optimize > 0 && GET_CODE (from) == SUBREG)
860 from = force_reg (from_mode, from);
bbf6f052
RK
861 emit_unop_insn (code, to, from, equiv_code);
862 return;
863 }
864 /* Next, try converting via full word. */
865 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
866 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
867 != CODE_FOR_nothing))
868 {
a81fee56
RS
869 if (GET_CODE (to) == REG)
870 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
bbf6f052
RK
871 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
872 emit_unop_insn (code, to,
873 gen_lowpart (word_mode, to), equiv_code);
874 return;
875 }
876
877 /* No special multiword conversion insn; do it by hand. */
878 start_sequence ();
879
5c5033c3
RK
880 /* Since we will turn this into a no conflict block, we must ensure
881 that the source does not overlap the target. */
882
883 if (reg_overlap_mentioned_p (to, from))
884 from = force_reg (from_mode, from);
885
bbf6f052
RK
886 /* Get a copy of FROM widened to a word, if necessary. */
887 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
888 lowpart_mode = word_mode;
889 else
890 lowpart_mode = from_mode;
891
892 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
893
894 lowpart = gen_lowpart (lowpart_mode, to);
895 emit_move_insn (lowpart, lowfrom);
896
897 /* Compute the value to put in each remaining word. */
898 if (unsignedp)
899 fill_value = const0_rtx;
900 else
901 {
902#ifdef HAVE_slt
903 if (HAVE_slt
904 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
905 && STORE_FLAG_VALUE == -1)
906 {
906c4e36
RK
907 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
908 lowpart_mode, 0, 0);
bbf6f052
RK
909 fill_value = gen_reg_rtx (word_mode);
910 emit_insn (gen_slt (fill_value));
911 }
912 else
913#endif
914 {
915 fill_value
916 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
917 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 918 NULL_RTX, 0);
bbf6f052
RK
919 fill_value = convert_to_mode (word_mode, fill_value, 1);
920 }
921 }
922
923 /* Fill the remaining words. */
924 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
925 {
926 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
927 rtx subword = operand_subword (to, index, 1, to_mode);
928
929 if (subword == 0)
930 abort ();
931
932 if (fill_value != subword)
933 emit_move_insn (subword, fill_value);
934 }
935
936 insns = get_insns ();
937 end_sequence ();
938
906c4e36 939 emit_no_conflict_block (insns, to, from, NULL_RTX,
2abec1b7 940 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
941 return;
942 }
943
d3c64ee3
RS
944 /* Truncating multi-word to a word or less. */
945 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
946 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 947 {
431a6eca
JW
948 if (!((GET_CODE (from) == MEM
949 && ! MEM_VOLATILE_P (from)
950 && direct_load[(int) to_mode]
951 && ! mode_dependent_address_p (XEXP (from, 0)))
952 || GET_CODE (from) == REG
953 || GET_CODE (from) == SUBREG))
954 from = force_reg (from_mode, from);
bbf6f052
RK
955 convert_move (to, gen_lowpart (word_mode, from), 0);
956 return;
957 }
958
959 /* Handle pointer conversion */ /* SPEE 900220 */
960 if (to_mode == PSImode)
961 {
962 if (from_mode != SImode)
963 from = convert_to_mode (SImode, from, unsignedp);
964
1f584163
DE
965#ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2)
bbf6f052 967 {
1f584163 968 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
bbf6f052
RK
969 return;
970 }
1f584163 971#endif /* HAVE_truncsipsi2 */
bbf6f052
RK
972 abort ();
973 }
974
975 if (from_mode == PSImode)
976 {
977 if (to_mode != SImode)
978 {
979 from = convert_to_mode (SImode, from, unsignedp);
980 from_mode = SImode;
981 }
982 else
983 {
1f584163
DE
984#ifdef HAVE_extendpsisi2
985 if (HAVE_extendpsisi2)
bbf6f052 986 {
1f584163 987 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
bbf6f052
RK
988 return;
989 }
1f584163 990#endif /* HAVE_extendpsisi2 */
bbf6f052
RK
991 abort ();
992 }
993 }
994
0407367d
RK
995 if (to_mode == PDImode)
996 {
997 if (from_mode != DImode)
998 from = convert_to_mode (DImode, from, unsignedp);
999
1000#ifdef HAVE_truncdipdi2
1001 if (HAVE_truncdipdi2)
1002 {
1003 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1004 return;
1005 }
1006#endif /* HAVE_truncdipdi2 */
1007 abort ();
1008 }
1009
1010 if (from_mode == PDImode)
1011 {
1012 if (to_mode != DImode)
1013 {
1014 from = convert_to_mode (DImode, from, unsignedp);
1015 from_mode = DImode;
1016 }
1017 else
1018 {
1019#ifdef HAVE_extendpdidi2
1020 if (HAVE_extendpdidi2)
1021 {
1022 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1023 return;
1024 }
1025#endif /* HAVE_extendpdidi2 */
1026 abort ();
1027 }
1028 }
1029
bbf6f052
RK
1030 /* Now follow all the conversions between integers
1031 no more than a word long. */
1032
1033 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1034 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1035 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 1036 GET_MODE_BITSIZE (from_mode)))
bbf6f052 1037 {
d3c64ee3
RS
1038 if (!((GET_CODE (from) == MEM
1039 && ! MEM_VOLATILE_P (from)
1040 && direct_load[(int) to_mode]
1041 && ! mode_dependent_address_p (XEXP (from, 0)))
1042 || GET_CODE (from) == REG
1043 || GET_CODE (from) == SUBREG))
1044 from = force_reg (from_mode, from);
34aa3599
RK
1045 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1046 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1047 from = copy_to_reg (from);
bbf6f052
RK
1048 emit_move_insn (to, gen_lowpart (to_mode, from));
1049 return;
1050 }
1051
d3c64ee3 1052 /* Handle extension. */
bbf6f052
RK
1053 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1054 {
1055 /* Convert directly if that works. */
1056 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1057 != CODE_FOR_nothing)
1058 {
1059 emit_unop_insn (code, to, from, equiv_code);
1060 return;
1061 }
1062 else
1063 {
1064 enum machine_mode intermediate;
1065
1066 /* Search for a mode to convert via. */
1067 for (intermediate = from_mode; intermediate != VOIDmode;
1068 intermediate = GET_MODE_WIDER_MODE (intermediate))
930b4e39
RK
1069 if (((can_extend_p (to_mode, intermediate, unsignedp)
1070 != CODE_FOR_nothing)
1071 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1072 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
bbf6f052
RK
1073 && (can_extend_p (intermediate, from_mode, unsignedp)
1074 != CODE_FOR_nothing))
1075 {
1076 convert_move (to, convert_to_mode (intermediate, from,
1077 unsignedp), unsignedp);
1078 return;
1079 }
1080
1081 /* No suitable intermediate mode. */
1082 abort ();
1083 }
1084 }
1085
1086 /* Support special truncate insns for certain modes. */
1087
1088 if (from_mode == DImode && to_mode == SImode)
1089 {
1090#ifdef HAVE_truncdisi2
1091 if (HAVE_truncdisi2)
1092 {
1093 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1094 return;
1095 }
1096#endif
1097 convert_move (to, force_reg (from_mode, from), unsignedp);
1098 return;
1099 }
1100
1101 if (from_mode == DImode && to_mode == HImode)
1102 {
1103#ifdef HAVE_truncdihi2
1104 if (HAVE_truncdihi2)
1105 {
1106 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1107 return;
1108 }
1109#endif
1110 convert_move (to, force_reg (from_mode, from), unsignedp);
1111 return;
1112 }
1113
1114 if (from_mode == DImode && to_mode == QImode)
1115 {
1116#ifdef HAVE_truncdiqi2
1117 if (HAVE_truncdiqi2)
1118 {
1119 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1120 return;
1121 }
1122#endif
1123 convert_move (to, force_reg (from_mode, from), unsignedp);
1124 return;
1125 }
1126
1127 if (from_mode == SImode && to_mode == HImode)
1128 {
1129#ifdef HAVE_truncsihi2
1130 if (HAVE_truncsihi2)
1131 {
1132 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1133 return;
1134 }
1135#endif
1136 convert_move (to, force_reg (from_mode, from), unsignedp);
1137 return;
1138 }
1139
1140 if (from_mode == SImode && to_mode == QImode)
1141 {
1142#ifdef HAVE_truncsiqi2
1143 if (HAVE_truncsiqi2)
1144 {
1145 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1146 return;
1147 }
1148#endif
1149 convert_move (to, force_reg (from_mode, from), unsignedp);
1150 return;
1151 }
1152
1153 if (from_mode == HImode && to_mode == QImode)
1154 {
1155#ifdef HAVE_trunchiqi2
1156 if (HAVE_trunchiqi2)
1157 {
1158 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1159 return;
1160 }
1161#endif
1162 convert_move (to, force_reg (from_mode, from), unsignedp);
1163 return;
1164 }
1165
b9bcad65
RK
1166 if (from_mode == TImode && to_mode == DImode)
1167 {
1168#ifdef HAVE_trunctidi2
1169 if (HAVE_trunctidi2)
1170 {
1171 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1172 return;
1173 }
1174#endif
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1176 return;
1177 }
1178
1179 if (from_mode == TImode && to_mode == SImode)
1180 {
1181#ifdef HAVE_trunctisi2
1182 if (HAVE_trunctisi2)
1183 {
1184 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1185 return;
1186 }
1187#endif
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1189 return;
1190 }
1191
1192 if (from_mode == TImode && to_mode == HImode)
1193 {
1194#ifdef HAVE_trunctihi2
1195 if (HAVE_trunctihi2)
1196 {
1197 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1198 return;
1199 }
1200#endif
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1202 return;
1203 }
1204
1205 if (from_mode == TImode && to_mode == QImode)
1206 {
1207#ifdef HAVE_trunctiqi2
1208 if (HAVE_trunctiqi2)
1209 {
1210 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1211 return;
1212 }
1213#endif
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1215 return;
1216 }
1217
bbf6f052
RK
1218 /* Handle truncation of volatile memrefs, and so on;
1219 the things that couldn't be truncated directly,
1220 and for which there was no special instruction. */
1221 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1222 {
1223 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1224 emit_move_insn (to, temp);
1225 return;
1226 }
1227
1228 /* Mode combination is not recognized. */
1229 abort ();
1230}
1231
1232/* Return an rtx for a value that would result
1233 from converting X to mode MODE.
1234 Both X and MODE may be floating, or both integer.
1235 UNSIGNEDP is nonzero if X is an unsigned value.
1236 This can be done by referring to a part of X in place
5d901c31
RS
1237 or by copying to a new temporary with conversion.
1238
1239 This function *must not* call protect_from_queue
1240 except when putting X into an insn (in which case convert_move does it). */
bbf6f052
RK
1241
1242rtx
1243convert_to_mode (mode, x, unsignedp)
1244 enum machine_mode mode;
1245 rtx x;
1246 int unsignedp;
5ffe63ed
RS
1247{
1248 return convert_modes (mode, VOIDmode, x, unsignedp);
1249}
1250
1251/* Return an rtx for a value that would result
1252 from converting X from mode OLDMODE to mode MODE.
1253 Both modes may be floating, or both integer.
1254 UNSIGNEDP is nonzero if X is an unsigned value.
1255
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1258
1259 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1260
1261 This function *must not* call protect_from_queue
1262 except when putting X into an insn (in which case convert_move does it). */
1263
1264rtx
1265convert_modes (mode, oldmode, x, unsignedp)
1266 enum machine_mode mode, oldmode;
1267 rtx x;
1268 int unsignedp;
bbf6f052
RK
1269{
1270 register rtx temp;
5ffe63ed 1271
1499e0a8
RK
1272 /* If FROM is a SUBREG that indicates that we have already done at least
1273 the required extension, strip it. */
1274
1275 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1276 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1277 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1278 x = gen_lowpart (mode, x);
bbf6f052 1279
64791b18
RK
1280 if (GET_MODE (x) != VOIDmode)
1281 oldmode = GET_MODE (x);
1282
5ffe63ed 1283 if (mode == oldmode)
bbf6f052
RK
1284 return x;
1285
1286 /* There is one case that we must handle specially: If we are converting
906c4e36 1287 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
1288 we are to interpret the constant as unsigned, gen_lowpart will do
1289 the wrong if the constant appears negative. What we want to do is
1290 make the high-order word of the constant zero, not all ones. */
1291
1292 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 1293 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 1294 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
96ff8a16
ILT
1295 {
1296 HOST_WIDE_INT val = INTVAL (x);
1297
1298 if (oldmode != VOIDmode
1299 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1300 {
1301 int width = GET_MODE_BITSIZE (oldmode);
1302
1303 /* We need to zero extend VAL. */
1304 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1305 }
1306
1307 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1308 }
bbf6f052
RK
1309
1310 /* We can do this with a gen_lowpart if both desired and current modes
1311 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
1312 non-volatile MEM. Except for the constant case where MODE is no
1313 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 1314
ba2e110c
RK
1315 if ((GET_CODE (x) == CONST_INT
1316 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 1317 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 1318 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 1319 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 1320 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
d57c66da
JW
1321 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1322 && direct_load[(int) mode])
2bf29316
JW
1323 || (GET_CODE (x) == REG
1324 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1325 GET_MODE_BITSIZE (GET_MODE (x)))))))))
ba2e110c
RK
1326 {
1327 /* ?? If we don't know OLDMODE, we have to assume here that
1328 X does not need sign- or zero-extension. This may not be
1329 the case, but it's the best we can do. */
1330 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1331 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1332 {
1333 HOST_WIDE_INT val = INTVAL (x);
1334 int width = GET_MODE_BITSIZE (oldmode);
1335
1336 /* We must sign or zero-extend in this case. Start by
1337 zero-extending, then sign extend if we need to. */
1338 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1339 if (! unsignedp
1340 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1341 val |= (HOST_WIDE_INT) (-1) << width;
1342
1343 return GEN_INT (val);
1344 }
1345
1346 return gen_lowpart (mode, x);
1347 }
bbf6f052
RK
1348
1349 temp = gen_reg_rtx (mode);
1350 convert_move (temp, x, unsignedp);
1351 return temp;
1352}
1353\f
1354/* Generate several move instructions to copy LEN bytes
1355 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1356 The caller must pass FROM and TO
1357 through protect_from_queue before calling.
1358 ALIGN (in bytes) is maximum alignment we can assume. */
1359
bbf6f052
RK
1360static void
1361move_by_pieces (to, from, len, align)
1362 rtx to, from;
1363 int len, align;
1364{
1365 struct move_by_pieces data;
1366 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
e87b4f3f 1367 int max_size = MOVE_MAX + 1;
bbf6f052
RK
1368
1369 data.offset = 0;
1370 data.to_addr = to_addr;
1371 data.from_addr = from_addr;
1372 data.to = to;
1373 data.from = from;
1374 data.autinc_to
1375 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1376 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1377 data.autinc_from
1378 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1379 || GET_CODE (from_addr) == POST_INC
1380 || GET_CODE (from_addr) == POST_DEC);
1381
1382 data.explicit_inc_from = 0;
1383 data.explicit_inc_to = 0;
1384 data.reverse
1385 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1386 if (data.reverse) data.offset = len;
1387 data.len = len;
1388
e9cf6a97
JW
1389 data.to_struct = MEM_IN_STRUCT_P (to);
1390 data.from_struct = MEM_IN_STRUCT_P (from);
1391
bbf6f052
RK
1392 /* If copying requires more than two move insns,
1393 copy addresses to registers (to make displacements shorter)
1394 and use post-increment if available. */
1395 if (!(data.autinc_from && data.autinc_to)
1396 && move_by_pieces_ninsns (len, align) > 2)
1397 {
1398#ifdef HAVE_PRE_DECREMENT
1399 if (data.reverse && ! data.autinc_from)
1400 {
1401 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1402 data.autinc_from = 1;
1403 data.explicit_inc_from = -1;
1404 }
1405#endif
1406#ifdef HAVE_POST_INCREMENT
1407 if (! data.autinc_from)
1408 {
1409 data.from_addr = copy_addr_to_reg (from_addr);
1410 data.autinc_from = 1;
1411 data.explicit_inc_from = 1;
1412 }
1413#endif
1414 if (!data.autinc_from && CONSTANT_P (from_addr))
1415 data.from_addr = copy_addr_to_reg (from_addr);
1416#ifdef HAVE_PRE_DECREMENT
1417 if (data.reverse && ! data.autinc_to)
1418 {
1419 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1420 data.autinc_to = 1;
1421 data.explicit_inc_to = -1;
1422 }
1423#endif
1424#ifdef HAVE_POST_INCREMENT
1425 if (! data.reverse && ! data.autinc_to)
1426 {
1427 data.to_addr = copy_addr_to_reg (to_addr);
1428 data.autinc_to = 1;
1429 data.explicit_inc_to = 1;
1430 }
1431#endif
1432 if (!data.autinc_to && CONSTANT_P (to_addr))
1433 data.to_addr = copy_addr_to_reg (to_addr);
1434 }
1435
c7a7ac46 1436 if (! SLOW_UNALIGNED_ACCESS
e87b4f3f 1437 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1438 align = MOVE_MAX;
bbf6f052
RK
1439
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes. */
1442
1443 while (max_size > 1)
1444 {
1445 enum machine_mode mode = VOIDmode, tmode;
1446 enum insn_code icode;
1447
e7c33f54
RK
1448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1449 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1450 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1451 mode = tmode;
1452
1453 if (mode == VOIDmode)
1454 break;
1455
1456 icode = mov_optab->handlers[(int) mode].insn_code;
1457 if (icode != CODE_FOR_nothing
1458 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1459 GET_MODE_SIZE (mode)))
1460 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1461
1462 max_size = GET_MODE_SIZE (mode);
1463 }
1464
1465 /* The code above should have handled everything. */
1466 if (data.len != 0)
1467 abort ();
1468}
1469
1470/* Return number of insns required to move L bytes by pieces.
1471 ALIGN (in bytes) is maximum alignment we can assume. */
1472
1473static int
1474move_by_pieces_ninsns (l, align)
1475 unsigned int l;
1476 int align;
1477{
1478 register int n_insns = 0;
e87b4f3f 1479 int max_size = MOVE_MAX + 1;
bbf6f052 1480
c7a7ac46 1481 if (! SLOW_UNALIGNED_ACCESS
e87b4f3f 1482 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1483 align = MOVE_MAX;
bbf6f052
RK
1484
1485 while (max_size > 1)
1486 {
1487 enum machine_mode mode = VOIDmode, tmode;
1488 enum insn_code icode;
1489
e7c33f54
RK
1490 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1491 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1492 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1493 mode = tmode;
1494
1495 if (mode == VOIDmode)
1496 break;
1497
1498 icode = mov_optab->handlers[(int) mode].insn_code;
1499 if (icode != CODE_FOR_nothing
1500 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1501 GET_MODE_SIZE (mode)))
1502 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1503
1504 max_size = GET_MODE_SIZE (mode);
1505 }
1506
1507 return n_insns;
1508}
1509
1510/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1511 with move instructions for mode MODE. GENFUN is the gen_... function
1512 to make a move insn for that mode. DATA has all the other info. */
1513
1514static void
1515move_by_pieces_1 (genfun, mode, data)
1516 rtx (*genfun) ();
1517 enum machine_mode mode;
1518 struct move_by_pieces *data;
1519{
1520 register int size = GET_MODE_SIZE (mode);
1521 register rtx to1, from1;
1522
1523 while (data->len >= size)
1524 {
1525 if (data->reverse) data->offset -= size;
1526
1527 to1 = (data->autinc_to
1528 ? gen_rtx (MEM, mode, data->to_addr)
1529 : change_address (data->to, mode,
1530 plus_constant (data->to_addr, data->offset)));
e9cf6a97 1531 MEM_IN_STRUCT_P (to1) = data->to_struct;
bbf6f052
RK
1532 from1 =
1533 (data->autinc_from
1534 ? gen_rtx (MEM, mode, data->from_addr)
1535 : change_address (data->from, mode,
1536 plus_constant (data->from_addr, data->offset)));
e9cf6a97 1537 MEM_IN_STRUCT_P (from1) = data->from_struct;
bbf6f052
RK
1538
1539#ifdef HAVE_PRE_DECREMENT
1540 if (data->explicit_inc_to < 0)
906c4e36 1541 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
bbf6f052 1542 if (data->explicit_inc_from < 0)
906c4e36 1543 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
bbf6f052
RK
1544#endif
1545
1546 emit_insn ((*genfun) (to1, from1));
1547#ifdef HAVE_POST_INCREMENT
1548 if (data->explicit_inc_to > 0)
906c4e36 1549 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
bbf6f052 1550 if (data->explicit_inc_from > 0)
906c4e36 1551 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052
RK
1552#endif
1553
1554 if (! data->reverse) data->offset += size;
1555
1556 data->len -= size;
1557 }
1558}
1559\f
1560/* Emit code to move a block Y to a block X.
1561 This may be done with string-move instructions,
1562 with multiple scalar move instructions, or with a library call.
1563
1564 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1565 with mode BLKmode.
1566 SIZE is an rtx that says how long they are.
1567 ALIGN is the maximum alignment we can assume they have,
1568 measured in bytes. */
1569
1570void
1571emit_block_move (x, y, size, align)
1572 rtx x, y;
1573 rtx size;
1574 int align;
1575{
1576 if (GET_MODE (x) != BLKmode)
1577 abort ();
1578
1579 if (GET_MODE (y) != BLKmode)
1580 abort ();
1581
1582 x = protect_from_queue (x, 1);
1583 y = protect_from_queue (y, 0);
5d901c31 1584 size = protect_from_queue (size, 0);
bbf6f052
RK
1585
1586 if (GET_CODE (x) != MEM)
1587 abort ();
1588 if (GET_CODE (y) != MEM)
1589 abort ();
1590 if (size == 0)
1591 abort ();
1592
1593 if (GET_CODE (size) == CONST_INT
906c4e36 1594 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
bbf6f052
RK
1595 move_by_pieces (x, y, INTVAL (size), align);
1596 else
1597 {
1598 /* Try the most limited insn first, because there's no point
1599 including more than one in the machine description unless
1600 the more limited one has some advantage. */
266007a7 1601
0bba3f6f 1602 rtx opalign = GEN_INT (align);
266007a7
RK
1603 enum machine_mode mode;
1604
1605 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1606 mode = GET_MODE_WIDER_MODE (mode))
bbf6f052 1607 {
266007a7 1608 enum insn_code code = movstr_optab[(int) mode];
266007a7
RK
1609
1610 if (code != CODE_FOR_nothing
803090c4
RK
1611 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1612 here because if SIZE is less than the mode mask, as it is
8008b228 1613 returned by the macro, it will definitely be less than the
803090c4 1614 actual mode mask. */
8ca00751
RK
1615 && ((GET_CODE (size) == CONST_INT
1616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1617 <= GET_MODE_MASK (mode)))
1618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
0bba3f6f
RK
1619 && (insn_operand_predicate[(int) code][0] == 0
1620 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1621 && (insn_operand_predicate[(int) code][1] == 0
1622 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1623 && (insn_operand_predicate[(int) code][3] == 0
1624 || (*insn_operand_predicate[(int) code][3]) (opalign,
1625 VOIDmode)))
bbf6f052 1626 {
1ba1e2a8 1627 rtx op2;
266007a7
RK
1628 rtx last = get_last_insn ();
1629 rtx pat;
1630
1ba1e2a8 1631 op2 = convert_to_mode (mode, size, 1);
0bba3f6f
RK
1632 if (insn_operand_predicate[(int) code][2] != 0
1633 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
266007a7
RK
1634 op2 = copy_to_mode_reg (mode, op2);
1635
1636 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1637 if (pat)
1638 {
1639 emit_insn (pat);
1640 return;
1641 }
1642 else
1643 delete_insns_since (last);
bbf6f052
RK
1644 }
1645 }
bbf6f052
RK
1646
1647#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1648 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
1649 VOIDmode, 3, XEXP (x, 0), Pmode,
1650 XEXP (y, 0), Pmode,
0fa83258
RK
1651 convert_to_mode (TYPE_MODE (sizetype), size,
1652 TREE_UNSIGNED (sizetype)),
1653 TYPE_MODE (sizetype));
bbf6f052 1654#else
d562e42e 1655 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
1656 VOIDmode, 3, XEXP (y, 0), Pmode,
1657 XEXP (x, 0), Pmode,
3b6f75e2
JW
1658 convert_to_mode (TYPE_MODE (integer_type_node), size,
1659 TREE_UNSIGNED (integer_type_node)),
1660 TYPE_MODE (integer_type_node));
bbf6f052
RK
1661#endif
1662 }
1663}
1664\f
1665/* Copy all or part of a value X into registers starting at REGNO.
1666 The number of registers to be filled is NREGS. */
1667
1668void
1669move_block_to_reg (regno, x, nregs, mode)
1670 int regno;
1671 rtx x;
1672 int nregs;
1673 enum machine_mode mode;
1674{
1675 int i;
1676 rtx pat, last;
1677
72bb9717
RK
1678 if (nregs == 0)
1679 return;
1680
bbf6f052
RK
1681 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1682 x = validize_mem (force_const_mem (mode, x));
1683
1684 /* See if the machine can do this with a load multiple insn. */
1685#ifdef HAVE_load_multiple
c3a02afe 1686 if (HAVE_load_multiple)
bbf6f052 1687 {
c3a02afe
RK
1688 last = get_last_insn ();
1689 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1690 GEN_INT (nregs));
1691 if (pat)
1692 {
1693 emit_insn (pat);
1694 return;
1695 }
1696 else
1697 delete_insns_since (last);
bbf6f052 1698 }
bbf6f052
RK
1699#endif
1700
1701 for (i = 0; i < nregs; i++)
1702 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1703 operand_subword_force (x, i, mode));
1704}
1705
1706/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1707 The number of registers to be filled is NREGS. SIZE indicates the number
1708 of bytes in the object X. */
1709
bbf6f052
RK
1710
1711void
0040593d 1712move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1713 int regno;
1714 rtx x;
1715 int nregs;
0040593d 1716 int size;
bbf6f052
RK
1717{
1718 int i;
1719 rtx pat, last;
1720
0040593d
JW
1721 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1722 to the left before storing to memory. */
1723 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1724 {
1725 rtx tem = operand_subword (x, 0, 1, BLKmode);
1726 rtx shift;
1727
1728 if (tem == 0)
1729 abort ();
1730
1731 shift = expand_shift (LSHIFT_EXPR, word_mode,
1732 gen_rtx (REG, word_mode, regno),
1733 build_int_2 ((UNITS_PER_WORD - size)
1734 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1735 emit_move_insn (tem, shift);
1736 return;
1737 }
1738
bbf6f052
RK
1739 /* See if the machine can do this with a store multiple insn. */
1740#ifdef HAVE_store_multiple
c3a02afe 1741 if (HAVE_store_multiple)
bbf6f052 1742 {
c3a02afe
RK
1743 last = get_last_insn ();
1744 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1745 GEN_INT (nregs));
1746 if (pat)
1747 {
1748 emit_insn (pat);
1749 return;
1750 }
1751 else
1752 delete_insns_since (last);
bbf6f052 1753 }
bbf6f052
RK
1754#endif
1755
1756 for (i = 0; i < nregs; i++)
1757 {
1758 rtx tem = operand_subword (x, i, 1, BLKmode);
1759
1760 if (tem == 0)
1761 abort ();
1762
1763 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1764 }
1765}
1766
fffa9c1d
JW
1767/* Emit code to move a block Y to a block X, where X is non-consecutive
1768 registers represented by a PARALLEL. */
1769
1770void
1771emit_group_load (x, y)
1772 rtx x, y;
1773{
1774 rtx target_reg, source;
1775 int i;
1776
1777 if (GET_CODE (x) != PARALLEL)
1778 abort ();
1779
1780 /* Check for a NULL entry, used to indicate that the parameter goes
1781 both on the stack and in registers. */
1782 if (XEXP (XVECEXP (x, 0, 0), 0))
1783 i = 0;
1784 else
1785 i = 1;
1786
1787 for (; i < XVECLEN (x, 0); i++)
1788 {
1789 rtx element = XVECEXP (x, 0, i);
1790
1791 target_reg = XEXP (element, 0);
1792
1793 if (GET_CODE (y) == MEM)
1794 source = change_address (y, GET_MODE (target_reg),
1795 plus_constant (XEXP (y, 0),
1796 INTVAL (XEXP (element, 1))));
1797 else if (XEXP (element, 1) == const0_rtx)
1798 {
1799 if (GET_MODE (target_reg) == GET_MODE (y))
1800 source = y;
eaa9b4d9
MM
1801 /* Allow for the target_reg to be smaller than the input register
1802 to allow for AIX with 4 DF arguments after a single SI arg. The
1803 last DF argument will only load 1 word into the integer registers,
1804 but load a DF value into the float registers. */
fffa9c1d 1805 else if (GET_MODE_SIZE (GET_MODE (target_reg))
eaa9b4d9 1806 <= GET_MODE_SIZE (GET_MODE (y)))
fffa9c1d
JW
1807 source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
1808 else
1809 abort ();
1810 }
1811 else
1812 abort ();
1813
1814 emit_move_insn (target_reg, source);
1815 }
1816}
1817
1818/* Emit code to move a block Y to a block X, where Y is non-consecutive
1819 registers represented by a PARALLEL. */
1820
1821void
1822emit_group_store (x, y)
1823 rtx x, y;
1824{
1825 rtx source_reg, target;
1826 int i;
1827
1828 if (GET_CODE (y) != PARALLEL)
1829 abort ();
1830
1831 /* Check for a NULL entry, used to indicate that the parameter goes
1832 both on the stack and in registers. */
1833 if (XEXP (XVECEXP (y, 0, 0), 0))
1834 i = 0;
1835 else
1836 i = 1;
1837
1838 for (; i < XVECLEN (y, 0); i++)
1839 {
1840 rtx element = XVECEXP (y, 0, i);
1841
1842 source_reg = XEXP (element, 0);
1843
1844 if (GET_CODE (x) == MEM)
1845 target = change_address (x, GET_MODE (source_reg),
1846 plus_constant (XEXP (x, 0),
1847 INTVAL (XEXP (element, 1))));
1848 else if (XEXP (element, 1) == const0_rtx)
1849 target = x;
1850 else
1851 abort ();
1852
1853 emit_move_insn (target, source_reg);
1854 }
1855}
1856
94b25f81
RK
1857/* Add a USE expression for REG to the (possibly empty) list pointed
1858 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
1859
1860void
b3f8cf4a
RK
1861use_reg (call_fusage, reg)
1862 rtx *call_fusage, reg;
1863{
0304dfbb
DE
1864 if (GET_CODE (reg) != REG
1865 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
b3f8cf4a
RK
1866 abort();
1867
1868 *call_fusage
1869 = gen_rtx (EXPR_LIST, VOIDmode,
0304dfbb 1870 gen_rtx (USE, VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
1871}
1872
94b25f81
RK
1873/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1874 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
1875
1876void
0304dfbb
DE
1877use_regs (call_fusage, regno, nregs)
1878 rtx *call_fusage;
bbf6f052
RK
1879 int regno;
1880 int nregs;
1881{
0304dfbb 1882 int i;
bbf6f052 1883
0304dfbb
DE
1884 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1885 abort ();
1886
1887 for (i = 0; i < nregs; i++)
1888 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
bbf6f052 1889}
fffa9c1d
JW
1890
1891/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1892 PARALLEL REGS. This is for calls that pass values in multiple
1893 non-contiguous locations. The Irix 6 ABI has examples of this. */
1894
1895void
1896use_group_regs (call_fusage, regs)
1897 rtx *call_fusage;
1898 rtx regs;
1899{
1900 int i;
1901
1902 /* Check for a NULL entry, used to indicate that the parameter goes
1903 both on the stack and in registers. */
1904 if (XEXP (XVECEXP (regs, 0, 0), 0))
1905 i = 0;
1906 else
1907 i = 1;
1908
1909 for (; i < XVECLEN (regs, 0); i++)
1910 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1911}
bbf6f052 1912\f
9de08200
RK
1913/* Generate several move instructions to clear LEN bytes of block TO.
1914 (A MEM rtx with BLKmode). The caller must pass TO through
1915 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1916 we can assume. */
1917
1918static void
1919clear_by_pieces (to, len, align)
1920 rtx to;
1921 int len, align;
1922{
1923 struct clear_by_pieces data;
1924 rtx to_addr = XEXP (to, 0);
1925 int max_size = MOVE_MAX + 1;
1926
1927 data.offset = 0;
1928 data.to_addr = to_addr;
1929 data.to = to;
1930 data.autinc_to
1931 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1932 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1933
1934 data.explicit_inc_to = 0;
1935 data.reverse
1936 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1937 if (data.reverse) data.offset = len;
1938 data.len = len;
1939
1940 data.to_struct = MEM_IN_STRUCT_P (to);
1941
1942 /* If copying requires more than two move insns,
1943 copy addresses to registers (to make displacements shorter)
1944 and use post-increment if available. */
1945 if (!data.autinc_to
1946 && move_by_pieces_ninsns (len, align) > 2)
1947 {
1948#ifdef HAVE_PRE_DECREMENT
1949 if (data.reverse && ! data.autinc_to)
1950 {
1951 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1952 data.autinc_to = 1;
1953 data.explicit_inc_to = -1;
1954 }
1955#endif
1956#ifdef HAVE_POST_INCREMENT
1957 if (! data.reverse && ! data.autinc_to)
1958 {
1959 data.to_addr = copy_addr_to_reg (to_addr);
1960 data.autinc_to = 1;
1961 data.explicit_inc_to = 1;
1962 }
1963#endif
1964 if (!data.autinc_to && CONSTANT_P (to_addr))
1965 data.to_addr = copy_addr_to_reg (to_addr);
1966 }
1967
1968 if (! SLOW_UNALIGNED_ACCESS
1969 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1970 align = MOVE_MAX;
1971
1972 /* First move what we can in the largest integer mode, then go to
1973 successively smaller modes. */
1974
1975 while (max_size > 1)
1976 {
1977 enum machine_mode mode = VOIDmode, tmode;
1978 enum insn_code icode;
1979
1980 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1981 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1982 if (GET_MODE_SIZE (tmode) < max_size)
1983 mode = tmode;
1984
1985 if (mode == VOIDmode)
1986 break;
1987
1988 icode = mov_optab->handlers[(int) mode].insn_code;
1989 if (icode != CODE_FOR_nothing
1990 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1991 GET_MODE_SIZE (mode)))
1992 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
1993
1994 max_size = GET_MODE_SIZE (mode);
1995 }
1996
1997 /* The code above should have handled everything. */
1998 if (data.len != 0)
1999 abort ();
2000}
2001
2002/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2003 with move instructions for mode MODE. GENFUN is the gen_... function
2004 to make a move insn for that mode. DATA has all the other info. */
2005
2006static void
2007clear_by_pieces_1 (genfun, mode, data)
2008 rtx (*genfun) ();
2009 enum machine_mode mode;
2010 struct clear_by_pieces *data;
2011{
2012 register int size = GET_MODE_SIZE (mode);
2013 register rtx to1;
2014
2015 while (data->len >= size)
2016 {
2017 if (data->reverse) data->offset -= size;
2018
2019 to1 = (data->autinc_to
2020 ? gen_rtx (MEM, mode, data->to_addr)
2021 : change_address (data->to, mode,
2022 plus_constant (data->to_addr, data->offset)));
2023 MEM_IN_STRUCT_P (to1) = data->to_struct;
2024
2025#ifdef HAVE_PRE_DECREMENT
2026 if (data->explicit_inc_to < 0)
2027 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2028#endif
2029
2030 emit_insn ((*genfun) (to1, const0_rtx));
2031#ifdef HAVE_POST_INCREMENT
2032 if (data->explicit_inc_to > 0)
2033 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2034#endif
2035
2036 if (! data->reverse) data->offset += size;
2037
2038 data->len -= size;
2039 }
2040}
2041\f
bbf6f052 2042/* Write zeros through the storage of OBJECT.
9de08200
RK
2043 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2044 the maximum alignment we can is has, measured in bytes. */
bbf6f052
RK
2045
2046void
9de08200 2047clear_storage (object, size, align)
bbf6f052 2048 rtx object;
4c08eef0 2049 rtx size;
9de08200 2050 int align;
bbf6f052
RK
2051{
2052 if (GET_MODE (object) == BLKmode)
2053 {
9de08200
RK
2054 object = protect_from_queue (object, 1);
2055 size = protect_from_queue (size, 0);
2056
2057 if (GET_CODE (size) == CONST_INT
2058 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2059 clear_by_pieces (object, INTVAL (size), align);
2060
2061 else
2062 {
2063 /* Try the most limited insn first, because there's no point
2064 including more than one in the machine description unless
2065 the more limited one has some advantage. */
2066
2067 rtx opalign = GEN_INT (align);
2068 enum machine_mode mode;
2069
2070 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2071 mode = GET_MODE_WIDER_MODE (mode))
2072 {
2073 enum insn_code code = clrstr_optab[(int) mode];
2074
2075 if (code != CODE_FOR_nothing
2076 /* We don't need MODE to be narrower than
2077 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2078 the mode mask, as it is returned by the macro, it will
2079 definitely be less than the actual mode mask. */
2080 && ((GET_CODE (size) == CONST_INT
2081 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2082 <= GET_MODE_MASK (mode)))
2083 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2084 && (insn_operand_predicate[(int) code][0] == 0
2085 || (*insn_operand_predicate[(int) code][0]) (object,
2086 BLKmode))
2087 && (insn_operand_predicate[(int) code][2] == 0
2088 || (*insn_operand_predicate[(int) code][2]) (opalign,
2089 VOIDmode)))
2090 {
2091 rtx op1;
2092 rtx last = get_last_insn ();
2093 rtx pat;
2094
2095 op1 = convert_to_mode (mode, size, 1);
2096 if (insn_operand_predicate[(int) code][1] != 0
2097 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2098 mode))
2099 op1 = copy_to_mode_reg (mode, op1);
2100
2101 pat = GEN_FCN ((int) code) (object, op1, opalign);
2102 if (pat)
2103 {
2104 emit_insn (pat);
2105 return;
2106 }
2107 else
2108 delete_insns_since (last);
2109 }
2110 }
2111
2112
bbf6f052 2113#ifdef TARGET_MEM_FUNCTIONS
9de08200
RK
2114 emit_library_call (memset_libfunc, 0,
2115 VOIDmode, 3,
2116 XEXP (object, 0), Pmode,
2117 const0_rtx, TYPE_MODE (integer_type_node),
2118 convert_to_mode (TYPE_MODE (sizetype),
2119 size, TREE_UNSIGNED (sizetype)),
2120 TYPE_MODE (sizetype));
bbf6f052 2121#else
9de08200
RK
2122 emit_library_call (bzero_libfunc, 0,
2123 VOIDmode, 2,
2124 XEXP (object, 0), Pmode,
2125 convert_to_mode (TYPE_MODE (integer_type_node),
2126 size,
2127 TREE_UNSIGNED (integer_type_node)),
2128 TYPE_MODE (integer_type_node));
bbf6f052 2129#endif
9de08200 2130 }
bbf6f052
RK
2131 }
2132 else
2133 emit_move_insn (object, const0_rtx);
2134}
2135
2136/* Generate code to copy Y into X.
2137 Both Y and X must have the same mode, except that
2138 Y can be a constant with VOIDmode.
2139 This mode cannot be BLKmode; use emit_block_move for that.
2140
2141 Return the last instruction emitted. */
2142
2143rtx
2144emit_move_insn (x, y)
2145 rtx x, y;
2146{
2147 enum machine_mode mode = GET_MODE (x);
bbf6f052
RK
2148
2149 x = protect_from_queue (x, 1);
2150 y = protect_from_queue (y, 0);
2151
2152 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2153 abort ();
2154
2155 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2156 y = force_const_mem (mode, y);
2157
2158 /* If X or Y are memory references, verify that their addresses are valid
2159 for the machine. */
2160 if (GET_CODE (x) == MEM
2161 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2162 && ! push_operand (x, GET_MODE (x)))
2163 || (flag_force_addr
2164 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2165 x = change_address (x, VOIDmode, XEXP (x, 0));
2166
2167 if (GET_CODE (y) == MEM
2168 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2169 || (flag_force_addr
2170 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2171 y = change_address (y, VOIDmode, XEXP (y, 0));
2172
2173 if (mode == BLKmode)
2174 abort ();
2175
261c4230
RS
2176 return emit_move_insn_1 (x, y);
2177}
2178
2179/* Low level part of emit_move_insn.
2180 Called just like emit_move_insn, but assumes X and Y
2181 are basically valid. */
2182
2183rtx
2184emit_move_insn_1 (x, y)
2185 rtx x, y;
2186{
2187 enum machine_mode mode = GET_MODE (x);
2188 enum machine_mode submode;
2189 enum mode_class class = GET_MODE_CLASS (mode);
2190 int i;
2191
bbf6f052
RK
2192 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2193 return
2194 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2195
89742723 2196 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2197 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
2198 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2199 * BITS_PER_UNIT),
2200 (class == MODE_COMPLEX_INT
2201 ? MODE_INT : MODE_FLOAT),
2202 0))
7308a047
RS
2203 && (mov_optab->handlers[(int) submode].insn_code
2204 != CODE_FOR_nothing))
2205 {
2206 /* Don't split destination if it is a stack push. */
2207 int stack = push_operand (x, GET_MODE (x));
6551fa4d 2208 rtx insns;
7308a047 2209
7308a047
RS
2210 /* If this is a stack, push the highpart first, so it
2211 will be in the argument order.
2212
2213 In that case, change_address is used only to convert
2214 the mode, not to change the address. */
c937357e
RS
2215 if (stack)
2216 {
e33c0d66
RS
2217 /* Note that the real part always precedes the imag part in memory
2218 regardless of machine's endianness. */
c937357e
RS
2219#ifdef STACK_GROWS_DOWNWARD
2220 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2221 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 2222 gen_imagpart (submode, y)));
c937357e
RS
2223 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2224 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 2225 gen_realpart (submode, y)));
c937357e
RS
2226#else
2227 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2228 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 2229 gen_realpart (submode, y)));
c937357e
RS
2230 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2231 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 2232 gen_imagpart (submode, y)));
c937357e
RS
2233#endif
2234 }
2235 else
2236 {
2237 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2238 (gen_realpart (submode, x), gen_realpart (submode, y)));
c937357e 2239 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2240 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
c937357e 2241 }
7308a047 2242
7a1ab50a 2243 return get_last_insn ();
7308a047
RS
2244 }
2245
bbf6f052
RK
2246 /* This will handle any multi-word mode that lacks a move_insn pattern.
2247 However, you will get better code if you define such patterns,
2248 even if they must turn into multiple assembler instructions. */
a4320483 2249 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
2250 {
2251 rtx last_insn = 0;
6551fa4d
JW
2252 rtx insns;
2253
a98c9f1a
RK
2254#ifdef PUSH_ROUNDING
2255
2256 /* If X is a push on the stack, do the push now and replace
2257 X with a reference to the stack pointer. */
2258 if (push_operand (x, GET_MODE (x)))
2259 {
2260 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2261 x = change_address (x, VOIDmode, stack_pointer_rtx);
2262 }
2263#endif
2264
15a7a8ec 2265 /* Show the output dies here. */
43e046cb
RK
2266 if (x != y)
2267 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
15a7a8ec 2268
bbf6f052
RK
2269 for (i = 0;
2270 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2271 i++)
2272 {
2273 rtx xpart = operand_subword (x, i, 1, mode);
2274 rtx ypart = operand_subword (y, i, 1, mode);
2275
2276 /* If we can't get a part of Y, put Y into memory if it is a
2277 constant. Otherwise, force it into a register. If we still
2278 can't get a part of Y, abort. */
2279 if (ypart == 0 && CONSTANT_P (y))
2280 {
2281 y = force_const_mem (mode, y);
2282 ypart = operand_subword (y, i, 1, mode);
2283 }
2284 else if (ypart == 0)
2285 ypart = operand_subword_force (y, i, mode);
2286
2287 if (xpart == 0 || ypart == 0)
2288 abort ();
2289
2290 last_insn = emit_move_insn (xpart, ypart);
2291 }
6551fa4d 2292
bbf6f052
RK
2293 return last_insn;
2294 }
2295 else
2296 abort ();
2297}
2298\f
2299/* Pushing data onto the stack. */
2300
2301/* Push a block of length SIZE (perhaps variable)
2302 and return an rtx to address the beginning of the block.
2303 Note that it is not possible for the value returned to be a QUEUED.
2304 The value may be virtual_outgoing_args_rtx.
2305
2306 EXTRA is the number of bytes of padding to push in addition to SIZE.
2307 BELOW nonzero means this padding comes at low addresses;
2308 otherwise, the padding comes at high addresses. */
2309
2310rtx
2311push_block (size, extra, below)
2312 rtx size;
2313 int extra, below;
2314{
2315 register rtx temp;
88f63c77
RK
2316
2317 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
2318 if (CONSTANT_P (size))
2319 anti_adjust_stack (plus_constant (size, extra));
2320 else if (GET_CODE (size) == REG && extra == 0)
2321 anti_adjust_stack (size);
2322 else
2323 {
2324 rtx temp = copy_to_mode_reg (Pmode, size);
2325 if (extra != 0)
906c4e36 2326 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
2327 temp, 0, OPTAB_LIB_WIDEN);
2328 anti_adjust_stack (temp);
2329 }
2330
2331#ifdef STACK_GROWS_DOWNWARD
2332 temp = virtual_outgoing_args_rtx;
2333 if (extra != 0 && below)
2334 temp = plus_constant (temp, extra);
2335#else
2336 if (GET_CODE (size) == CONST_INT)
2337 temp = plus_constant (virtual_outgoing_args_rtx,
2338 - INTVAL (size) - (below ? 0 : extra));
2339 else if (extra != 0 && !below)
2340 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2341 negate_rtx (Pmode, plus_constant (size, extra)));
2342 else
2343 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2344 negate_rtx (Pmode, size));
2345#endif
2346
2347 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2348}
2349
87e38d84 2350rtx
bbf6f052
RK
2351gen_push_operand ()
2352{
2353 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2354}
2355
2356/* Generate code to push X onto the stack, assuming it has mode MODE and
2357 type TYPE.
2358 MODE is redundant except when X is a CONST_INT (since they don't
2359 carry mode info).
2360 SIZE is an rtx for the size of data to be copied (in bytes),
2361 needed only if X is BLKmode.
2362
2363 ALIGN (in bytes) is maximum alignment we can assume.
2364
cd048831
RK
2365 If PARTIAL and REG are both nonzero, then copy that many of the first
2366 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
2367 The amount of space pushed is decreased by PARTIAL words,
2368 rounded *down* to a multiple of PARM_BOUNDARY.
2369 REG must be a hard register in this case.
cd048831
RK
2370 If REG is zero but PARTIAL is not, take any all others actions for an
2371 argument partially in registers, but do not actually load any
2372 registers.
bbf6f052
RK
2373
2374 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2375 This is ignored if an argument block has already been allocated.
bbf6f052
RK
2376
2377 On a machine that lacks real push insns, ARGS_ADDR is the address of
2378 the bottom of the argument block for this call. We use indexing off there
2379 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2380 argument block has not been preallocated.
2381
2382 ARGS_SO_FAR is the size of args previously pushed for this call. */
2383
2384void
2385emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2386 args_addr, args_so_far)
2387 register rtx x;
2388 enum machine_mode mode;
2389 tree type;
2390 rtx size;
2391 int align;
2392 int partial;
2393 rtx reg;
2394 int extra;
2395 rtx args_addr;
2396 rtx args_so_far;
2397{
2398 rtx xinner;
2399 enum direction stack_direction
2400#ifdef STACK_GROWS_DOWNWARD
2401 = downward;
2402#else
2403 = upward;
2404#endif
2405
2406 /* Decide where to pad the argument: `downward' for below,
2407 `upward' for above, or `none' for don't pad it.
2408 Default is below for small data on big-endian machines; else above. */
2409 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2410
9c7be814
JL
2411 /* If we're placing part of X into a register and part of X onto
2412 the stack, indicate that the entire register is clobbered to
2413 keep flow from thinking the unused part of the register is live. */
22745c7e 2414 if (partial > 0 && reg != 0)
9c7be814
JL
2415 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2416
bbf6f052
RK
2417 /* Invert direction if stack is post-update. */
2418 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2419 if (where_pad != none)
2420 where_pad = (where_pad == downward ? upward : downward);
2421
2422 xinner = x = protect_from_queue (x, 0);
2423
2424 if (mode == BLKmode)
2425 {
2426 /* Copy a block into the stack, entirely or partially. */
2427
2428 register rtx temp;
2429 int used = partial * UNITS_PER_WORD;
2430 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2431 int skip;
2432
2433 if (size == 0)
2434 abort ();
2435
2436 used -= offset;
2437
2438 /* USED is now the # of bytes we need not copy to the stack
2439 because registers will take care of them. */
2440
2441 if (partial != 0)
2442 xinner = change_address (xinner, BLKmode,
2443 plus_constant (XEXP (xinner, 0), used));
2444
2445 /* If the partial register-part of the arg counts in its stack size,
2446 skip the part of stack space corresponding to the registers.
2447 Otherwise, start copying to the beginning of the stack space,
2448 by setting SKIP to 0. */
2449#ifndef REG_PARM_STACK_SPACE
2450 skip = 0;
2451#else
2452 skip = used;
2453#endif
2454
2455#ifdef PUSH_ROUNDING
2456 /* Do it with several push insns if that doesn't take lots of insns
2457 and if there is no difficulty with push insns that skip bytes
2458 on the stack for alignment purposes. */
2459 if (args_addr == 0
2460 && GET_CODE (size) == CONST_INT
2461 && skip == 0
2462 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2463 < MOVE_RATIO)
bbf6f052
RK
2464 /* Here we avoid the case of a structure whose weak alignment
2465 forces many pushes of a small amount of data,
2466 and such small pushes do rounding that causes trouble. */
c7a7ac46 2467 && ((! SLOW_UNALIGNED_ACCESS)
e87b4f3f 2468 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2469 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
2470 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2471 {
2472 /* Push padding now if padding above and stack grows down,
2473 or if padding below and stack grows up.
2474 But if space already allocated, this has already been done. */
2475 if (extra && args_addr == 0
2476 && where_pad != none && where_pad != stack_direction)
906c4e36 2477 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2478
2479 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2480 INTVAL (size) - used, align);
2481 }
2482 else
2483#endif /* PUSH_ROUNDING */
2484 {
2485 /* Otherwise make space on the stack and copy the data
2486 to the address of that space. */
2487
2488 /* Deduct words put into registers from the size we must copy. */
2489 if (partial != 0)
2490 {
2491 if (GET_CODE (size) == CONST_INT)
906c4e36 2492 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
2493 else
2494 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
2495 GEN_INT (used), NULL_RTX, 0,
2496 OPTAB_LIB_WIDEN);
bbf6f052
RK
2497 }
2498
2499 /* Get the address of the stack space.
2500 In this case, we do not deal with EXTRA separately.
2501 A single stack adjust will do. */
2502 if (! args_addr)
2503 {
2504 temp = push_block (size, extra, where_pad == downward);
2505 extra = 0;
2506 }
2507 else if (GET_CODE (args_so_far) == CONST_INT)
2508 temp = memory_address (BLKmode,
2509 plus_constant (args_addr,
2510 skip + INTVAL (args_so_far)));
2511 else
2512 temp = memory_address (BLKmode,
2513 plus_constant (gen_rtx (PLUS, Pmode,
2514 args_addr, args_so_far),
2515 skip));
2516
2517 /* TEMP is the address of the block. Copy the data there. */
2518 if (GET_CODE (size) == CONST_INT
2519 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2520 < MOVE_RATIO))
2521 {
2522 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2523 INTVAL (size), align);
2524 goto ret;
2525 }
2526 /* Try the most limited insn first, because there's no point
2527 including more than one in the machine description unless
2528 the more limited one has some advantage. */
2529#ifdef HAVE_movstrqi
2530 if (HAVE_movstrqi
2531 && GET_CODE (size) == CONST_INT
2532 && ((unsigned) INTVAL (size)
2533 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2534 {
c841050e
RS
2535 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2536 xinner, size, GEN_INT (align));
2537 if (pat != 0)
2538 {
2539 emit_insn (pat);
2540 goto ret;
2541 }
bbf6f052
RK
2542 }
2543#endif
2544#ifdef HAVE_movstrhi
2545 if (HAVE_movstrhi
2546 && GET_CODE (size) == CONST_INT
2547 && ((unsigned) INTVAL (size)
2548 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2549 {
c841050e
RS
2550 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2551 xinner, size, GEN_INT (align));
2552 if (pat != 0)
2553 {
2554 emit_insn (pat);
2555 goto ret;
2556 }
bbf6f052
RK
2557 }
2558#endif
2559#ifdef HAVE_movstrsi
2560 if (HAVE_movstrsi)
2561 {
c841050e
RS
2562 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2563 xinner, size, GEN_INT (align));
2564 if (pat != 0)
2565 {
2566 emit_insn (pat);
2567 goto ret;
2568 }
bbf6f052
RK
2569 }
2570#endif
2571#ifdef HAVE_movstrdi
2572 if (HAVE_movstrdi)
2573 {
c841050e
RS
2574 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2575 xinner, size, GEN_INT (align));
2576 if (pat != 0)
2577 {
2578 emit_insn (pat);
2579 goto ret;
2580 }
bbf6f052
RK
2581 }
2582#endif
2583
2584#ifndef ACCUMULATE_OUTGOING_ARGS
2585 /* If the source is referenced relative to the stack pointer,
2586 copy it to another register to stabilize it. We do not need
2587 to do this if we know that we won't be changing sp. */
2588
2589 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2590 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2591 temp = copy_to_reg (temp);
2592#endif
2593
2594 /* Make inhibit_defer_pop nonzero around the library call
2595 to force it to pop the bcopy-arguments right away. */
2596 NO_DEFER_POP;
2597#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2598 emit_library_call (memcpy_libfunc, 0,
bbf6f052 2599 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
2600 convert_to_mode (TYPE_MODE (sizetype),
2601 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2602 TYPE_MODE (sizetype));
bbf6f052 2603#else
d562e42e 2604 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2605 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3b6f75e2
JW
2606 convert_to_mode (TYPE_MODE (integer_type_node),
2607 size,
2608 TREE_UNSIGNED (integer_type_node)),
2609 TYPE_MODE (integer_type_node));
bbf6f052
RK
2610#endif
2611 OK_DEFER_POP;
2612 }
2613 }
2614 else if (partial > 0)
2615 {
2616 /* Scalar partly in registers. */
2617
2618 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2619 int i;
2620 int not_stack;
2621 /* # words of start of argument
2622 that we must make space for but need not store. */
2623 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2624 int args_offset = INTVAL (args_so_far);
2625 int skip;
2626
2627 /* Push padding now if padding above and stack grows down,
2628 or if padding below and stack grows up.
2629 But if space already allocated, this has already been done. */
2630 if (extra && args_addr == 0
2631 && where_pad != none && where_pad != stack_direction)
906c4e36 2632 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2633
2634 /* If we make space by pushing it, we might as well push
2635 the real data. Otherwise, we can leave OFFSET nonzero
2636 and leave the space uninitialized. */
2637 if (args_addr == 0)
2638 offset = 0;
2639
2640 /* Now NOT_STACK gets the number of words that we don't need to
2641 allocate on the stack. */
2642 not_stack = partial - offset;
2643
2644 /* If the partial register-part of the arg counts in its stack size,
2645 skip the part of stack space corresponding to the registers.
2646 Otherwise, start copying to the beginning of the stack space,
2647 by setting SKIP to 0. */
2648#ifndef REG_PARM_STACK_SPACE
2649 skip = 0;
2650#else
2651 skip = not_stack;
2652#endif
2653
2654 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2655 x = validize_mem (force_const_mem (mode, x));
2656
2657 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2658 SUBREGs of such registers are not allowed. */
2659 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2660 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2661 x = copy_to_reg (x);
2662
2663 /* Loop over all the words allocated on the stack for this arg. */
2664 /* We can do it by words, because any scalar bigger than a word
2665 has a size a multiple of a word. */
2666#ifndef PUSH_ARGS_REVERSED
2667 for (i = not_stack; i < size; i++)
2668#else
2669 for (i = size - 1; i >= not_stack; i--)
2670#endif
2671 if (i >= not_stack + offset)
2672 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
2673 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2674 0, args_addr,
2675 GEN_INT (args_offset + ((i - not_stack + skip)
bbf6f052
RK
2676 * UNITS_PER_WORD)));
2677 }
2678 else
2679 {
2680 rtx addr;
2681
2682 /* Push padding now if padding above and stack grows down,
2683 or if padding below and stack grows up.
2684 But if space already allocated, this has already been done. */
2685 if (extra && args_addr == 0
2686 && where_pad != none && where_pad != stack_direction)
906c4e36 2687 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2688
2689#ifdef PUSH_ROUNDING
2690 if (args_addr == 0)
2691 addr = gen_push_operand ();
2692 else
2693#endif
2694 if (GET_CODE (args_so_far) == CONST_INT)
2695 addr
2696 = memory_address (mode,
2697 plus_constant (args_addr, INTVAL (args_so_far)));
2698 else
2699 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2700 args_so_far));
2701
2702 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2703 }
2704
2705 ret:
2706 /* If part should go in registers, copy that part
2707 into the appropriate registers. Do this now, at the end,
2708 since mem-to-mem copies above may do function calls. */
cd048831 2709 if (partial > 0 && reg != 0)
fffa9c1d
JW
2710 {
2711 /* Handle calls that pass values in multiple non-contiguous locations.
2712 The Irix 6 ABI has examples of this. */
2713 if (GET_CODE (reg) == PARALLEL)
2714 emit_group_load (reg, x);
2715 else
2716 move_block_to_reg (REGNO (reg), x, partial, mode);
2717 }
bbf6f052
RK
2718
2719 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 2720 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2721}
2722\f
bbf6f052
RK
2723/* Expand an assignment that stores the value of FROM into TO.
2724 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
2725 (This may contain a QUEUED rtx;
2726 if the value is constant, this rtx is a constant.)
2727 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
2728
2729 SUGGEST_REG is no longer actually used.
2730 It used to mean, copy the value through a register
2731 and return that register, if that is possible.
709f5be1 2732 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
2733
2734rtx
2735expand_assignment (to, from, want_value, suggest_reg)
2736 tree to, from;
2737 int want_value;
2738 int suggest_reg;
2739{
2740 register rtx to_rtx = 0;
2741 rtx result;
2742
2743 /* Don't crash if the lhs of the assignment was erroneous. */
2744
2745 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
2746 {
2747 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2748 return want_value ? result : NULL_RTX;
2749 }
bbf6f052 2750
ca695ac9
JB
2751 if (output_bytecode)
2752 {
2753 tree dest_innermost;
2754
2755 bc_expand_expr (from);
6d6e61ce 2756 bc_emit_instruction (duplicate);
ca695ac9
JB
2757
2758 dest_innermost = bc_expand_address (to);
2759
2760 /* Can't deduce from TYPE that we're dealing with a bitfield, so
0f41302f 2761 take care of it here. */
ca695ac9
JB
2762
2763 bc_store_memory (TREE_TYPE (to), dest_innermost);
2764 return NULL;
2765 }
2766
bbf6f052
RK
2767 /* Assignment of a structure component needs special treatment
2768 if the structure component's rtx is not simply a MEM.
6be58303
JW
2769 Assignment of an array element at a constant index, and assignment of
2770 an array element in an unaligned packed structure field, has the same
2771 problem. */
bbf6f052
RK
2772
2773 if (TREE_CODE (to) == COMPONENT_REF
2774 || TREE_CODE (to) == BIT_FIELD_REF
2775 || (TREE_CODE (to) == ARRAY_REF
6be58303
JW
2776 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2777 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
c7a7ac46 2778 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
bbf6f052
RK
2779 {
2780 enum machine_mode mode1;
2781 int bitsize;
2782 int bitpos;
7bb0943f 2783 tree offset;
bbf6f052
RK
2784 int unsignedp;
2785 int volatilep = 0;
0088fcb1 2786 tree tem;
d78d243c 2787 int alignment;
0088fcb1
RK
2788
2789 push_temp_slots ();
2790 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
bbf6f052
RK
2791 &mode1, &unsignedp, &volatilep);
2792
2793 /* If we are going to use store_bit_field and extract_bit_field,
2794 make sure to_rtx will be safe for multiple use. */
2795
2796 if (mode1 == VOIDmode && want_value)
2797 tem = stabilize_reference (tem);
2798
d78d243c 2799 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
906c4e36 2800 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2801 if (offset != 0)
2802 {
906c4e36 2803 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2804
2805 if (GET_CODE (to_rtx) != MEM)
2806 abort ();
2807 to_rtx = change_address (to_rtx, VOIDmode,
88f63c77
RK
2808 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2809 force_reg (ptr_mode, offset_rtx)));
7bb0943f 2810 }
bbf6f052
RK
2811 if (volatilep)
2812 {
2813 if (GET_CODE (to_rtx) == MEM)
01188446
JW
2814 {
2815 /* When the offset is zero, to_rtx is the address of the
2816 structure we are storing into, and hence may be shared.
2817 We must make a new MEM before setting the volatile bit. */
2818 if (offset == 0)
2819 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2820 MEM_VOLATILE_P (to_rtx) = 1;
2821 }
bbf6f052
RK
2822#if 0 /* This was turned off because, when a field is volatile
2823 in an object which is not volatile, the object may be in a register,
2824 and then we would abort over here. */
2825 else
2826 abort ();
2827#endif
2828 }
2829
2830 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2831 (want_value
2832 /* Spurious cast makes HPUX compiler happy. */
2833 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2834 : VOIDmode),
2835 unsignedp,
2836 /* Required alignment of containing datum. */
d78d243c 2837 alignment,
bbf6f052
RK
2838 int_size_in_bytes (TREE_TYPE (tem)));
2839 preserve_temp_slots (result);
2840 free_temp_slots ();
0088fcb1 2841 pop_temp_slots ();
bbf6f052 2842
709f5be1
RS
2843 /* If the value is meaningful, convert RESULT to the proper mode.
2844 Otherwise, return nothing. */
5ffe63ed
RS
2845 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2846 TYPE_MODE (TREE_TYPE (from)),
2847 result,
2848 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 2849 : NULL_RTX);
bbf6f052
RK
2850 }
2851
cd1db108
RS
2852 /* If the rhs is a function call and its value is not an aggregate,
2853 call the function before we start to compute the lhs.
2854 This is needed for correct code for cases such as
2855 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
2856 requires loading up part of an address in a separate insn.
2857
2858 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2859 a promoted variable where the zero- or sign- extension needs to be done.
2860 Handling this in the normal way is safe because no computation is done
2861 before the call. */
2862 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 2863 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1ad87b63 2864 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 2865 {
0088fcb1
RK
2866 rtx value;
2867
2868 push_temp_slots ();
2869 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108
RS
2870 if (to_rtx == 0)
2871 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
aaf87c45 2872
fffa9c1d
JW
2873 /* Handle calls that return values in multiple non-contiguous locations.
2874 The Irix 6 ABI has examples of this. */
2875 if (GET_CODE (to_rtx) == PARALLEL)
2876 emit_group_load (to_rtx, value);
2877 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 2878 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 2879 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45
JL
2880 else
2881 emit_move_insn (to_rtx, value);
cd1db108
RS
2882 preserve_temp_slots (to_rtx);
2883 free_temp_slots ();
0088fcb1 2884 pop_temp_slots ();
709f5be1 2885 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
2886 }
2887
bbf6f052
RK
2888 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2889 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2890
2891 if (to_rtx == 0)
906c4e36 2892 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
bbf6f052 2893
86d38d25
RS
2894 /* Don't move directly into a return register. */
2895 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2896 {
0088fcb1
RK
2897 rtx temp;
2898
2899 push_temp_slots ();
2900 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
2901 emit_move_insn (to_rtx, temp);
2902 preserve_temp_slots (to_rtx);
2903 free_temp_slots ();
0088fcb1 2904 pop_temp_slots ();
709f5be1 2905 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
2906 }
2907
bbf6f052
RK
2908 /* In case we are returning the contents of an object which overlaps
2909 the place the value is being stored, use a safe function when copying
2910 a value through a pointer into a structure value return block. */
2911 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2912 && current_function_returns_struct
2913 && !current_function_returns_pcc_struct)
2914 {
0088fcb1
RK
2915 rtx from_rtx, size;
2916
2917 push_temp_slots ();
33a20d10
RK
2918 size = expr_size (from);
2919 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
2920
2921#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2922 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
2923 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2924 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
2925 convert_to_mode (TYPE_MODE (sizetype),
2926 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2927 TYPE_MODE (sizetype));
bbf6f052 2928#else
d562e42e 2929 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
2930 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2931 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
2932 convert_to_mode (TYPE_MODE (integer_type_node),
2933 size, TREE_UNSIGNED (integer_type_node)),
2934 TYPE_MODE (integer_type_node));
bbf6f052
RK
2935#endif
2936
2937 preserve_temp_slots (to_rtx);
2938 free_temp_slots ();
0088fcb1 2939 pop_temp_slots ();
709f5be1 2940 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
2941 }
2942
2943 /* Compute FROM and store the value in the rtx we got. */
2944
0088fcb1 2945 push_temp_slots ();
bbf6f052
RK
2946 result = store_expr (from, to_rtx, want_value);
2947 preserve_temp_slots (result);
2948 free_temp_slots ();
0088fcb1 2949 pop_temp_slots ();
709f5be1 2950 return want_value ? result : NULL_RTX;
bbf6f052
RK
2951}
2952
2953/* Generate code for computing expression EXP,
2954 and storing the value into TARGET.
bbf6f052
RK
2955 TARGET may contain a QUEUED rtx.
2956
709f5be1
RS
2957 If WANT_VALUE is nonzero, return a copy of the value
2958 not in TARGET, so that we can be sure to use the proper
2959 value in a containing expression even if TARGET has something
2960 else stored in it. If possible, we copy the value through a pseudo
2961 and return that pseudo. Or, if the value is constant, we try to
2962 return the constant. In some cases, we return a pseudo
2963 copied *from* TARGET.
2964
2965 If the mode is BLKmode then we may return TARGET itself.
2966 It turns out that in BLKmode it doesn't cause a problem.
2967 because C has no operators that could combine two different
2968 assignments into the same BLKmode object with different values
2969 with no sequence point. Will other languages need this to
2970 be more thorough?
2971
2972 If WANT_VALUE is 0, we return NULL, to make sure
2973 to catch quickly any cases where the caller uses the value
2974 and fails to set WANT_VALUE. */
bbf6f052
RK
2975
2976rtx
709f5be1 2977store_expr (exp, target, want_value)
bbf6f052
RK
2978 register tree exp;
2979 register rtx target;
709f5be1 2980 int want_value;
bbf6f052
RK
2981{
2982 register rtx temp;
2983 int dont_return_target = 0;
2984
2985 if (TREE_CODE (exp) == COMPOUND_EXPR)
2986 {
2987 /* Perform first part of compound expression, then assign from second
2988 part. */
2989 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2990 emit_queue ();
709f5be1 2991 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
2992 }
2993 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2994 {
2995 /* For conditional expression, get safe form of the target. Then
2996 test the condition, doing the appropriate assignment on either
2997 side. This avoids the creation of unnecessary temporaries.
2998 For non-BLKmode, it is more efficient not to do this. */
2999
3000 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
a3a58acc
JM
3001 rtx flag = NULL_RTX;
3002 tree left_cleanups = NULL_TREE;
3003 tree right_cleanups = NULL_TREE;
3004 tree old_cleanups = cleanups_this_call;
3005
3006 /* Used to save a pointer to the place to put the setting of
3007 the flag that indicates if this side of the conditional was
3008 taken. We backpatch the code, if we find out later that we
3009 have any conditional cleanups that need to be performed. */
3010 rtx dest_right_flag = NULL_RTX;
3011 rtx dest_left_flag = NULL_RTX;
bbf6f052
RK
3012
3013 emit_queue ();
3014 target = protect_from_queue (target, 1);
3015
dabf8373 3016 do_pending_stack_adjust ();
bbf6f052
RK
3017 NO_DEFER_POP;
3018 jumpifnot (TREE_OPERAND (exp, 0), lab1);
709f5be1 3019 store_expr (TREE_OPERAND (exp, 1), target, 0);
a3a58acc
JM
3020 dest_left_flag = get_last_insn ();
3021 /* Handle conditional cleanups, if any. */
3022 left_cleanups = defer_cleanups_to (old_cleanups);
bbf6f052
RK
3023 emit_queue ();
3024 emit_jump_insn (gen_jump (lab2));
3025 emit_barrier ();
3026 emit_label (lab1);
709f5be1 3027 store_expr (TREE_OPERAND (exp, 2), target, 0);
a3a58acc
JM
3028 dest_right_flag = get_last_insn ();
3029 /* Handle conditional cleanups, if any. */
3030 right_cleanups = defer_cleanups_to (old_cleanups);
bbf6f052
RK
3031 emit_queue ();
3032 emit_label (lab2);
3033 OK_DEFER_POP;
a3a58acc
JM
3034
3035 /* Add back in any conditional cleanups. */
3036 if (left_cleanups || right_cleanups)
3037 {
3038 tree new_cleanups;
3039 tree cond;
3040 rtx last;
3041
3042 /* Now that we know that a flag is needed, go back and add in the
3043 setting of the flag. */
3044
3045 flag = gen_reg_rtx (word_mode);
3046
3047 /* Do the left side flag. */
3048 last = get_last_insn ();
3049 /* Flag left cleanups as needed. */
3050 emit_move_insn (flag, const1_rtx);
3051 /* ??? deprecated, use sequences instead. */
3052 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
3053
3054 /* Do the right side flag. */
3055 last = get_last_insn ();
3056 /* Flag left cleanups as needed. */
3057 emit_move_insn (flag, const0_rtx);
3058 /* ??? deprecated, use sequences instead. */
3059 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
3060
3061 /* All cleanups must be on the function_obstack. */
3062 push_obstacks_nochange ();
3063 resume_temporary_allocation ();
3064
3065 /* convert flag, which is an rtx, into a tree. */
3066 cond = make_node (RTL_EXPR);
3067 TREE_TYPE (cond) = integer_type_node;
3068 RTL_EXPR_RTL (cond) = flag;
3069 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
3070 cond = save_expr (cond);
3071
3072 if (! left_cleanups)
3073 left_cleanups = integer_zero_node;
3074 if (! right_cleanups)
3075 right_cleanups = integer_zero_node;
3076 new_cleanups = build (COND_EXPR, void_type_node,
3077 truthvalue_conversion (cond),
3078 left_cleanups, right_cleanups);
3079 new_cleanups = fold (new_cleanups);
3080
3081 pop_obstacks ();
3082
3083 /* Now add in the conditionalized cleanups. */
3084 cleanups_this_call
3085 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3086 expand_eh_region_start ();
3087 }
709f5be1 3088 return want_value ? target : NULL_RTX;
bbf6f052 3089 }
709f5be1 3090 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
bbf6f052
RK
3091 && GET_MODE (target) != BLKmode)
3092 /* If target is in memory and caller wants value in a register instead,
3093 arrange that. Pass TARGET as target for expand_expr so that,
709f5be1 3094 if EXP is another assignment, WANT_VALUE will be nonzero for it.
c2e6aff6
RS
3095 We know expand_expr will not use the target in that case.
3096 Don't do this if TARGET is volatile because we are supposed
3097 to write it and then read it. */
bbf6f052 3098 {
906c4e36 3099 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
3100 GET_MODE (target), 0);
3101 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3102 temp = copy_to_reg (temp);
3103 dont_return_target = 1;
3104 }
3105 else if (queued_subexp_p (target))
709f5be1
RS
3106 /* If target contains a postincrement, let's not risk
3107 using it as the place to generate the rhs. */
bbf6f052
RK
3108 {
3109 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3110 {
3111 /* Expand EXP into a new pseudo. */
3112 temp = gen_reg_rtx (GET_MODE (target));
3113 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3114 }
3115 else
906c4e36 3116 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
3117
3118 /* If target is volatile, ANSI requires accessing the value
3119 *from* the target, if it is accessed. So make that happen.
3120 In no case return the target itself. */
3121 if (! MEM_VOLATILE_P (target) && want_value)
3122 dont_return_target = 1;
bbf6f052 3123 }
1499e0a8
RK
3124 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3125 /* If this is an scalar in a register that is stored in a wider mode
3126 than the declared mode, compute the result into its declared mode
3127 and then convert to the wider mode. Our value is the computed
3128 expression. */
3129 {
5a32d038 3130 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3131 which will often result in some optimizations. Do the conversion
3132 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
3133 the extend. But don't do this if the type of EXP is a subtype
3134 of something else since then the conversion might involve
3135 more than just converting modes. */
3136 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3137 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
3138 {
3139 if (TREE_UNSIGNED (TREE_TYPE (exp))
3140 != SUBREG_PROMOTED_UNSIGNED_P (target))
3141 exp
3142 = convert
3143 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3144 TREE_TYPE (exp)),
3145 exp);
3146
3147 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3148 SUBREG_PROMOTED_UNSIGNED_P (target)),
3149 exp);
3150 }
5a32d038 3151
1499e0a8 3152 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 3153
766f36c7 3154 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
3155 the access now so it gets done only once. Likewise if
3156 it contains TARGET. */
3157 if (GET_CODE (temp) == MEM && want_value
3158 && (MEM_VOLATILE_P (temp)
3159 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
3160 temp = copy_to_reg (temp);
3161
b258707c
RS
3162 /* If TEMP is a VOIDmode constant, use convert_modes to make
3163 sure that we properly convert it. */
3164 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3165 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3166 TYPE_MODE (TREE_TYPE (exp)), temp,
3167 SUBREG_PROMOTED_UNSIGNED_P (target));
3168
1499e0a8
RK
3169 convert_move (SUBREG_REG (target), temp,
3170 SUBREG_PROMOTED_UNSIGNED_P (target));
709f5be1 3171 return want_value ? temp : NULL_RTX;
1499e0a8 3172 }
bbf6f052
RK
3173 else
3174 {
3175 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 3176 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
3177 If TARGET is a volatile mem ref, either return TARGET
3178 or return a reg copied *from* TARGET; ANSI requires this.
3179
3180 Otherwise, if TEMP is not TARGET, return TEMP
3181 if it is constant (for efficiency),
3182 or if we really want the correct value. */
bbf6f052
RK
3183 if (!(target && GET_CODE (target) == REG
3184 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1
RS
3185 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3186 && temp != target
3187 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
3188 dont_return_target = 1;
3189 }
3190
b258707c
RS
3191 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3192 the same as that of TARGET, adjust the constant. This is needed, for
3193 example, in case it is a CONST_DOUBLE and we want only a word-sized
3194 value. */
3195 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3196 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
3197 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3198 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3199 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3200
bbf6f052
RK
3201 /* If value was not generated in the target, store it there.
3202 Convert the value to TARGET's type first if nec. */
3203
3204 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3205 {
3206 target = protect_from_queue (target, 1);
3207 if (GET_MODE (temp) != GET_MODE (target)
3208 && GET_MODE (temp) != VOIDmode)
3209 {
3210 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3211 if (dont_return_target)
3212 {
3213 /* In this case, we will return TEMP,
3214 so make sure it has the proper mode.
3215 But don't forget to store the value into TARGET. */
3216 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3217 emit_move_insn (target, temp);
3218 }
3219 else
3220 convert_move (target, temp, unsignedp);
3221 }
3222
3223 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3224 {
3225 /* Handle copying a string constant into an array.
3226 The string constant may be shorter than the array.
3227 So copy just the string's actual length, and clear the rest. */
3228 rtx size;
22619c3f 3229 rtx addr;
bbf6f052 3230
e87b4f3f
RS
3231 /* Get the size of the data type of the string,
3232 which is actually the size of the target. */
3233 size = expr_size (exp);
3234 if (GET_CODE (size) == CONST_INT
3235 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3236 emit_block_move (target, temp, size,
3237 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3238 else
bbf6f052 3239 {
e87b4f3f
RS
3240 /* Compute the size of the data to copy from the string. */
3241 tree copy_size
c03b7665 3242 = size_binop (MIN_EXPR,
b50d17a1 3243 make_tree (sizetype, size),
c03b7665
RK
3244 convert (sizetype,
3245 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
3246 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3247 VOIDmode, 0);
e87b4f3f
RS
3248 rtx label = 0;
3249
3250 /* Copy that much. */
3251 emit_block_move (target, temp, copy_size_rtx,
3252 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3253
88f63c77
RK
3254 /* Figure out how much is left in TARGET that we have to clear.
3255 Do all calculations in ptr_mode. */
3256
3257 addr = XEXP (target, 0);
3258 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3259
e87b4f3f
RS
3260 if (GET_CODE (copy_size_rtx) == CONST_INT)
3261 {
88f63c77 3262 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
22619c3f 3263 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
e87b4f3f
RS
3264 }
3265 else
3266 {
88f63c77
RK
3267 addr = force_reg (ptr_mode, addr);
3268 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
3269 copy_size_rtx, NULL_RTX, 0,
3270 OPTAB_LIB_WIDEN);
e87b4f3f 3271
88f63c77 3272 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
3273 copy_size_rtx, NULL_RTX, 0,
3274 OPTAB_LIB_WIDEN);
e87b4f3f 3275
906c4e36 3276 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
3277 GET_MODE (size), 0, 0);
3278 label = gen_label_rtx ();
3279 emit_jump_insn (gen_blt (label));
3280 }
3281
3282 if (size != const0_rtx)
3283 {
bbf6f052 3284#ifdef TARGET_MEM_FUNCTIONS
3b6f75e2 3285 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
86242483 3286 addr, ptr_mode,
3b6f75e2
JW
3287 const0_rtx, TYPE_MODE (integer_type_node),
3288 convert_to_mode (TYPE_MODE (sizetype),
3289 size,
3290 TREE_UNSIGNED (sizetype)),
3291 TYPE_MODE (sizetype));
bbf6f052 3292#else
d562e42e 3293 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
86242483 3294 addr, ptr_mode,
3b6f75e2
JW
3295 convert_to_mode (TYPE_MODE (integer_type_node),
3296 size,
3297 TREE_UNSIGNED (integer_type_node)),
3298 TYPE_MODE (integer_type_node));
bbf6f052 3299#endif
e87b4f3f 3300 }
22619c3f 3301
e87b4f3f
RS
3302 if (label)
3303 emit_label (label);
bbf6f052
RK
3304 }
3305 }
fffa9c1d
JW
3306 /* Handle calls that return values in multiple non-contiguous locations.
3307 The Irix 6 ABI has examples of this. */
3308 else if (GET_CODE (target) == PARALLEL)
3309 emit_group_load (target, temp);
bbf6f052
RK
3310 else if (GET_MODE (temp) == BLKmode)
3311 emit_block_move (target, temp, expr_size (exp),
3312 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3313 else
3314 emit_move_insn (target, temp);
3315 }
709f5be1 3316
766f36c7
RK
3317 /* If we don't want a value, return NULL_RTX. */
3318 if (! want_value)
3319 return NULL_RTX;
3320
3321 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3322 ??? The latter test doesn't seem to make sense. */
3323 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 3324 return temp;
766f36c7
RK
3325
3326 /* Return TARGET itself if it is a hard register. */
3327 else if (want_value && GET_MODE (target) != BLKmode
3328 && ! (GET_CODE (target) == REG
3329 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 3330 return copy_to_reg (target);
766f36c7
RK
3331
3332 else
709f5be1 3333 return target;
bbf6f052
RK
3334}
3335\f
9de08200
RK
3336/* Return 1 if EXP just contains zeros. */
3337
3338static int
3339is_zeros_p (exp)
3340 tree exp;
3341{
3342 tree elt;
3343
3344 switch (TREE_CODE (exp))
3345 {
3346 case CONVERT_EXPR:
3347 case NOP_EXPR:
3348 case NON_LVALUE_EXPR:
3349 return is_zeros_p (TREE_OPERAND (exp, 0));
3350
3351 case INTEGER_CST:
3352 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3353
3354 case COMPLEX_CST:
3355 return
3356 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3357
3358 case REAL_CST:
3359 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3360
3361 case CONSTRUCTOR:
e1a43f73
PB
3362 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3363 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
3364 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3365 if (! is_zeros_p (TREE_VALUE (elt)))
3366 return 0;
3367
3368 return 1;
3369 }
3370
3371 return 0;
3372}
3373
3374/* Return 1 if EXP contains mostly (3/4) zeros. */
3375
3376static int
3377mostly_zeros_p (exp)
3378 tree exp;
3379{
9de08200
RK
3380 if (TREE_CODE (exp) == CONSTRUCTOR)
3381 {
e1a43f73
PB
3382 int elts = 0, zeros = 0;
3383 tree elt = CONSTRUCTOR_ELTS (exp);
3384 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3385 {
3386 /* If there are no ranges of true bits, it is all zero. */
3387 return elt == NULL_TREE;
3388 }
3389 for (; elt; elt = TREE_CHAIN (elt))
3390 {
3391 /* We do not handle the case where the index is a RANGE_EXPR,
3392 so the statistic will be somewhat inaccurate.
3393 We do make a more accurate count in store_constructor itself,
3394 so since this function is only used for nested array elements,
0f41302f 3395 this should be close enough. */
e1a43f73
PB
3396 if (mostly_zeros_p (TREE_VALUE (elt)))
3397 zeros++;
3398 elts++;
3399 }
9de08200
RK
3400
3401 return 4 * zeros >= 3 * elts;
3402 }
3403
3404 return is_zeros_p (exp);
3405}
3406\f
e1a43f73
PB
3407/* Helper function for store_constructor.
3408 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3409 TYPE is the type of the CONSTRUCTOR, not the element type.
23ccec44
JW
3410 CLEARED is as for store_constructor.
3411
3412 This provides a recursive shortcut back to store_constructor when it isn't
3413 necessary to go through store_field. This is so that we can pass through
3414 the cleared field to let store_constructor know that we may not have to
3415 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
3416
3417static void
3418store_constructor_field (target, bitsize, bitpos,
3419 mode, exp, type, cleared)
3420 rtx target;
3421 int bitsize, bitpos;
3422 enum machine_mode mode;
3423 tree exp, type;
3424 int cleared;
3425{
3426 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
3427 && bitpos % BITS_PER_UNIT == 0
3428 /* If we have a non-zero bitpos for a register target, then we just
3429 let store_field do the bitfield handling. This is unlikely to
3430 generate unnecessary clear instructions anyways. */
3431 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 3432 {
126e5b0d
JW
3433 if (bitpos != 0)
3434 target = change_address (target, VOIDmode,
3435 plus_constant (XEXP (target, 0),
3436 bitpos / BITS_PER_UNIT));
3437 store_constructor (exp, target, cleared);
e1a43f73
PB
3438 }
3439 else
3440 store_field (target, bitsize, bitpos, mode, exp,
3441 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3442 int_size_in_bytes (type));
3443}
3444
bbf6f052 3445/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 3446 TARGET is either a REG or a MEM.
0f41302f 3447 CLEARED is true if TARGET is known to have been zero'd. */
bbf6f052
RK
3448
3449static void
e1a43f73 3450store_constructor (exp, target, cleared)
bbf6f052
RK
3451 tree exp;
3452 rtx target;
e1a43f73 3453 int cleared;
bbf6f052 3454{
4af3895e
JVA
3455 tree type = TREE_TYPE (exp);
3456
bbf6f052
RK
3457 /* We know our target cannot conflict, since safe_from_p has been called. */
3458#if 0
3459 /* Don't try copying piece by piece into a hard register
3460 since that is vulnerable to being clobbered by EXP.
3461 Instead, construct in a pseudo register and then copy it all. */
3462 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3463 {
3464 rtx temp = gen_reg_rtx (GET_MODE (target));
e1a43f73 3465 store_constructor (exp, temp, 0);
bbf6f052
RK
3466 emit_move_insn (target, temp);
3467 return;
3468 }
3469#endif
3470
e44842fe
RK
3471 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3472 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
3473 {
3474 register tree elt;
3475
4af3895e 3476 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
3477 if (TREE_CODE (type) == UNION_TYPE
3478 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 3479 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
4af3895e
JVA
3480
3481 /* If we are building a static constructor into a register,
3482 set the initial value as zero so we can fold the value into
67225c15
RK
3483 a constant. But if more than one register is involved,
3484 this probably loses. */
3485 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3486 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
3487 {
3488 if (! cleared)
3489 emit_move_insn (target, const0_rtx);
4af3895e 3490
9de08200
RK
3491 cleared = 1;
3492 }
3493
3494 /* If the constructor has fewer fields than the structure
3495 or if we are initializing the structure to mostly zeros,
bbf6f052 3496 clear the whole structure first. */
9de08200
RK
3497 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3498 != list_length (TYPE_FIELDS (type)))
3499 || mostly_zeros_p (exp))
3500 {
3501 if (! cleared)
3502 clear_storage (target, expr_size (exp),
3503 TYPE_ALIGN (type) / BITS_PER_UNIT);
3504
3505 cleared = 1;
3506 }
bbf6f052
RK
3507 else
3508 /* Inform later passes that the old value is dead. */
3509 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3510
3511 /* Store each element of the constructor into
3512 the corresponding field of TARGET. */
3513
3514 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3515 {
3516 register tree field = TREE_PURPOSE (elt);
3517 register enum machine_mode mode;
3518 int bitsize;
b50d17a1 3519 int bitpos = 0;
bbf6f052 3520 int unsignedp;
b50d17a1
RK
3521 tree pos, constant = 0, offset = 0;
3522 rtx to_rtx = target;
bbf6f052 3523
f32fd778
RS
3524 /* Just ignore missing fields.
3525 We cleared the whole structure, above,
3526 if any fields are missing. */
3527 if (field == 0)
3528 continue;
3529
e1a43f73
PB
3530 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3531 continue;
9de08200 3532
bbf6f052
RK
3533 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3534 unsignedp = TREE_UNSIGNED (field);
3535 mode = DECL_MODE (field);
3536 if (DECL_BIT_FIELD (field))
3537 mode = VOIDmode;
3538
b50d17a1
RK
3539 pos = DECL_FIELD_BITPOS (field);
3540 if (TREE_CODE (pos) == INTEGER_CST)
3541 constant = pos;
3542 else if (TREE_CODE (pos) == PLUS_EXPR
3543 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3544 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3545 else
3546 offset = pos;
3547
3548 if (constant)
cd11b87e 3549 bitpos = TREE_INT_CST_LOW (constant);
b50d17a1
RK
3550
3551 if (offset)
3552 {
3553 rtx offset_rtx;
3554
3555 if (contains_placeholder_p (offset))
3556 offset = build (WITH_RECORD_EXPR, sizetype,
3557 offset, exp);
bbf6f052 3558
b50d17a1
RK
3559 offset = size_binop (FLOOR_DIV_EXPR, offset,
3560 size_int (BITS_PER_UNIT));
bbf6f052 3561
b50d17a1
RK
3562 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3563 if (GET_CODE (to_rtx) != MEM)
3564 abort ();
3565
3566 to_rtx
3567 = change_address (to_rtx, VOIDmode,
88f63c77
RK
3568 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3569 force_reg (ptr_mode, offset_rtx)));
b50d17a1 3570 }
cf04eb80
RK
3571 if (TREE_READONLY (field))
3572 {
9151b3bf
RK
3573 if (GET_CODE (to_rtx) == MEM)
3574 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3575 XEXP (to_rtx, 0));
cf04eb80
RK
3576 RTX_UNCHANGING_P (to_rtx) = 1;
3577 }
3578
e1a43f73
PB
3579 store_constructor_field (to_rtx, bitsize, bitpos,
3580 mode, TREE_VALUE (elt), type, cleared);
bbf6f052
RK
3581 }
3582 }
4af3895e 3583 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
3584 {
3585 register tree elt;
3586 register int i;
e1a43f73 3587 int need_to_clear;
4af3895e 3588 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
3589 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3590 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 3591 tree elttype = TREE_TYPE (type);
bbf6f052 3592
e1a43f73
PB
3593 /* If the constructor has fewer elements than the array,
3594 clear the whole array first. Similarly if this this is
3595 static constructor of a non-BLKmode object. */
3596 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3597 need_to_clear = 1;
3598 else
3599 {
3600 HOST_WIDE_INT count = 0, zero_count = 0;
3601 need_to_clear = 0;
3602 /* This loop is a more accurate version of the loop in
3603 mostly_zeros_p (it handles RANGE_EXPR in an index).
3604 It is also needed to check for missing elements. */
3605 for (elt = CONSTRUCTOR_ELTS (exp);
3606 elt != NULL_TREE;
df0faff1 3607 elt = TREE_CHAIN (elt))
e1a43f73
PB
3608 {
3609 tree index = TREE_PURPOSE (elt);
3610 HOST_WIDE_INT this_node_count;
3611 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3612 {
3613 tree lo_index = TREE_OPERAND (index, 0);
3614 tree hi_index = TREE_OPERAND (index, 1);
3615 if (TREE_CODE (lo_index) != INTEGER_CST
3616 || TREE_CODE (hi_index) != INTEGER_CST)
3617 {
3618 need_to_clear = 1;
3619 break;
3620 }
3621 this_node_count = TREE_INT_CST_LOW (hi_index)
3622 - TREE_INT_CST_LOW (lo_index) + 1;
3623 }
3624 else
3625 this_node_count = 1;
3626 count += this_node_count;
3627 if (mostly_zeros_p (TREE_VALUE (elt)))
3628 zero_count += this_node_count;
3629 }
8e958f70 3630 /* Clear the entire array first if there are any missing elements,
0f41302f 3631 or if the incidence of zero elements is >= 75%. */
8e958f70
PB
3632 if (count < maxelt - minelt + 1
3633 || 4 * zero_count >= 3 * count)
e1a43f73
PB
3634 need_to_clear = 1;
3635 }
3636 if (need_to_clear)
9de08200
RK
3637 {
3638 if (! cleared)
3639 clear_storage (target, expr_size (exp),
3640 TYPE_ALIGN (type) / BITS_PER_UNIT);
9de08200
RK
3641 cleared = 1;
3642 }
bbf6f052
RK
3643 else
3644 /* Inform later passes that the old value is dead. */
3645 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3646
3647 /* Store each element of the constructor into
3648 the corresponding element of TARGET, determined
3649 by counting the elements. */
3650 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3651 elt;
3652 elt = TREE_CHAIN (elt), i++)
3653 {
3654 register enum machine_mode mode;
3655 int bitsize;
3656 int bitpos;
3657 int unsignedp;
e1a43f73 3658 tree value = TREE_VALUE (elt);
03dc44a6
RS
3659 tree index = TREE_PURPOSE (elt);
3660 rtx xtarget = target;
bbf6f052 3661
e1a43f73
PB
3662 if (cleared && is_zeros_p (value))
3663 continue;
9de08200 3664
bbf6f052
RK
3665 mode = TYPE_MODE (elttype);
3666 bitsize = GET_MODE_BITSIZE (mode);
3667 unsignedp = TREE_UNSIGNED (elttype);
3668
e1a43f73
PB
3669 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3670 {
3671 tree lo_index = TREE_OPERAND (index, 0);
3672 tree hi_index = TREE_OPERAND (index, 1);
3673 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3674 struct nesting *loop;
05c0b405
PB
3675 HOST_WIDE_INT lo, hi, count;
3676 tree position;
e1a43f73 3677
0f41302f 3678 /* If the range is constant and "small", unroll the loop. */
e1a43f73 3679 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
3680 && TREE_CODE (hi_index) == INTEGER_CST
3681 && (lo = TREE_INT_CST_LOW (lo_index),
3682 hi = TREE_INT_CST_LOW (hi_index),
3683 count = hi - lo + 1,
3684 (GET_CODE (target) != MEM
3685 || count <= 2
3686 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3687 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3688 <= 40 * 8))))
e1a43f73 3689 {
05c0b405
PB
3690 lo -= minelt; hi -= minelt;
3691 for (; lo <= hi; lo++)
e1a43f73 3692 {
05c0b405
PB
3693 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3694 store_constructor_field (target, bitsize, bitpos,
3695 mode, value, type, cleared);
e1a43f73
PB
3696 }
3697 }
3698 else
3699 {
3700 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3701 loop_top = gen_label_rtx ();
3702 loop_end = gen_label_rtx ();
3703
3704 unsignedp = TREE_UNSIGNED (domain);
3705
3706 index = build_decl (VAR_DECL, NULL_TREE, domain);
3707
3708 DECL_RTL (index) = index_r
3709 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3710 &unsignedp, 0));
3711
3712 if (TREE_CODE (value) == SAVE_EXPR
3713 && SAVE_EXPR_RTL (value) == 0)
3714 {
0f41302f
MS
3715 /* Make sure value gets expanded once before the
3716 loop. */
e1a43f73
PB
3717 expand_expr (value, const0_rtx, VOIDmode, 0);
3718 emit_queue ();
3719 }
3720 store_expr (lo_index, index_r, 0);
3721 loop = expand_start_loop (0);
3722
0f41302f 3723 /* Assign value to element index. */
e1a43f73
PB
3724 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3725 size_int (BITS_PER_UNIT));
3726 position = size_binop (MULT_EXPR,
3727 size_binop (MINUS_EXPR, index,
3728 TYPE_MIN_VALUE (domain)),
3729 position);
3730 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3731 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3732 xtarget = change_address (target, mode, addr);
3733 if (TREE_CODE (value) == CONSTRUCTOR)
05c0b405 3734 store_constructor (value, xtarget, cleared);
e1a43f73
PB
3735 else
3736 store_expr (value, xtarget, 0);
3737
3738 expand_exit_loop_if_false (loop,
3739 build (LT_EXPR, integer_type_node,
3740 index, hi_index));
3741
3742 expand_increment (build (PREINCREMENT_EXPR,
3743 TREE_TYPE (index),
7b8b9722 3744 index, integer_one_node), 0, 0);
e1a43f73
PB
3745 expand_end_loop ();
3746 emit_label (loop_end);
3747
3748 /* Needed by stupid register allocation. to extend the
3749 lifetime of pseudo-regs used by target past the end
3750 of the loop. */
3751 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3752 }
3753 }
3754 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 3755 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 3756 {
e1a43f73 3757 rtx pos_rtx, addr;
03dc44a6
RS
3758 tree position;
3759
5b6c44ff
RK
3760 if (index == 0)
3761 index = size_int (i);
3762
e1a43f73
PB
3763 if (minelt)
3764 index = size_binop (MINUS_EXPR, index,
3765 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
3766 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3767 size_int (BITS_PER_UNIT));
3768 position = size_binop (MULT_EXPR, index, position);
03dc44a6
RS
3769 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3770 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3771 xtarget = change_address (target, mode, addr);
e1a43f73 3772 store_expr (value, xtarget, 0);
03dc44a6
RS
3773 }
3774 else
3775 {
3776 if (index != 0)
7c314719 3777 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
3778 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3779 else
3780 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
e1a43f73
PB
3781 store_constructor_field (target, bitsize, bitpos,
3782 mode, value, type, cleared);
03dc44a6 3783 }
bbf6f052
RK
3784 }
3785 }
071a6595
PB
3786 /* set constructor assignments */
3787 else if (TREE_CODE (type) == SET_TYPE)
3788 {
e1a43f73 3789 tree elt = CONSTRUCTOR_ELTS (exp);
071a6595
PB
3790 rtx xtarget = XEXP (target, 0);
3791 int set_word_size = TYPE_ALIGN (type);
e1a43f73 3792 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
3793 tree domain = TYPE_DOMAIN (type);
3794 tree domain_min, domain_max, bitlength;
3795
9faa82d8 3796 /* The default implementation strategy is to extract the constant
071a6595
PB
3797 parts of the constructor, use that to initialize the target,
3798 and then "or" in whatever non-constant ranges we need in addition.
3799
3800 If a large set is all zero or all ones, it is
3801 probably better to set it using memset (if available) or bzero.
3802 Also, if a large set has just a single range, it may also be
3803 better to first clear all the first clear the set (using
0f41302f 3804 bzero/memset), and set the bits we want. */
071a6595 3805
0f41302f 3806 /* Check for all zeros. */
e1a43f73 3807 if (elt == NULL_TREE)
071a6595 3808 {
e1a43f73
PB
3809 if (!cleared)
3810 clear_storage (target, expr_size (exp),
3811 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
3812 return;
3813 }
3814
071a6595
PB
3815 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3816 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3817 bitlength = size_binop (PLUS_EXPR,
3818 size_binop (MINUS_EXPR, domain_max, domain_min),
3819 size_one_node);
3820
e1a43f73
PB
3821 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3822 abort ();
3823 nbits = TREE_INT_CST_LOW (bitlength);
3824
3825 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3826 are "complicated" (more than one range), initialize (the
3827 constant parts) by copying from a constant. */
3828 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3829 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 3830 {
b4ee5a72
PB
3831 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3832 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 3833 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72
PB
3834 HOST_WIDE_INT word = 0;
3835 int bit_pos = 0;
3836 int ibit = 0;
0f41302f 3837 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 3838 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 3839 for (;;)
071a6595 3840 {
b4ee5a72
PB
3841 if (bit_buffer[ibit])
3842 {
b09f3348 3843 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
3844 word |= (1 << (set_word_size - 1 - bit_pos));
3845 else
3846 word |= 1 << bit_pos;
3847 }
3848 bit_pos++; ibit++;
3849 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 3850 {
e1a43f73
PB
3851 if (word != 0 || ! cleared)
3852 {
3853 rtx datum = GEN_INT (word);
3854 rtx to_rtx;
0f41302f
MS
3855 /* The assumption here is that it is safe to use
3856 XEXP if the set is multi-word, but not if
3857 it's single-word. */
e1a43f73
PB
3858 if (GET_CODE (target) == MEM)
3859 {
3860 to_rtx = plus_constant (XEXP (target, 0), offset);
3861 to_rtx = change_address (target, mode, to_rtx);
3862 }
3863 else if (offset == 0)
3864 to_rtx = target;
3865 else
3866 abort ();
3867 emit_move_insn (to_rtx, datum);
3868 }
b4ee5a72
PB
3869 if (ibit == nbits)
3870 break;
3871 word = 0;
3872 bit_pos = 0;
3873 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
3874 }
3875 }
071a6595 3876 }
e1a43f73
PB
3877 else if (!cleared)
3878 {
0f41302f 3879 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
3880 if (TREE_CHAIN (elt) != NULL_TREE
3881 || (TREE_PURPOSE (elt) == NULL_TREE
3882 ? nbits != 1
3883 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3884 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3885 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3886 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3887 != nbits))))
3888 clear_storage (target, expr_size (exp),
3889 TYPE_ALIGN (type) / BITS_PER_UNIT);
3890 }
3891
3892 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
3893 {
3894 /* start of range of element or NULL */
3895 tree startbit = TREE_PURPOSE (elt);
3896 /* end of range of element, or element value */
3897 tree endbit = TREE_VALUE (elt);
3898 HOST_WIDE_INT startb, endb;
3899 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3900
3901 bitlength_rtx = expand_expr (bitlength,
3902 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3903
3904 /* handle non-range tuple element like [ expr ] */
3905 if (startbit == NULL_TREE)
3906 {
3907 startbit = save_expr (endbit);
3908 endbit = startbit;
3909 }
3910 startbit = convert (sizetype, startbit);
3911 endbit = convert (sizetype, endbit);
3912 if (! integer_zerop (domain_min))
3913 {
3914 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3915 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3916 }
3917 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3918 EXPAND_CONST_ADDRESS);
3919 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3920 EXPAND_CONST_ADDRESS);
3921
3922 if (REG_P (target))
3923 {
3924 targetx = assign_stack_temp (GET_MODE (target),
3925 GET_MODE_SIZE (GET_MODE (target)),
3926 0);
3927 emit_move_insn (targetx, target);
3928 }
3929 else if (GET_CODE (target) == MEM)
3930 targetx = target;
3931 else
3932 abort ();
3933
3934#ifdef TARGET_MEM_FUNCTIONS
3935 /* Optimization: If startbit and endbit are
9faa82d8 3936 constants divisible by BITS_PER_UNIT,
0f41302f 3937 call memset instead. */
071a6595
PB
3938 if (TREE_CODE (startbit) == INTEGER_CST
3939 && TREE_CODE (endbit) == INTEGER_CST
3940 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 3941 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 3942 {
071a6595
PB
3943 emit_library_call (memset_libfunc, 0,
3944 VOIDmode, 3,
e1a43f73
PB
3945 plus_constant (XEXP (targetx, 0),
3946 startb / BITS_PER_UNIT),
071a6595 3947 Pmode,
3b6f75e2 3948 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 3949 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 3950 TYPE_MODE (sizetype));
071a6595
PB
3951 }
3952 else
3953#endif
3954 {
071a6595
PB
3955 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3956 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3957 bitlength_rtx, TYPE_MODE (sizetype),
3958 startbit_rtx, TYPE_MODE (sizetype),
3959 endbit_rtx, TYPE_MODE (sizetype));
3960 }
3961 if (REG_P (target))
3962 emit_move_insn (target, targetx);
3963 }
3964 }
bbf6f052
RK
3965
3966 else
3967 abort ();
3968}
3969
3970/* Store the value of EXP (an expression tree)
3971 into a subfield of TARGET which has mode MODE and occupies
3972 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3973 If MODE is VOIDmode, it means that we are storing into a bit-field.
3974
3975 If VALUE_MODE is VOIDmode, return nothing in particular.
3976 UNSIGNEDP is not used in this case.
3977
3978 Otherwise, return an rtx for the value stored. This rtx
3979 has mode VALUE_MODE if that is convenient to do.
3980 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3981
3982 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3983 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3984
3985static rtx
3986store_field (target, bitsize, bitpos, mode, exp, value_mode,
3987 unsignedp, align, total_size)
3988 rtx target;
3989 int bitsize, bitpos;
3990 enum machine_mode mode;
3991 tree exp;
3992 enum machine_mode value_mode;
3993 int unsignedp;
3994 int align;
3995 int total_size;
3996{
906c4e36 3997 HOST_WIDE_INT width_mask = 0;
bbf6f052 3998
906c4e36
RK
3999 if (bitsize < HOST_BITS_PER_WIDE_INT)
4000 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
4001
4002 /* If we are storing into an unaligned field of an aligned union that is
4003 in a register, we may have the mode of TARGET being an integer mode but
4004 MODE == BLKmode. In that case, get an aligned object whose size and
4005 alignment are the same as TARGET and store TARGET into it (we can avoid
4006 the store if the field being stored is the entire width of TARGET). Then
4007 call ourselves recursively to store the field into a BLKmode version of
4008 that object. Finally, load from the object into TARGET. This is not
4009 very efficient in general, but should only be slightly more expensive
4010 than the otherwise-required unaligned accesses. Perhaps this can be
4011 cleaned up later. */
4012
4013 if (mode == BLKmode
4014 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4015 {
4016 rtx object = assign_stack_temp (GET_MODE (target),
4017 GET_MODE_SIZE (GET_MODE (target)), 0);
4018 rtx blk_object = copy_rtx (object);
4019
24a13950
JW
4020 MEM_IN_STRUCT_P (object) = 1;
4021 MEM_IN_STRUCT_P (blk_object) = 1;
bbf6f052
RK
4022 PUT_MODE (blk_object, BLKmode);
4023
4024 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4025 emit_move_insn (object, target);
4026
4027 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4028 align, total_size);
4029
46093b97
RS
4030 /* Even though we aren't returning target, we need to
4031 give it the updated value. */
bbf6f052
RK
4032 emit_move_insn (target, object);
4033
46093b97 4034 return blk_object;
bbf6f052
RK
4035 }
4036
4037 /* If the structure is in a register or if the component
4038 is a bit field, we cannot use addressing to access it.
4039 Use bit-field techniques or SUBREG to store in it. */
4040
4fa52007
RK
4041 if (mode == VOIDmode
4042 || (mode != BLKmode && ! direct_store[(int) mode])
4043 || GET_CODE (target) == REG
c980ac49 4044 || GET_CODE (target) == SUBREG
ccc98036
RS
4045 /* If the field isn't aligned enough to store as an ordinary memref,
4046 store it as a bit field. */
c7a7ac46 4047 || (SLOW_UNALIGNED_ACCESS
ccc98036 4048 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
c7a7ac46 4049 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4050 {
906c4e36 4051 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 4052
ef19912d
RK
4053 /* If BITSIZE is narrower than the size of the type of EXP
4054 we will be narrowing TEMP. Normally, what's wanted are the
4055 low-order bits. However, if EXP's type is a record and this is
4056 big-endian machine, we want the upper BITSIZE bits. */
4057 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4058 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4059 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4060 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4061 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4062 - bitsize),
4063 temp, 1);
4064
bbd6cf73
RK
4065 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4066 MODE. */
4067 if (mode != VOIDmode && mode != BLKmode
4068 && mode != TYPE_MODE (TREE_TYPE (exp)))
4069 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4070
a281e72d
RK
4071 /* If the modes of TARGET and TEMP are both BLKmode, both
4072 must be in memory and BITPOS must be aligned on a byte
4073 boundary. If so, we simply do a block copy. */
4074 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4075 {
4076 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4077 || bitpos % BITS_PER_UNIT != 0)
4078 abort ();
4079
0086427c
RK
4080 target = change_address (target, VOIDmode,
4081 plus_constant (XEXP (target, 0),
a281e72d
RK
4082 bitpos / BITS_PER_UNIT));
4083
4084 emit_block_move (target, temp,
4085 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4086 / BITS_PER_UNIT),
4087 1);
4088
4089 return value_mode == VOIDmode ? const0_rtx : target;
4090 }
4091
bbf6f052
RK
4092 /* Store the value in the bitfield. */
4093 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4094 if (value_mode != VOIDmode)
4095 {
4096 /* The caller wants an rtx for the value. */
4097 /* If possible, avoid refetching from the bitfield itself. */
4098 if (width_mask != 0
4099 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4100 {
9074de27 4101 tree count;
5c4d7cfb 4102 enum machine_mode tmode;
86a2c12a 4103
5c4d7cfb
RS
4104 if (unsignedp)
4105 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4106 tmode = GET_MODE (temp);
86a2c12a
RS
4107 if (tmode == VOIDmode)
4108 tmode = value_mode;
5c4d7cfb
RS
4109 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4110 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4111 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4112 }
bbf6f052 4113 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
4114 NULL_RTX, value_mode, 0, align,
4115 total_size);
bbf6f052
RK
4116 }
4117 return const0_rtx;
4118 }
4119 else
4120 {
4121 rtx addr = XEXP (target, 0);
4122 rtx to_rtx;
4123
4124 /* If a value is wanted, it must be the lhs;
4125 so make the address stable for multiple use. */
4126
4127 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4128 && ! CONSTANT_ADDRESS_P (addr)
4129 /* A frame-pointer reference is already stable. */
4130 && ! (GET_CODE (addr) == PLUS
4131 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4132 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4133 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4134 addr = copy_to_reg (addr);
4135
4136 /* Now build a reference to just the desired component. */
4137
4138 to_rtx = change_address (target, mode,
4139 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
4140 MEM_IN_STRUCT_P (to_rtx) = 1;
4141
4142 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4143 }
4144}
4145\f
6be58303
JW
4146/* Return true if any object containing the innermost array is an unaligned
4147 packed structure field. */
4148
4149static int
4150get_inner_unaligned_p (exp)
4151 tree exp;
4152{
4153 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4154
4155 while (1)
4156 {
4157 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4158 {
4159 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4160 < needed_alignment)
4161 return 1;
4162 }
4163 else if (TREE_CODE (exp) != ARRAY_REF
4164 && TREE_CODE (exp) != NON_LVALUE_EXPR
4165 && ! ((TREE_CODE (exp) == NOP_EXPR
4166 || TREE_CODE (exp) == CONVERT_EXPR)
4167 && (TYPE_MODE (TREE_TYPE (exp))
4168 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4169 break;
4170
4171 exp = TREE_OPERAND (exp, 0);
4172 }
4173
4174 return 0;
4175}
4176
bbf6f052
RK
4177/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4178 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4179 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4180
4181 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4182 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4183 If the position of the field is variable, we store a tree
4184 giving the variable offset (in units) in *POFFSET.
4185 This offset is in addition to the bit position.
4186 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
4187
4188 If any of the extraction expressions is volatile,
4189 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4190
4191 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4192 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4193 is redundant.
4194
4195 If the field describes a variable-sized object, *PMODE is set to
4196 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4197 this case, but the address of the object can be found. */
bbf6f052
RK
4198
4199tree
4969d05d
RK
4200get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4201 punsignedp, pvolatilep)
bbf6f052
RK
4202 tree exp;
4203 int *pbitsize;
4204 int *pbitpos;
7bb0943f 4205 tree *poffset;
bbf6f052
RK
4206 enum machine_mode *pmode;
4207 int *punsignedp;
4208 int *pvolatilep;
4209{
b50d17a1 4210 tree orig_exp = exp;
bbf6f052
RK
4211 tree size_tree = 0;
4212 enum machine_mode mode = VOIDmode;
742920c7 4213 tree offset = integer_zero_node;
bbf6f052
RK
4214
4215 if (TREE_CODE (exp) == COMPONENT_REF)
4216 {
4217 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4218 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4219 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4220 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4221 }
4222 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4223 {
4224 size_tree = TREE_OPERAND (exp, 1);
4225 *punsignedp = TREE_UNSIGNED (exp);
4226 }
4227 else
4228 {
4229 mode = TYPE_MODE (TREE_TYPE (exp));
4230 *pbitsize = GET_MODE_BITSIZE (mode);
4231 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4232 }
4233
4234 if (size_tree)
4235 {
4236 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4237 mode = BLKmode, *pbitsize = -1;
4238 else
4239 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4240 }
4241
4242 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4243 and find the ultimate containing object. */
4244
4245 *pbitpos = 0;
4246
4247 while (1)
4248 {
7bb0943f 4249 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4250 {
7bb0943f
RS
4251 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4252 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4253 : TREE_OPERAND (exp, 2));
e6d8c385 4254 tree constant = integer_zero_node, var = pos;
bbf6f052 4255
e7f3c83f
RK
4256 /* If this field hasn't been filled in yet, don't go
4257 past it. This should only happen when folding expressions
4258 made during type construction. */
4259 if (pos == 0)
4260 break;
4261
e6d8c385
RK
4262 /* Assume here that the offset is a multiple of a unit.
4263 If not, there should be an explicitly added constant. */
4264 if (TREE_CODE (pos) == PLUS_EXPR
4265 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4266 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 4267 else if (TREE_CODE (pos) == INTEGER_CST)
e6d8c385
RK
4268 constant = pos, var = integer_zero_node;
4269
4270 *pbitpos += TREE_INT_CST_LOW (constant);
8d8c9ba9
RK
4271 offset = size_binop (PLUS_EXPR, offset,
4272 size_binop (EXACT_DIV_EXPR, var,
4273 size_int (BITS_PER_UNIT)));
bbf6f052 4274 }
bbf6f052 4275
742920c7 4276 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 4277 {
742920c7
RK
4278 /* This code is based on the code in case ARRAY_REF in expand_expr
4279 below. We assume here that the size of an array element is
4280 always an integral multiple of BITS_PER_UNIT. */
4281
4282 tree index = TREE_OPERAND (exp, 1);
4283 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4284 tree low_bound
4285 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4286 tree index_type = TREE_TYPE (index);
4287
4288 if (! integer_zerop (low_bound))
4289 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4290
4c08eef0 4291 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
742920c7 4292 {
4c08eef0
RK
4293 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4294 index);
742920c7
RK
4295 index_type = TREE_TYPE (index);
4296 }
4297
4298 index = fold (build (MULT_EXPR, index_type, index,
4299 TYPE_SIZE (TREE_TYPE (exp))));
4300
4301 if (TREE_CODE (index) == INTEGER_CST
4302 && TREE_INT_CST_HIGH (index) == 0)
4303 *pbitpos += TREE_INT_CST_LOW (index);
4304 else
4305 offset = size_binop (PLUS_EXPR, offset,
4306 size_binop (FLOOR_DIV_EXPR, index,
4307 size_int (BITS_PER_UNIT)));
bbf6f052
RK
4308 }
4309 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4310 && ! ((TREE_CODE (exp) == NOP_EXPR
4311 || TREE_CODE (exp) == CONVERT_EXPR)
7f62854a
RK
4312 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4313 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4314 != UNION_TYPE))
bbf6f052
RK
4315 && (TYPE_MODE (TREE_TYPE (exp))
4316 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4317 break;
7bb0943f
RS
4318
4319 /* If any reference in the chain is volatile, the effect is volatile. */
4320 if (TREE_THIS_VOLATILE (exp))
4321 *pvolatilep = 1;
bbf6f052
RK
4322 exp = TREE_OPERAND (exp, 0);
4323 }
4324
742920c7
RK
4325 if (integer_zerop (offset))
4326 offset = 0;
4327
b50d17a1
RK
4328 if (offset != 0 && contains_placeholder_p (offset))
4329 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4330
bbf6f052 4331 *pmode = mode;
7bb0943f 4332 *poffset = offset;
bbf6f052
RK
4333 return exp;
4334}
4335\f
4336/* Given an rtx VALUE that may contain additions and multiplications,
4337 return an equivalent value that just refers to a register or memory.
4338 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
4339 and returning a pseudo-register containing the value.
4340
4341 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
4342
4343rtx
4344force_operand (value, target)
4345 rtx value, target;
4346{
4347 register optab binoptab = 0;
4348 /* Use a temporary to force order of execution of calls to
4349 `force_operand'. */
4350 rtx tmp;
4351 register rtx op2;
4352 /* Use subtarget as the target for operand 0 of a binary operation. */
4353 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4354
4355 if (GET_CODE (value) == PLUS)
4356 binoptab = add_optab;
4357 else if (GET_CODE (value) == MINUS)
4358 binoptab = sub_optab;
4359 else if (GET_CODE (value) == MULT)
4360 {
4361 op2 = XEXP (value, 1);
4362 if (!CONSTANT_P (op2)
4363 && !(GET_CODE (op2) == REG && op2 != subtarget))
4364 subtarget = 0;
4365 tmp = force_operand (XEXP (value, 0), subtarget);
4366 return expand_mult (GET_MODE (value), tmp,
906c4e36 4367 force_operand (op2, NULL_RTX),
bbf6f052
RK
4368 target, 0);
4369 }
4370
4371 if (binoptab)
4372 {
4373 op2 = XEXP (value, 1);
4374 if (!CONSTANT_P (op2)
4375 && !(GET_CODE (op2) == REG && op2 != subtarget))
4376 subtarget = 0;
4377 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4378 {
4379 binoptab = add_optab;
4380 op2 = negate_rtx (GET_MODE (value), op2);
4381 }
4382
4383 /* Check for an addition with OP2 a constant integer and our first
4384 operand a PLUS of a virtual register and something else. In that
4385 case, we want to emit the sum of the virtual register and the
4386 constant first and then add the other value. This allows virtual
4387 register instantiation to simply modify the constant rather than
4388 creating another one around this addition. */
4389 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4390 && GET_CODE (XEXP (value, 0)) == PLUS
4391 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4392 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4393 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4394 {
4395 rtx temp = expand_binop (GET_MODE (value), binoptab,
4396 XEXP (XEXP (value, 0), 0), op2,
4397 subtarget, 0, OPTAB_LIB_WIDEN);
4398 return expand_binop (GET_MODE (value), binoptab, temp,
4399 force_operand (XEXP (XEXP (value, 0), 1), 0),
4400 target, 0, OPTAB_LIB_WIDEN);
4401 }
4402
4403 tmp = force_operand (XEXP (value, 0), subtarget);
4404 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 4405 force_operand (op2, NULL_RTX),
bbf6f052 4406 target, 0, OPTAB_LIB_WIDEN);
8008b228 4407 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
4408 because the only operations we are expanding here are signed ones. */
4409 }
4410 return value;
4411}
4412\f
4413/* Subroutine of expand_expr:
4414 save the non-copied parts (LIST) of an expr (LHS), and return a list
4415 which can restore these values to their previous values,
4416 should something modify their storage. */
4417
4418static tree
4419save_noncopied_parts (lhs, list)
4420 tree lhs;
4421 tree list;
4422{
4423 tree tail;
4424 tree parts = 0;
4425
4426 for (tail = list; tail; tail = TREE_CHAIN (tail))
4427 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4428 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4429 else
4430 {
4431 tree part = TREE_VALUE (tail);
4432 tree part_type = TREE_TYPE (part);
906c4e36 4433 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
06089a8b 4434 rtx target = assign_temp (part_type, 0, 1, 1);
bbf6f052 4435 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 4436 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 4437 parts = tree_cons (to_be_saved,
906c4e36
RK
4438 build (RTL_EXPR, part_type, NULL_TREE,
4439 (tree) target),
bbf6f052
RK
4440 parts);
4441 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4442 }
4443 return parts;
4444}
4445
4446/* Subroutine of expand_expr:
4447 record the non-copied parts (LIST) of an expr (LHS), and return a list
4448 which specifies the initial values of these parts. */
4449
4450static tree
4451init_noncopied_parts (lhs, list)
4452 tree lhs;
4453 tree list;
4454{
4455 tree tail;
4456 tree parts = 0;
4457
4458 for (tail = list; tail; tail = TREE_CHAIN (tail))
4459 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4460 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4461 else
4462 {
4463 tree part = TREE_VALUE (tail);
4464 tree part_type = TREE_TYPE (part);
906c4e36 4465 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
4466 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4467 }
4468 return parts;
4469}
4470
4471/* Subroutine of expand_expr: return nonzero iff there is no way that
4472 EXP can reference X, which is being modified. */
4473
4474static int
4475safe_from_p (x, exp)
4476 rtx x;
4477 tree exp;
4478{
4479 rtx exp_rtl = 0;
4480 int i, nops;
4481
6676e72f
RK
4482 if (x == 0
4483 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
4484 have no way of allocating temporaries of variable size
4485 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4486 So we assume here that something at a higher level has prevented a
f4510f37
RK
4487 clash. This is somewhat bogus, but the best we can do. Only
4488 do this when X is BLKmode. */
45524ce9 4489 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
f4510f37 4490 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
4491 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4492 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4493 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4494 != INTEGER_CST)
f4510f37 4495 && GET_MODE (x) == BLKmode))
bbf6f052
RK
4496 return 1;
4497
4498 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4499 find the underlying pseudo. */
4500 if (GET_CODE (x) == SUBREG)
4501 {
4502 x = SUBREG_REG (x);
4503 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4504 return 0;
4505 }
4506
4507 /* If X is a location in the outgoing argument area, it is always safe. */
4508 if (GET_CODE (x) == MEM
4509 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4510 || (GET_CODE (XEXP (x, 0)) == PLUS
4511 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4512 return 1;
4513
4514 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4515 {
4516 case 'd':
4517 exp_rtl = DECL_RTL (exp);
4518 break;
4519
4520 case 'c':
4521 return 1;
4522
4523 case 'x':
4524 if (TREE_CODE (exp) == TREE_LIST)
f32fd778
RS
4525 return ((TREE_VALUE (exp) == 0
4526 || safe_from_p (x, TREE_VALUE (exp)))
bbf6f052
RK
4527 && (TREE_CHAIN (exp) == 0
4528 || safe_from_p (x, TREE_CHAIN (exp))));
4529 else
4530 return 0;
4531
4532 case '1':
4533 return safe_from_p (x, TREE_OPERAND (exp, 0));
4534
4535 case '2':
4536 case '<':
4537 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4538 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4539
4540 case 'e':
4541 case 'r':
4542 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4543 the expression. If it is set, we conflict iff we are that rtx or
4544 both are in memory. Otherwise, we check all operands of the
4545 expression recursively. */
4546
4547 switch (TREE_CODE (exp))
4548 {
4549 case ADDR_EXPR:
e44842fe
RK
4550 return (staticp (TREE_OPERAND (exp, 0))
4551 || safe_from_p (x, TREE_OPERAND (exp, 0)));
bbf6f052
RK
4552
4553 case INDIRECT_REF:
4554 if (GET_CODE (x) == MEM)
4555 return 0;
4556 break;
4557
4558 case CALL_EXPR:
4559 exp_rtl = CALL_EXPR_RTL (exp);
4560 if (exp_rtl == 0)
4561 {
4562 /* Assume that the call will clobber all hard registers and
4563 all of memory. */
4564 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4565 || GET_CODE (x) == MEM)
4566 return 0;
4567 }
4568
4569 break;
4570
4571 case RTL_EXPR:
3bb5826a
RK
4572 /* If a sequence exists, we would have to scan every instruction
4573 in the sequence to see if it was safe. This is probably not
4574 worthwhile. */
4575 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
4576 return 0;
4577
3bb5826a 4578 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
4579 break;
4580
4581 case WITH_CLEANUP_EXPR:
4582 exp_rtl = RTL_EXPR_RTL (exp);
4583 break;
4584
5dab5552
MS
4585 case CLEANUP_POINT_EXPR:
4586 return safe_from_p (x, TREE_OPERAND (exp, 0));
4587
bbf6f052
RK
4588 case SAVE_EXPR:
4589 exp_rtl = SAVE_EXPR_RTL (exp);
4590 break;
4591
8129842c
RS
4592 case BIND_EXPR:
4593 /* The only operand we look at is operand 1. The rest aren't
4594 part of the expression. */
4595 return safe_from_p (x, TREE_OPERAND (exp, 1));
4596
bbf6f052 4597 case METHOD_CALL_EXPR:
0f41302f 4598 /* This takes a rtx argument, but shouldn't appear here. */
bbf6f052
RK
4599 abort ();
4600 }
4601
4602 /* If we have an rtx, we do not need to scan our operands. */
4603 if (exp_rtl)
4604 break;
4605
4606 nops = tree_code_length[(int) TREE_CODE (exp)];
4607 for (i = 0; i < nops; i++)
4608 if (TREE_OPERAND (exp, i) != 0
4609 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4610 return 0;
4611 }
4612
4613 /* If we have an rtl, find any enclosed object. Then see if we conflict
4614 with it. */
4615 if (exp_rtl)
4616 {
4617 if (GET_CODE (exp_rtl) == SUBREG)
4618 {
4619 exp_rtl = SUBREG_REG (exp_rtl);
4620 if (GET_CODE (exp_rtl) == REG
4621 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4622 return 0;
4623 }
4624
4625 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4626 are memory and EXP is not readonly. */
4627 return ! (rtx_equal_p (x, exp_rtl)
4628 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4629 && ! TREE_READONLY (exp)));
4630 }
4631
4632 /* If we reach here, it is safe. */
4633 return 1;
4634}
4635
4636/* Subroutine of expand_expr: return nonzero iff EXP is an
4637 expression whose type is statically determinable. */
4638
4639static int
4640fixed_type_p (exp)
4641 tree exp;
4642{
4643 if (TREE_CODE (exp) == PARM_DECL
4644 || TREE_CODE (exp) == VAR_DECL
4645 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4646 || TREE_CODE (exp) == COMPONENT_REF
4647 || TREE_CODE (exp) == ARRAY_REF)
4648 return 1;
4649 return 0;
4650}
01c8a7c8
RK
4651
4652/* Subroutine of expand_expr: return rtx if EXP is a
4653 variable or parameter; else return 0. */
4654
4655static rtx
4656var_rtx (exp)
4657 tree exp;
4658{
4659 STRIP_NOPS (exp);
4660 switch (TREE_CODE (exp))
4661 {
4662 case PARM_DECL:
4663 case VAR_DECL:
4664 return DECL_RTL (exp);
4665 default:
4666 return 0;
4667 }
4668}
bbf6f052
RK
4669\f
4670/* expand_expr: generate code for computing expression EXP.
4671 An rtx for the computed value is returned. The value is never null.
4672 In the case of a void EXP, const0_rtx is returned.
4673
4674 The value may be stored in TARGET if TARGET is nonzero.
4675 TARGET is just a suggestion; callers must assume that
4676 the rtx returned may not be the same as TARGET.
4677
4678 If TARGET is CONST0_RTX, it means that the value will be ignored.
4679
4680 If TMODE is not VOIDmode, it suggests generating the
4681 result in mode TMODE. But this is done only when convenient.
4682 Otherwise, TMODE is ignored and the value generated in its natural mode.
4683 TMODE is just a suggestion; callers must assume that
4684 the rtx returned may not have mode TMODE.
4685
d6a5ac33
RK
4686 Note that TARGET may have neither TMODE nor MODE. In that case, it
4687 probably will not be used.
bbf6f052
RK
4688
4689 If MODIFIER is EXPAND_SUM then when EXP is an addition
4690 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4691 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4692 products as above, or REG or MEM, or constant.
4693 Ordinarily in such cases we would output mul or add instructions
4694 and then return a pseudo reg containing the sum.
4695
4696 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4697 it also marks a label as absolutely required (it can't be dead).
26fcb35a 4698 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
4699 This is used for outputting expressions used in initializers.
4700
4701 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4702 with a constant address even if that address is not normally legitimate.
4703 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
bbf6f052
RK
4704
4705rtx
4706expand_expr (exp, target, tmode, modifier)
4707 register tree exp;
4708 rtx target;
4709 enum machine_mode tmode;
4710 enum expand_modifier modifier;
4711{
b50d17a1
RK
4712 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4713 This is static so it will be accessible to our recursive callees. */
4714 static tree placeholder_list = 0;
bbf6f052
RK
4715 register rtx op0, op1, temp;
4716 tree type = TREE_TYPE (exp);
4717 int unsignedp = TREE_UNSIGNED (type);
4718 register enum machine_mode mode = TYPE_MODE (type);
4719 register enum tree_code code = TREE_CODE (exp);
4720 optab this_optab;
4721 /* Use subtarget as the target for operand 0 of a binary operation. */
4722 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4723 rtx original_target = target;
ca695ac9 4724 /* Maybe defer this until sure not doing bytecode? */
dd27116b
RK
4725 int ignore = (target == const0_rtx
4726 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
4727 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4728 || code == COND_EXPR)
dd27116b 4729 && TREE_CODE (type) == VOID_TYPE));
bbf6f052
RK
4730 tree context;
4731
ca695ac9 4732
1d556704 4733 if (output_bytecode && modifier != EXPAND_INITIALIZER)
ca695ac9
JB
4734 {
4735 bc_expand_expr (exp);
4736 return NULL;
4737 }
4738
bbf6f052
RK
4739 /* Don't use hard regs as subtargets, because the combiner
4740 can only handle pseudo regs. */
4741 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4742 subtarget = 0;
4743 /* Avoid subtargets inside loops,
4744 since they hide some invariant expressions. */
4745 if (preserve_subexpressions_p ())
4746 subtarget = 0;
4747
dd27116b
RK
4748 /* If we are going to ignore this result, we need only do something
4749 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
4750 is, short-circuit the most common cases here. Note that we must
4751 not call expand_expr with anything but const0_rtx in case this
4752 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 4753
dd27116b
RK
4754 if (ignore)
4755 {
4756 if (! TREE_SIDE_EFFECTS (exp))
4757 return const0_rtx;
4758
4759 /* Ensure we reference a volatile object even if value is ignored. */
4760 if (TREE_THIS_VOLATILE (exp)
4761 && TREE_CODE (exp) != FUNCTION_DECL
4762 && mode != VOIDmode && mode != BLKmode)
4763 {
4764 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4765 if (GET_CODE (temp) == MEM)
4766 temp = copy_to_reg (temp);
4767 return const0_rtx;
4768 }
4769
4770 if (TREE_CODE_CLASS (code) == '1')
4771 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4772 VOIDmode, modifier);
4773 else if (TREE_CODE_CLASS (code) == '2'
4774 || TREE_CODE_CLASS (code) == '<')
4775 {
4776 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4777 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4778 return const0_rtx;
4779 }
4780 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4781 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4782 /* If the second operand has no side effects, just evaluate
0f41302f 4783 the first. */
dd27116b
RK
4784 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4785 VOIDmode, modifier);
dd27116b 4786
90764a87 4787 target = 0;
dd27116b 4788 }
bbf6f052 4789
e44842fe
RK
4790 /* If will do cse, generate all results into pseudo registers
4791 since 1) that allows cse to find more things
4792 and 2) otherwise cse could produce an insn the machine
4793 cannot support. */
4794
bbf6f052
RK
4795 if (! cse_not_expected && mode != BLKmode && target
4796 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4797 target = subtarget;
4798
bbf6f052
RK
4799 switch (code)
4800 {
4801 case LABEL_DECL:
b552441b
RS
4802 {
4803 tree function = decl_function_context (exp);
4804 /* Handle using a label in a containing function. */
4805 if (function != current_function_decl && function != 0)
4806 {
4807 struct function *p = find_function_data (function);
4808 /* Allocate in the memory associated with the function
4809 that the label is in. */
4810 push_obstacks (p->function_obstack,
4811 p->function_maybepermanent_obstack);
4812
4813 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4814 label_rtx (exp), p->forced_labels);
4815 pop_obstacks ();
4816 }
4817 else if (modifier == EXPAND_INITIALIZER)
4818 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4819 label_rtx (exp), forced_labels);
26fcb35a 4820 temp = gen_rtx (MEM, FUNCTION_MODE,
b552441b 4821 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
26fcb35a
RS
4822 if (function != current_function_decl && function != 0)
4823 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4824 return temp;
b552441b 4825 }
bbf6f052
RK
4826
4827 case PARM_DECL:
4828 if (DECL_RTL (exp) == 0)
4829 {
4830 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 4831 return CONST0_RTX (mode);
bbf6f052
RK
4832 }
4833
0f41302f 4834 /* ... fall through ... */
d6a5ac33 4835
bbf6f052 4836 case VAR_DECL:
2dca20cd
RS
4837 /* If a static var's type was incomplete when the decl was written,
4838 but the type is complete now, lay out the decl now. */
4839 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4840 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4841 {
4842 push_obstacks_nochange ();
4843 end_temporary_allocation ();
4844 layout_decl (exp, 0);
4845 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4846 pop_obstacks ();
4847 }
d6a5ac33 4848
0f41302f 4849 /* ... fall through ... */
d6a5ac33 4850
2dca20cd 4851 case FUNCTION_DECL:
bbf6f052
RK
4852 case RESULT_DECL:
4853 if (DECL_RTL (exp) == 0)
4854 abort ();
d6a5ac33 4855
e44842fe
RK
4856 /* Ensure variable marked as used even if it doesn't go through
4857 a parser. If it hasn't be used yet, write out an external
4858 definition. */
4859 if (! TREE_USED (exp))
4860 {
4861 assemble_external (exp);
4862 TREE_USED (exp) = 1;
4863 }
4864
dc6d66b3
RK
4865 /* Show we haven't gotten RTL for this yet. */
4866 temp = 0;
4867
bbf6f052
RK
4868 /* Handle variables inherited from containing functions. */
4869 context = decl_function_context (exp);
4870
4871 /* We treat inline_function_decl as an alias for the current function
4872 because that is the inline function whose vars, types, etc.
4873 are being merged into the current function.
4874 See expand_inline_function. */
d6a5ac33 4875
bbf6f052
RK
4876 if (context != 0 && context != current_function_decl
4877 && context != inline_function_decl
4878 /* If var is static, we don't need a static chain to access it. */
4879 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4880 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4881 {
4882 rtx addr;
4883
4884 /* Mark as non-local and addressable. */
81feeecb 4885 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
4886 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4887 abort ();
bbf6f052
RK
4888 mark_addressable (exp);
4889 if (GET_CODE (DECL_RTL (exp)) != MEM)
4890 abort ();
4891 addr = XEXP (DECL_RTL (exp), 0);
4892 if (GET_CODE (addr) == MEM)
d6a5ac33
RK
4893 addr = gen_rtx (MEM, Pmode,
4894 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
4895 else
4896 addr = fix_lexical_addr (addr, exp);
dc6d66b3 4897 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 4898 }
4af3895e 4899
bbf6f052
RK
4900 /* This is the case of an array whose size is to be determined
4901 from its initializer, while the initializer is still being parsed.
4902 See expand_decl. */
d6a5ac33 4903
dc6d66b3
RK
4904 else if (GET_CODE (DECL_RTL (exp)) == MEM
4905 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4906 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 4907 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
4908
4909 /* If DECL_RTL is memory, we are in the normal case and either
4910 the address is not valid or it is not a register and -fforce-addr
4911 is specified, get the address into a register. */
4912
dc6d66b3
RK
4913 else if (GET_CODE (DECL_RTL (exp)) == MEM
4914 && modifier != EXPAND_CONST_ADDRESS
4915 && modifier != EXPAND_SUM
4916 && modifier != EXPAND_INITIALIZER
4917 && (! memory_address_p (DECL_MODE (exp),
4918 XEXP (DECL_RTL (exp), 0))
4919 || (flag_force_addr
4920 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4921 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 4922 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 4923
dc6d66b3
RK
4924 /* If we got something, return it. But first, set the alignment
4925 the address is a register. */
4926 if (temp != 0)
4927 {
4928 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4929 mark_reg_pointer (XEXP (temp, 0),
4930 DECL_ALIGN (exp) / BITS_PER_UNIT);
4931
4932 return temp;
4933 }
4934
1499e0a8
RK
4935 /* If the mode of DECL_RTL does not match that of the decl, it
4936 must be a promoted value. We return a SUBREG of the wanted mode,
4937 but mark it so that we know that it was already extended. */
4938
4939 if (GET_CODE (DECL_RTL (exp)) == REG
4940 && GET_MODE (DECL_RTL (exp)) != mode)
4941 {
1499e0a8
RK
4942 /* Get the signedness used for this variable. Ensure we get the
4943 same mode we got when the variable was declared. */
78911e8b
RK
4944 if (GET_MODE (DECL_RTL (exp))
4945 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
4946 abort ();
4947
4948 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4949 SUBREG_PROMOTED_VAR_P (temp) = 1;
4950 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4951 return temp;
4952 }
4953
bbf6f052
RK
4954 return DECL_RTL (exp);
4955
4956 case INTEGER_CST:
4957 return immed_double_const (TREE_INT_CST_LOW (exp),
4958 TREE_INT_CST_HIGH (exp),
4959 mode);
4960
4961 case CONST_DECL:
4962 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4963
4964 case REAL_CST:
4965 /* If optimized, generate immediate CONST_DOUBLE
4966 which will be turned into memory by reload if necessary.
4967
4968 We used to force a register so that loop.c could see it. But
4969 this does not allow gen_* patterns to perform optimizations with
4970 the constants. It also produces two insns in cases like "x = 1.0;".
4971 On most machines, floating-point constants are not permitted in
4972 many insns, so we'd end up copying it to a register in any case.
4973
4974 Now, we do the copying in expand_binop, if appropriate. */
4975 return immed_real_const (exp);
4976
4977 case COMPLEX_CST:
4978 case STRING_CST:
4979 if (! TREE_CST_RTL (exp))
4980 output_constant_def (exp);
4981
4982 /* TREE_CST_RTL probably contains a constant address.
4983 On RISC machines where a constant address isn't valid,
4984 make some insns to get that address into a register. */
4985 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4986 && modifier != EXPAND_CONST_ADDRESS
4987 && modifier != EXPAND_INITIALIZER
4988 && modifier != EXPAND_SUM
d6a5ac33
RK
4989 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4990 || (flag_force_addr
4991 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
4992 return change_address (TREE_CST_RTL (exp), VOIDmode,
4993 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4994 return TREE_CST_RTL (exp);
4995
4996 case SAVE_EXPR:
4997 context = decl_function_context (exp);
d6a5ac33 4998
bbf6f052
RK
4999 /* We treat inline_function_decl as an alias for the current function
5000 because that is the inline function whose vars, types, etc.
5001 are being merged into the current function.
5002 See expand_inline_function. */
5003 if (context == current_function_decl || context == inline_function_decl)
5004 context = 0;
5005
5006 /* If this is non-local, handle it. */
5007 if (context)
5008 {
5009 temp = SAVE_EXPR_RTL (exp);
5010 if (temp && GET_CODE (temp) == REG)
5011 {
5012 put_var_into_stack (exp);
5013 temp = SAVE_EXPR_RTL (exp);
5014 }
5015 if (temp == 0 || GET_CODE (temp) != MEM)
5016 abort ();
5017 return change_address (temp, mode,
5018 fix_lexical_addr (XEXP (temp, 0), exp));
5019 }
5020 if (SAVE_EXPR_RTL (exp) == 0)
5021 {
06089a8b
RK
5022 if (mode == VOIDmode)
5023 temp = const0_rtx;
5024 else
5025 temp = assign_temp (type, 0, 0, 0);
1499e0a8 5026
bbf6f052 5027 SAVE_EXPR_RTL (exp) = temp;
bbf6f052
RK
5028 if (!optimize && GET_CODE (temp) == REG)
5029 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5030 save_expr_regs);
ff78f773
RK
5031
5032 /* If the mode of TEMP does not match that of the expression, it
5033 must be a promoted value. We pass store_expr a SUBREG of the
5034 wanted mode but mark it so that we know that it was already
5035 extended. Note that `unsignedp' was modified above in
5036 this case. */
5037
5038 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5039 {
5040 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5041 SUBREG_PROMOTED_VAR_P (temp) = 1;
5042 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5043 }
5044
4c7a0be9
JW
5045 if (temp == const0_rtx)
5046 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5047 else
5048 store_expr (TREE_OPERAND (exp, 0), temp, 0);
bbf6f052 5049 }
1499e0a8
RK
5050
5051 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5052 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 5053 but mark it so that we know that it was already extended. */
1499e0a8
RK
5054
5055 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5056 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5057 {
e70d22c8
RK
5058 /* Compute the signedness and make the proper SUBREG. */
5059 promote_mode (type, mode, &unsignedp, 0);
5060 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
5061 SUBREG_PROMOTED_VAR_P (temp) = 1;
5062 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5063 return temp;
5064 }
5065
bbf6f052
RK
5066 return SAVE_EXPR_RTL (exp);
5067
679163cf
MS
5068 case UNSAVE_EXPR:
5069 {
5070 rtx temp;
5071 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5072 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5073 return temp;
5074 }
5075
b50d17a1
RK
5076 case PLACEHOLDER_EXPR:
5077 /* If there is an object on the head of the placeholder list,
5078 see if some object in it's references is of type TYPE. For
5079 further information, see tree.def. */
5080 if (placeholder_list)
5081 {
5082 tree object;
f59d43a9 5083 tree old_list = placeholder_list;
b50d17a1
RK
5084
5085 for (object = TREE_PURPOSE (placeholder_list);
330446eb
RK
5086 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5087 != TYPE_MAIN_VARIANT (type))
b50d17a1 5088 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4805bfa0
RK
5089 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
5090 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
5091 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
b50d17a1
RK
5092 object = TREE_OPERAND (object, 0))
5093 ;
5094
330446eb
RK
5095 if (object != 0
5096 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5097 == TYPE_MAIN_VARIANT (type)))
f59d43a9
RK
5098 {
5099 /* Expand this object skipping the list entries before
5100 it was found in case it is also a PLACEHOLDER_EXPR.
5101 In that case, we want to translate it using subsequent
5102 entries. */
5103 placeholder_list = TREE_CHAIN (placeholder_list);
5104 temp = expand_expr (object, original_target, tmode, modifier);
5105 placeholder_list = old_list;
5106 return temp;
5107 }
b50d17a1
RK
5108 }
5109
5110 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5111 abort ();
5112
5113 case WITH_RECORD_EXPR:
5114 /* Put the object on the placeholder list, expand our first operand,
5115 and pop the list. */
5116 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5117 placeholder_list);
5118 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5119 tmode, modifier);
5120 placeholder_list = TREE_CHAIN (placeholder_list);
5121 return target;
5122
bbf6f052 5123 case EXIT_EXPR:
e44842fe
RK
5124 expand_exit_loop_if_false (NULL_PTR,
5125 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
5126 return const0_rtx;
5127
5128 case LOOP_EXPR:
0088fcb1 5129 push_temp_slots ();
bbf6f052
RK
5130 expand_start_loop (1);
5131 expand_expr_stmt (TREE_OPERAND (exp, 0));
5132 expand_end_loop ();
0088fcb1 5133 pop_temp_slots ();
bbf6f052
RK
5134
5135 return const0_rtx;
5136
5137 case BIND_EXPR:
5138 {
5139 tree vars = TREE_OPERAND (exp, 0);
5140 int vars_need_expansion = 0;
5141
5142 /* Need to open a binding contour here because
5143 if there are any cleanups they most be contained here. */
5144 expand_start_bindings (0);
5145
2df53c0b
RS
5146 /* Mark the corresponding BLOCK for output in its proper place. */
5147 if (TREE_OPERAND (exp, 2) != 0
5148 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5149 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
5150
5151 /* If VARS have not yet been expanded, expand them now. */
5152 while (vars)
5153 {
5154 if (DECL_RTL (vars) == 0)
5155 {
5156 vars_need_expansion = 1;
5157 expand_decl (vars);
5158 }
5159 expand_decl_init (vars);
5160 vars = TREE_CHAIN (vars);
5161 }
5162
5163 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5164
5165 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5166
5167 return temp;
5168 }
5169
5170 case RTL_EXPR:
5171 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5172 abort ();
5173 emit_insns (RTL_EXPR_SEQUENCE (exp));
5174 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
99310285 5175 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
ca814259 5176 free_temps_for_rtl_expr (exp);
bbf6f052
RK
5177 return RTL_EXPR_RTL (exp);
5178
5179 case CONSTRUCTOR:
dd27116b
RK
5180 /* If we don't need the result, just ensure we evaluate any
5181 subexpressions. */
5182 if (ignore)
5183 {
5184 tree elt;
5185 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5186 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5187 return const0_rtx;
5188 }
3207b172 5189
4af3895e
JVA
5190 /* All elts simple constants => refer to a constant in memory. But
5191 if this is a non-BLKmode mode, let it store a field at a time
5192 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 5193 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
5194 store directly into the target unless the type is large enough
5195 that memcpy will be used. If we are making an initializer and
3207b172 5196 all operands are constant, put it in memory as well. */
dd27116b 5197 else if ((TREE_STATIC (exp)
3207b172
RK
5198 && ((mode == BLKmode
5199 && ! (target != 0 && safe_from_p (target, exp)))
d720b9d1
RK
5200 || TREE_ADDRESSABLE (exp)
5201 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5202 && (move_by_pieces_ninsns
67225c15
RK
5203 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5204 TYPE_ALIGN (type) / BITS_PER_UNIT)
9de08200
RK
5205 > MOVE_RATIO)
5206 && ! mostly_zeros_p (exp))))
dd27116b 5207 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
5208 {
5209 rtx constructor = output_constant_def (exp);
b552441b
RS
5210 if (modifier != EXPAND_CONST_ADDRESS
5211 && modifier != EXPAND_INITIALIZER
5212 && modifier != EXPAND_SUM
d6a5ac33
RK
5213 && (! memory_address_p (GET_MODE (constructor),
5214 XEXP (constructor, 0))
5215 || (flag_force_addr
5216 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
5217 constructor = change_address (constructor, VOIDmode,
5218 XEXP (constructor, 0));
5219 return constructor;
5220 }
5221
bbf6f052
RK
5222 else
5223 {
e9ac02a6
JW
5224 /* Handle calls that pass values in multiple non-contiguous
5225 locations. The Irix 6 ABI has examples of this. */
5226 if (target == 0 || ! safe_from_p (target, exp)
5227 || GET_CODE (target) == PARALLEL)
06089a8b
RK
5228 {
5229 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5230 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5231 else
5232 target = assign_temp (type, 0, 1, 1);
5233 }
07604beb
RK
5234
5235 if (TREE_READONLY (exp))
5236 {
9151b3bf
RK
5237 if (GET_CODE (target) == MEM)
5238 target = change_address (target, GET_MODE (target),
5239 XEXP (target, 0));
07604beb
RK
5240 RTX_UNCHANGING_P (target) = 1;
5241 }
5242
e1a43f73 5243 store_constructor (exp, target, 0);
bbf6f052
RK
5244 return target;
5245 }
5246
5247 case INDIRECT_REF:
5248 {
5249 tree exp1 = TREE_OPERAND (exp, 0);
5250 tree exp2;
5251
405f0da6
JW
5252 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5253 op0 = memory_address (mode, op0);
8c8a8e34
JW
5254
5255 temp = gen_rtx (MEM, mode, op0);
5256 /* If address was computed by addition,
5257 mark this as an element of an aggregate. */
5258 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5259 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5260 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
05e3bdb9 5261 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
8c8a8e34
JW
5262 || (TREE_CODE (exp1) == ADDR_EXPR
5263 && (exp2 = TREE_OPERAND (exp1, 0))
05e3bdb9 5264 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
8c8a8e34 5265 MEM_IN_STRUCT_P (temp) = 1;
2c4c436a 5266 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
1125706f
RK
5267
5268 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5269 here, because, in C and C++, the fact that a location is accessed
5270 through a pointer to const does not mean that the value there can
5271 never change. Languages where it can never change should
5272 also set TREE_STATIC. */
5cb7a25a 5273 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
8c8a8e34
JW
5274 return temp;
5275 }
bbf6f052
RK
5276
5277 case ARRAY_REF:
742920c7
RK
5278 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5279 abort ();
bbf6f052 5280
bbf6f052 5281 {
742920c7
RK
5282 tree array = TREE_OPERAND (exp, 0);
5283 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5284 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5285 tree index = TREE_OPERAND (exp, 1);
5286 tree index_type = TREE_TYPE (index);
bbf6f052 5287 int i;
bbf6f052 5288
b50d17a1
RK
5289 if (TREE_CODE (low_bound) != INTEGER_CST
5290 && contains_placeholder_p (low_bound))
5291 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5292
d4c89139
PB
5293 /* Optimize the special-case of a zero lower bound.
5294
5295 We convert the low_bound to sizetype to avoid some problems
5296 with constant folding. (E.g. suppose the lower bound is 1,
5297 and its mode is QI. Without the conversion, (ARRAY
5298 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5299 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5300
5301 But sizetype isn't quite right either (especially if
5302 the lowbound is negative). FIXME */
5303
742920c7 5304 if (! integer_zerop (low_bound))
d4c89139
PB
5305 index = fold (build (MINUS_EXPR, index_type, index,
5306 convert (sizetype, low_bound)));
742920c7 5307
6be58303
JW
5308 if ((TREE_CODE (index) != INTEGER_CST
5309 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
c7a7ac46 5310 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
742920c7 5311 {
6be58303
JW
5312 /* Nonconstant array index or nonconstant element size, and
5313 not an array in an unaligned (packed) structure field.
742920c7
RK
5314 Generate the tree for *(&array+index) and expand that,
5315 except do it in a language-independent way
5316 and don't complain about non-lvalue arrays.
5317 `mark_addressable' should already have been called
5318 for any array for which this case will be reached. */
5319
5320 /* Don't forget the const or volatile flag from the array
0f41302f 5321 element. */
742920c7
RK
5322 tree variant_type = build_type_variant (type,
5323 TREE_READONLY (exp),
5324 TREE_THIS_VOLATILE (exp));
5325 tree array_adr = build1 (ADDR_EXPR,
5326 build_pointer_type (variant_type), array);
5327 tree elt;
b50d17a1 5328 tree size = size_in_bytes (type);
742920c7 5329
4c08eef0
RK
5330 /* Convert the integer argument to a type the same size as sizetype
5331 so the multiply won't overflow spuriously. */
5332 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5333 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5334 index);
742920c7 5335
b50d17a1
RK
5336 if (TREE_CODE (size) != INTEGER_CST
5337 && contains_placeholder_p (size))
5338 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5339
742920c7
RK
5340 /* Don't think the address has side effects
5341 just because the array does.
5342 (In some cases the address might have side effects,
5343 and we fail to record that fact here. However, it should not
5344 matter, since expand_expr should not care.) */
5345 TREE_SIDE_EFFECTS (array_adr) = 0;
5346
2ae342f7
RK
5347 elt
5348 = build1
5349 (INDIRECT_REF, type,
5350 fold (build (PLUS_EXPR,
5351 TYPE_POINTER_TO (variant_type),
5352 array_adr,
5353 fold
5354 (build1
5355 (NOP_EXPR,
5356 TYPE_POINTER_TO (variant_type),
5357 fold (build (MULT_EXPR, TREE_TYPE (index),
5358 index,
5359 convert (TREE_TYPE (index),
5360 size))))))));;
742920c7
RK
5361
5362 /* Volatility, etc., of new expression is same as old
5363 expression. */
5364 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5365 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5366 TREE_READONLY (elt) = TREE_READONLY (exp);
5367
5368 return expand_expr (elt, target, tmode, modifier);
5369 }
5370
5371 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
5372 This is not done in fold so it won't happen inside &.
5373 Don't fold if this is for wide characters since it's too
5374 difficult to do correctly and this is a very rare case. */
742920c7
RK
5375
5376 if (TREE_CODE (array) == STRING_CST
5377 && TREE_CODE (index) == INTEGER_CST
5378 && !TREE_INT_CST_HIGH (index)
307b821c 5379 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
ad2e7dd0
RK
5380 && GET_MODE_CLASS (mode) == MODE_INT
5381 && GET_MODE_SIZE (mode) == 1)
307b821c 5382 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 5383
742920c7
RK
5384 /* If this is a constant index into a constant array,
5385 just get the value from the array. Handle both the cases when
5386 we have an explicit constructor and when our operand is a variable
5387 that was declared const. */
4af3895e 5388
742920c7
RK
5389 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5390 {
5391 if (TREE_CODE (index) == INTEGER_CST
5392 && TREE_INT_CST_HIGH (index) == 0)
5393 {
5394 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5395
5396 i = TREE_INT_CST_LOW (index);
5397 while (elem && i--)
5398 elem = TREE_CHAIN (elem);
5399 if (elem)
5400 return expand_expr (fold (TREE_VALUE (elem)), target,
5401 tmode, modifier);
5402 }
5403 }
4af3895e 5404
742920c7
RK
5405 else if (optimize >= 1
5406 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5407 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5408 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5409 {
5410 if (TREE_CODE (index) == INTEGER_CST
5411 && TREE_INT_CST_HIGH (index) == 0)
5412 {
5413 tree init = DECL_INITIAL (array);
5414
5415 i = TREE_INT_CST_LOW (index);
5416 if (TREE_CODE (init) == CONSTRUCTOR)
5417 {
5418 tree elem = CONSTRUCTOR_ELTS (init);
5419
03dc44a6
RS
5420 while (elem
5421 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
5422 elem = TREE_CHAIN (elem);
5423 if (elem)
5424 return expand_expr (fold (TREE_VALUE (elem)), target,
5425 tmode, modifier);
5426 }
5427 else if (TREE_CODE (init) == STRING_CST
5428 && i < TREE_STRING_LENGTH (init))
307b821c 5429 return GEN_INT (TREE_STRING_POINTER (init)[i]);
742920c7
RK
5430 }
5431 }
5432 }
8c8a8e34 5433
bbf6f052
RK
5434 /* Treat array-ref with constant index as a component-ref. */
5435
5436 case COMPONENT_REF:
5437 case BIT_FIELD_REF:
4af3895e 5438 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
5439 appropriate field if it is present. Don't do this if we have
5440 already written the data since we want to refer to that copy
5441 and varasm.c assumes that's what we'll do. */
4af3895e 5442 if (code != ARRAY_REF
7a0b7b9a
RK
5443 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5444 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
5445 {
5446 tree elt;
5447
5448 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5449 elt = TREE_CHAIN (elt))
5450 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5451 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5452 }
5453
bbf6f052
RK
5454 {
5455 enum machine_mode mode1;
5456 int bitsize;
5457 int bitpos;
7bb0943f 5458 tree offset;
bbf6f052 5459 int volatilep = 0;
7bb0943f 5460 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
bbf6f052 5461 &mode1, &unsignedp, &volatilep);
034f9101 5462 int alignment;
bbf6f052 5463
e7f3c83f
RK
5464 /* If we got back the original object, something is wrong. Perhaps
5465 we are evaluating an expression too early. In any event, don't
5466 infinitely recurse. */
5467 if (tem == exp)
5468 abort ();
5469
3d27140a 5470 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
5471 computation, since it will need a temporary and TARGET is known
5472 to have to do. This occurs in unchecked conversion in Ada. */
5473
5474 op0 = expand_expr (tem,
5475 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5476 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5477 != INTEGER_CST)
5478 ? target : NULL_RTX),
4ed67205
RK
5479 VOIDmode,
5480 modifier == EXPAND_INITIALIZER ? modifier : 0);
bbf6f052 5481
8c8a8e34 5482 /* If this is a constant, put it into a register if it is a
8008b228 5483 legitimate constant and memory if it isn't. */
8c8a8e34
JW
5484 if (CONSTANT_P (op0))
5485 {
5486 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 5487 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
5488 op0 = force_reg (mode, op0);
5489 else
5490 op0 = validize_mem (force_const_mem (mode, op0));
5491 }
5492
034f9101 5493 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
7bb0943f
RS
5494 if (offset != 0)
5495 {
906c4e36 5496 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
5497
5498 if (GET_CODE (op0) != MEM)
5499 abort ();
5500 op0 = change_address (op0, VOIDmode,
88f63c77
RK
5501 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5502 force_reg (ptr_mode, offset_rtx)));
7bb0943f
RS
5503 }
5504
bbf6f052
RK
5505 /* Don't forget about volatility even if this is a bitfield. */
5506 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5507 {
5508 op0 = copy_rtx (op0);
5509 MEM_VOLATILE_P (op0) = 1;
5510 }
5511
ccc98036
RS
5512 /* In cases where an aligned union has an unaligned object
5513 as a field, we might be extracting a BLKmode value from
5514 an integer-mode (e.g., SImode) object. Handle this case
5515 by doing the extract into an object as wide as the field
5516 (which we know to be the width of a basic mode), then
f2420d0b
JW
5517 storing into memory, and changing the mode to BLKmode.
5518 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5519 EXPAND_INITIALIZER), then we must not copy to a temporary. */
bbf6f052 5520 if (mode1 == VOIDmode
ccc98036 5521 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 5522 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 5523 && modifier != EXPAND_INITIALIZER
c2722ef6
RK
5524 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5525 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5526 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f9409c3a
JW
5527 /* If the field isn't aligned enough to fetch as a memref,
5528 fetch it as a bit field. */
5529 || (SLOW_UNALIGNED_ACCESS
5530 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5531 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
bbf6f052 5532 {
bbf6f052
RK
5533 enum machine_mode ext_mode = mode;
5534
5535 if (ext_mode == BLKmode)
5536 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5537
5538 if (ext_mode == BLKmode)
a281e72d
RK
5539 {
5540 /* In this case, BITPOS must start at a byte boundary and
5541 TARGET, if specified, must be a MEM. */
5542 if (GET_CODE (op0) != MEM
5543 || (target != 0 && GET_CODE (target) != MEM)
5544 || bitpos % BITS_PER_UNIT != 0)
5545 abort ();
5546
5547 op0 = change_address (op0, VOIDmode,
5548 plus_constant (XEXP (op0, 0),
5549 bitpos / BITS_PER_UNIT));
5550 if (target == 0)
5551 target = assign_temp (type, 0, 1, 1);
5552
5553 emit_block_move (target, op0,
5554 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5555 / BITS_PER_UNIT),
5556 1);
5557
5558 return target;
5559 }
bbf6f052 5560
dc6d66b3
RK
5561 op0 = validize_mem (op0);
5562
5563 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5564 mark_reg_pointer (XEXP (op0, 0), alignment);
5565
5566 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 5567 unsignedp, target, ext_mode, ext_mode,
034f9101 5568 alignment,
bbf6f052 5569 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
5570
5571 /* If the result is a record type and BITSIZE is narrower than
5572 the mode of OP0, an integral mode, and this is a big endian
5573 machine, we must put the field into the high-order bits. */
5574 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5575 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5576 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5577 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5578 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5579 - bitsize),
5580 op0, 1);
5581
bbf6f052
RK
5582 if (mode == BLKmode)
5583 {
5584 rtx new = assign_stack_temp (ext_mode,
5585 bitsize / BITS_PER_UNIT, 0);
5586
5587 emit_move_insn (new, op0);
5588 op0 = copy_rtx (new);
5589 PUT_MODE (op0, BLKmode);
092dded9 5590 MEM_IN_STRUCT_P (op0) = 1;
bbf6f052
RK
5591 }
5592
5593 return op0;
5594 }
5595
05019f83
RK
5596 /* If the result is BLKmode, use that to access the object
5597 now as well. */
5598 if (mode == BLKmode)
5599 mode1 = BLKmode;
5600
bbf6f052
RK
5601 /* Get a reference to just this component. */
5602 if (modifier == EXPAND_CONST_ADDRESS
5603 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5604 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5605 (bitpos / BITS_PER_UNIT)));
5606 else
5607 op0 = change_address (op0, mode1,
5608 plus_constant (XEXP (op0, 0),
5609 (bitpos / BITS_PER_UNIT)));
dc6d66b3
RK
5610 if (GET_CODE (XEXP (op0, 0)) == REG)
5611 mark_reg_pointer (XEXP (op0, 0), alignment);
5612
bbf6f052
RK
5613 MEM_IN_STRUCT_P (op0) = 1;
5614 MEM_VOLATILE_P (op0) |= volatilep;
5615 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5616 return op0;
5617 if (target == 0)
5618 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5619 convert_move (target, op0, unsignedp);
5620 return target;
5621 }
5622
bbf6f052
RK
5623 /* Intended for a reference to a buffer of a file-object in Pascal.
5624 But it's not certain that a special tree code will really be
5625 necessary for these. INDIRECT_REF might work for them. */
5626 case BUFFER_REF:
5627 abort ();
5628
7308a047 5629 case IN_EXPR:
7308a047 5630 {
d6a5ac33
RK
5631 /* Pascal set IN expression.
5632
5633 Algorithm:
5634 rlo = set_low - (set_low%bits_per_word);
5635 the_word = set [ (index - rlo)/bits_per_word ];
5636 bit_index = index % bits_per_word;
5637 bitmask = 1 << bit_index;
5638 return !!(the_word & bitmask); */
5639
7308a047
RS
5640 tree set = TREE_OPERAND (exp, 0);
5641 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 5642 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 5643 tree set_type = TREE_TYPE (set);
7308a047
RS
5644 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5645 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
5646 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5647 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5648 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5649 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5650 rtx setaddr = XEXP (setval, 0);
5651 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
5652 rtx rlow;
5653 rtx diff, quo, rem, addr, bit, result;
7308a047 5654
d6a5ac33
RK
5655 preexpand_calls (exp);
5656
5657 /* If domain is empty, answer is no. Likewise if index is constant
5658 and out of bounds. */
5659 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5660 && TREE_CODE (set_low_bound) == INTEGER_CST
5661 && tree_int_cst_lt (set_high_bound, set_low_bound)
5662 || (TREE_CODE (index) == INTEGER_CST
5663 && TREE_CODE (set_low_bound) == INTEGER_CST
5664 && tree_int_cst_lt (index, set_low_bound))
5665 || (TREE_CODE (set_high_bound) == INTEGER_CST
5666 && TREE_CODE (index) == INTEGER_CST
5667 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
5668 return const0_rtx;
5669
d6a5ac33
RK
5670 if (target == 0)
5671 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
5672
5673 /* If we get here, we have to generate the code for both cases
5674 (in range and out of range). */
5675
5676 op0 = gen_label_rtx ();
5677 op1 = gen_label_rtx ();
5678
5679 if (! (GET_CODE (index_val) == CONST_INT
5680 && GET_CODE (lo_r) == CONST_INT))
5681 {
17938e57 5682 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
d6a5ac33 5683 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
5684 emit_jump_insn (gen_blt (op1));
5685 }
5686
5687 if (! (GET_CODE (index_val) == CONST_INT
5688 && GET_CODE (hi_r) == CONST_INT))
5689 {
17938e57 5690 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
d6a5ac33 5691 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
5692 emit_jump_insn (gen_bgt (op1));
5693 }
5694
5695 /* Calculate the element number of bit zero in the first word
5696 of the set. */
5697 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
5698 rlow = GEN_INT (INTVAL (lo_r)
5699 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 5700 else
17938e57
RK
5701 rlow = expand_binop (index_mode, and_optab, lo_r,
5702 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 5703 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 5704
d6a5ac33
RK
5705 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5706 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
5707
5708 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 5709 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 5710 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
5711 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5712
7308a047 5713 addr = memory_address (byte_mode,
d6a5ac33
RK
5714 expand_binop (index_mode, add_optab, diff,
5715 setaddr, NULL_RTX, iunsignedp,
17938e57 5716 OPTAB_LIB_WIDEN));
d6a5ac33 5717
7308a047
RS
5718 /* Extract the bit we want to examine */
5719 bit = expand_shift (RSHIFT_EXPR, byte_mode,
17938e57
RK
5720 gen_rtx (MEM, byte_mode, addr),
5721 make_tree (TREE_TYPE (index), rem),
5722 NULL_RTX, 1);
5723 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5724 GET_MODE (target) == byte_mode ? target : 0,
7308a047 5725 1, OPTAB_LIB_WIDEN);
17938e57
RK
5726
5727 if (result != target)
5728 convert_move (target, result, 1);
7308a047
RS
5729
5730 /* Output the code to handle the out-of-range case. */
5731 emit_jump (op0);
5732 emit_label (op1);
5733 emit_move_insn (target, const0_rtx);
5734 emit_label (op0);
5735 return target;
5736 }
5737
bbf6f052
RK
5738 case WITH_CLEANUP_EXPR:
5739 if (RTL_EXPR_RTL (exp) == 0)
5740 {
5741 RTL_EXPR_RTL (exp)
6fcc9690 5742 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
906c4e36
RK
5743 cleanups_this_call
5744 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
bbf6f052
RK
5745 /* That's it for this cleanup. */
5746 TREE_OPERAND (exp, 2) = 0;
3d195391 5747 expand_eh_region_start ();
bbf6f052
RK
5748 }
5749 return RTL_EXPR_RTL (exp);
5750
5dab5552
MS
5751 case CLEANUP_POINT_EXPR:
5752 {
d93d4205 5753 extern int temp_slot_level;
5dab5552 5754 tree old_cleanups = cleanups_this_call;
d93d4205
MS
5755 int old_temp_level = target_temp_slot_level;
5756 push_temp_slots ();
5757 target_temp_slot_level = temp_slot_level;
f283f66b
JM
5758 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5759 /* If we're going to use this value, load it up now. */
5760 if (! ignore)
5761 op0 = force_not_mem (op0);
5dab5552 5762 expand_cleanups_to (old_cleanups);
d93d4205
MS
5763 preserve_temp_slots (op0);
5764 free_temp_slots ();
5765 pop_temp_slots ();
5766 target_temp_slot_level = old_temp_level;
5dab5552
MS
5767 }
5768 return op0;
5769
bbf6f052
RK
5770 case CALL_EXPR:
5771 /* Check for a built-in function. */
5772 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
5773 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5774 == FUNCTION_DECL)
bbf6f052
RK
5775 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5776 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 5777
bbf6f052
RK
5778 /* If this call was expanded already by preexpand_calls,
5779 just return the result we got. */
5780 if (CALL_EXPR_RTL (exp) != 0)
5781 return CALL_EXPR_RTL (exp);
d6a5ac33 5782
8129842c 5783 return expand_call (exp, target, ignore);
bbf6f052
RK
5784
5785 case NON_LVALUE_EXPR:
5786 case NOP_EXPR:
5787 case CONVERT_EXPR:
5788 case REFERENCE_EXPR:
bbf6f052
RK
5789 if (TREE_CODE (type) == UNION_TYPE)
5790 {
5791 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5792 if (target == 0)
06089a8b
RK
5793 {
5794 if (mode != BLKmode)
5795 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5796 else
5797 target = assign_temp (type, 0, 1, 1);
5798 }
d6a5ac33 5799
bbf6f052
RK
5800 if (GET_CODE (target) == MEM)
5801 /* Store data into beginning of memory target. */
5802 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
5803 change_address (target, TYPE_MODE (valtype), 0), 0);
5804
bbf6f052
RK
5805 else if (GET_CODE (target) == REG)
5806 /* Store this field into a union of the proper type. */
5807 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5808 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5809 VOIDmode, 0, 1,
5810 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5811 else
5812 abort ();
5813
5814 /* Return the entire union. */
5815 return target;
5816 }
d6a5ac33 5817
7f62854a
RK
5818 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5819 {
5820 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5821 modifier);
5822
5823 /* If the signedness of the conversion differs and OP0 is
5824 a promoted SUBREG, clear that indication since we now
5825 have to do the proper extension. */
5826 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5827 && GET_CODE (op0) == SUBREG)
5828 SUBREG_PROMOTED_VAR_P (op0) = 0;
5829
5830 return op0;
5831 }
5832
1499e0a8 5833 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
5834 if (GET_MODE (op0) == mode)
5835 return op0;
12342f90 5836
d6a5ac33
RK
5837 /* If OP0 is a constant, just convert it into the proper mode. */
5838 if (CONSTANT_P (op0))
5839 return
5840 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5841 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 5842
26fcb35a
RS
5843 if (modifier == EXPAND_INITIALIZER)
5844 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 5845
bbf6f052 5846 if (target == 0)
d6a5ac33
RK
5847 return
5848 convert_to_mode (mode, op0,
5849 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 5850 else
d6a5ac33
RK
5851 convert_move (target, op0,
5852 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
5853 return target;
5854
5855 case PLUS_EXPR:
0f41302f
MS
5856 /* We come here from MINUS_EXPR when the second operand is a
5857 constant. */
bbf6f052
RK
5858 plus_expr:
5859 this_optab = add_optab;
5860
5861 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5862 something else, make sure we add the register to the constant and
5863 then to the other thing. This case can occur during strength
5864 reduction and doing it this way will produce better code if the
5865 frame pointer or argument pointer is eliminated.
5866
5867 fold-const.c will ensure that the constant is always in the inner
5868 PLUS_EXPR, so the only case we need to do anything about is if
5869 sp, ap, or fp is our second argument, in which case we must swap
5870 the innermost first argument and our second argument. */
5871
5872 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5873 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5874 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5875 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5876 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5877 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5878 {
5879 tree t = TREE_OPERAND (exp, 1);
5880
5881 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5882 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5883 }
5884
88f63c77 5885 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
5886 something, we might be forming a constant. So try to use
5887 plus_constant. If it produces a sum and we can't accept it,
5888 use force_operand. This allows P = &ARR[const] to generate
5889 efficient code on machines where a SYMBOL_REF is not a valid
5890 address.
5891
5892 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 5893 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
88f63c77 5894 || mode == ptr_mode)
bbf6f052 5895 {
c980ac49
RS
5896 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5897 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5898 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5899 {
5900 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5901 EXPAND_SUM);
5902 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5903 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5904 op1 = force_operand (op1, target);
5905 return op1;
5906 }
bbf6f052 5907
c980ac49
RS
5908 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5909 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5910 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5911 {
5912 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5913 EXPAND_SUM);
5914 if (! CONSTANT_P (op0))
5915 {
5916 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5917 VOIDmode, modifier);
709f5be1
RS
5918 /* Don't go to both_summands if modifier
5919 says it's not right to return a PLUS. */
5920 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5921 goto binop2;
c980ac49
RS
5922 goto both_summands;
5923 }
5924 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5925 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5926 op0 = force_operand (op0, target);
5927 return op0;
5928 }
bbf6f052
RK
5929 }
5930
5931 /* No sense saving up arithmetic to be done
5932 if it's all in the wrong mode to form part of an address.
5933 And force_operand won't know whether to sign-extend or
5934 zero-extend. */
5935 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 5936 || mode != ptr_mode)
c980ac49 5937 goto binop;
bbf6f052
RK
5938
5939 preexpand_calls (exp);
5940 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5941 subtarget = 0;
5942
5943 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
906c4e36 5944 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
bbf6f052 5945
c980ac49 5946 both_summands:
bbf6f052
RK
5947 /* Make sure any term that's a sum with a constant comes last. */
5948 if (GET_CODE (op0) == PLUS
5949 && CONSTANT_P (XEXP (op0, 1)))
5950 {
5951 temp = op0;
5952 op0 = op1;
5953 op1 = temp;
5954 }
5955 /* If adding to a sum including a constant,
5956 associate it to put the constant outside. */
5957 if (GET_CODE (op1) == PLUS
5958 && CONSTANT_P (XEXP (op1, 1)))
5959 {
5960 rtx constant_term = const0_rtx;
5961
5962 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5963 if (temp != 0)
5964 op0 = temp;
6f90e075
JW
5965 /* Ensure that MULT comes first if there is one. */
5966 else if (GET_CODE (op0) == MULT)
5967 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
bbf6f052
RK
5968 else
5969 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5970
5971 /* Let's also eliminate constants from op0 if possible. */
5972 op0 = eliminate_constant_term (op0, &constant_term);
5973
5974 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5975 their sum should be a constant. Form it into OP1, since the
5976 result we want will then be OP0 + OP1. */
5977
5978 temp = simplify_binary_operation (PLUS, mode, constant_term,
5979 XEXP (op1, 1));
5980 if (temp != 0)
5981 op1 = temp;
5982 else
5983 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5984 }
5985
5986 /* Put a constant term last and put a multiplication first. */
5987 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5988 temp = op1, op1 = op0, op0 = temp;
5989
5990 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5991 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5992
5993 case MINUS_EXPR:
ea87523e
RK
5994 /* For initializers, we are allowed to return a MINUS of two
5995 symbolic constants. Here we handle all cases when both operands
5996 are constant. */
bbf6f052
RK
5997 /* Handle difference of two symbolic constants,
5998 for the sake of an initializer. */
5999 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6000 && really_constant_p (TREE_OPERAND (exp, 0))
6001 && really_constant_p (TREE_OPERAND (exp, 1)))
6002 {
906c4e36
RK
6003 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6004 VOIDmode, modifier);
6005 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6006 VOIDmode, modifier);
ea87523e 6007
ea87523e
RK
6008 /* If the last operand is a CONST_INT, use plus_constant of
6009 the negated constant. Else make the MINUS. */
6010 if (GET_CODE (op1) == CONST_INT)
6011 return plus_constant (op0, - INTVAL (op1));
6012 else
6013 return gen_rtx (MINUS, mode, op0, op1);
bbf6f052
RK
6014 }
6015 /* Convert A - const to A + (-const). */
6016 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6017 {
ae431183
RK
6018 tree negated = fold (build1 (NEGATE_EXPR, type,
6019 TREE_OPERAND (exp, 1)));
6020
6021 /* Deal with the case where we can't negate the constant
6022 in TYPE. */
6023 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6024 {
6025 tree newtype = signed_type (type);
6026 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6027 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6028 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6029
6030 if (! TREE_OVERFLOW (newneg))
6031 return expand_expr (convert (type,
6032 build (PLUS_EXPR, newtype,
6033 newop0, newneg)),
6034 target, tmode, modifier);
6035 }
6036 else
6037 {
6038 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6039 goto plus_expr;
6040 }
bbf6f052
RK
6041 }
6042 this_optab = sub_optab;
6043 goto binop;
6044
6045 case MULT_EXPR:
6046 preexpand_calls (exp);
6047 /* If first operand is constant, swap them.
6048 Thus the following special case checks need only
6049 check the second operand. */
6050 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6051 {
6052 register tree t1 = TREE_OPERAND (exp, 0);
6053 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6054 TREE_OPERAND (exp, 1) = t1;
6055 }
6056
6057 /* Attempt to return something suitable for generating an
6058 indexed address, for machines that support that. */
6059
88f63c77 6060 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 6061 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 6062 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
6063 {
6064 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6065
6066 /* Apply distributive law if OP0 is x+c. */
6067 if (GET_CODE (op0) == PLUS
6068 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6069 return gen_rtx (PLUS, mode,
6070 gen_rtx (MULT, mode, XEXP (op0, 0),
906c4e36
RK
6071 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6072 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6073 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
6074
6075 if (GET_CODE (op0) != REG)
906c4e36 6076 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
6077 if (GET_CODE (op0) != REG)
6078 op0 = copy_to_mode_reg (mode, op0);
6079
6080 return gen_rtx (MULT, mode, op0,
906c4e36 6081 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
6082 }
6083
6084 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6085 subtarget = 0;
6086
6087 /* Check for multiplying things that have been extended
6088 from a narrower type. If this machine supports multiplying
6089 in that narrower type with a result in the desired type,
6090 do it that way, and avoid the explicit type-conversion. */
6091 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6092 && TREE_CODE (type) == INTEGER_TYPE
6093 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6094 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6095 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6096 && int_fits_type_p (TREE_OPERAND (exp, 1),
6097 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6098 /* Don't use a widening multiply if a shift will do. */
6099 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 6100 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
6101 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6102 ||
6103 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6104 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6105 ==
6106 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6107 /* If both operands are extended, they must either both
6108 be zero-extended or both be sign-extended. */
6109 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6110 ==
6111 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6112 {
6113 enum machine_mode innermode
6114 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
6115 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6116 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
6117 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6118 ? umul_widen_optab : smul_widen_optab);
b10af0c8 6119 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 6120 {
b10af0c8
TG
6121 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6122 {
6123 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6124 NULL_RTX, VOIDmode, 0);
6125 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6126 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6127 VOIDmode, 0);
6128 else
6129 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6130 NULL_RTX, VOIDmode, 0);
6131 goto binop2;
6132 }
6133 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6134 && innermode == word_mode)
6135 {
6136 rtx htem;
6137 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6138 NULL_RTX, VOIDmode, 0);
6139 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6140 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6141 VOIDmode, 0);
6142 else
6143 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6144 NULL_RTX, VOIDmode, 0);
6145 temp = expand_binop (mode, other_optab, op0, op1, target,
6146 unsignedp, OPTAB_LIB_WIDEN);
6147 htem = expand_mult_highpart_adjust (innermode,
6148 gen_highpart (innermode, temp),
6149 op0, op1,
6150 gen_highpart (innermode, temp),
6151 unsignedp);
6152 emit_move_insn (gen_highpart (innermode, temp), htem);
6153 return temp;
6154 }
bbf6f052
RK
6155 }
6156 }
6157 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6158 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6159 return expand_mult (mode, op0, op1, target, unsignedp);
6160
6161 case TRUNC_DIV_EXPR:
6162 case FLOOR_DIV_EXPR:
6163 case CEIL_DIV_EXPR:
6164 case ROUND_DIV_EXPR:
6165 case EXACT_DIV_EXPR:
6166 preexpand_calls (exp);
6167 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6168 subtarget = 0;
6169 /* Possible optimization: compute the dividend with EXPAND_SUM
6170 then if the divisor is constant can optimize the case
6171 where some terms of the dividend have coeffs divisible by it. */
6172 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6173 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6174 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6175
6176 case RDIV_EXPR:
6177 this_optab = flodiv_optab;
6178 goto binop;
6179
6180 case TRUNC_MOD_EXPR:
6181 case FLOOR_MOD_EXPR:
6182 case CEIL_MOD_EXPR:
6183 case ROUND_MOD_EXPR:
6184 preexpand_calls (exp);
6185 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6186 subtarget = 0;
6187 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6188 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6189 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6190
6191 case FIX_ROUND_EXPR:
6192 case FIX_FLOOR_EXPR:
6193 case FIX_CEIL_EXPR:
6194 abort (); /* Not used for C. */
6195
6196 case FIX_TRUNC_EXPR:
906c4e36 6197 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6198 if (target == 0)
6199 target = gen_reg_rtx (mode);
6200 expand_fix (target, op0, unsignedp);
6201 return target;
6202
6203 case FLOAT_EXPR:
906c4e36 6204 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6205 if (target == 0)
6206 target = gen_reg_rtx (mode);
6207 /* expand_float can't figure out what to do if FROM has VOIDmode.
6208 So give it the correct mode. With -O, cse will optimize this. */
6209 if (GET_MODE (op0) == VOIDmode)
6210 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6211 op0);
6212 expand_float (target, op0,
6213 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6214 return target;
6215
6216 case NEGATE_EXPR:
5b22bee8 6217 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
bbf6f052
RK
6218 temp = expand_unop (mode, neg_optab, op0, target, 0);
6219 if (temp == 0)
6220 abort ();
6221 return temp;
6222
6223 case ABS_EXPR:
6224 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6225
2d7050fd 6226 /* Handle complex values specially. */
d6a5ac33
RK
6227 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6228 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6229 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 6230
bbf6f052
RK
6231 /* Unsigned abs is simply the operand. Testing here means we don't
6232 risk generating incorrect code below. */
6233 if (TREE_UNSIGNED (type))
6234 return op0;
6235
2e5ec6cf
RK
6236 return expand_abs (mode, op0, target, unsignedp,
6237 safe_from_p (target, TREE_OPERAND (exp, 0)));
bbf6f052
RK
6238
6239 case MAX_EXPR:
6240 case MIN_EXPR:
6241 target = original_target;
6242 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
fc155707 6243 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 6244 || GET_MODE (target) != mode
bbf6f052
RK
6245 || (GET_CODE (target) == REG
6246 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6247 target = gen_reg_rtx (mode);
906c4e36 6248 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6249 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6250
6251 /* First try to do it with a special MIN or MAX instruction.
6252 If that does not win, use a conditional jump to select the proper
6253 value. */
6254 this_optab = (TREE_UNSIGNED (type)
6255 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6256 : (code == MIN_EXPR ? smin_optab : smax_optab));
6257
6258 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6259 OPTAB_WIDEN);
6260 if (temp != 0)
6261 return temp;
6262
fa2981d8
JW
6263 /* At this point, a MEM target is no longer useful; we will get better
6264 code without it. */
6265
6266 if (GET_CODE (target) == MEM)
6267 target = gen_reg_rtx (mode);
6268
ee456b1c
RK
6269 if (target != op0)
6270 emit_move_insn (target, op0);
d6a5ac33 6271
bbf6f052 6272 op0 = gen_label_rtx ();
d6a5ac33 6273
f81497d9
RS
6274 /* If this mode is an integer too wide to compare properly,
6275 compare word by word. Rely on cse to optimize constant cases. */
d6a5ac33 6276 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
bbf6f052 6277 {
f81497d9 6278 if (code == MAX_EXPR)
d6a5ac33
RK
6279 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6280 target, op1, NULL_RTX, op0);
bbf6f052 6281 else
d6a5ac33
RK
6282 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6283 op1, target, NULL_RTX, op0);
ee456b1c 6284 emit_move_insn (target, op1);
bbf6f052 6285 }
f81497d9
RS
6286 else
6287 {
6288 if (code == MAX_EXPR)
6289 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
6290 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6291 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
f81497d9
RS
6292 else
6293 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
6294 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6295 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
f81497d9 6296 if (temp == const0_rtx)
ee456b1c 6297 emit_move_insn (target, op1);
f81497d9
RS
6298 else if (temp != const_true_rtx)
6299 {
6300 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6301 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6302 else
6303 abort ();
ee456b1c 6304 emit_move_insn (target, op1);
f81497d9
RS
6305 }
6306 }
bbf6f052
RK
6307 emit_label (op0);
6308 return target;
6309
bbf6f052
RK
6310 case BIT_NOT_EXPR:
6311 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6312 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6313 if (temp == 0)
6314 abort ();
6315 return temp;
6316
6317 case FFS_EXPR:
6318 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6319 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6320 if (temp == 0)
6321 abort ();
6322 return temp;
6323
d6a5ac33
RK
6324 /* ??? Can optimize bitwise operations with one arg constant.
6325 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6326 and (a bitwise1 b) bitwise2 b (etc)
6327 but that is probably not worth while. */
6328
6329 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6330 boolean values when we want in all cases to compute both of them. In
6331 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6332 as actual zero-or-1 values and then bitwise anding. In cases where
6333 there cannot be any side effects, better code would be made by
6334 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6335 how to recognize those cases. */
6336
bbf6f052
RK
6337 case TRUTH_AND_EXPR:
6338 case BIT_AND_EXPR:
6339 this_optab = and_optab;
6340 goto binop;
6341
bbf6f052
RK
6342 case TRUTH_OR_EXPR:
6343 case BIT_IOR_EXPR:
6344 this_optab = ior_optab;
6345 goto binop;
6346
874726a8 6347 case TRUTH_XOR_EXPR:
bbf6f052
RK
6348 case BIT_XOR_EXPR:
6349 this_optab = xor_optab;
6350 goto binop;
6351
6352 case LSHIFT_EXPR:
6353 case RSHIFT_EXPR:
6354 case LROTATE_EXPR:
6355 case RROTATE_EXPR:
6356 preexpand_calls (exp);
6357 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6358 subtarget = 0;
6359 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6360 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6361 unsignedp);
6362
d6a5ac33
RK
6363 /* Could determine the answer when only additive constants differ. Also,
6364 the addition of one can be handled by changing the condition. */
bbf6f052
RK
6365 case LT_EXPR:
6366 case LE_EXPR:
6367 case GT_EXPR:
6368 case GE_EXPR:
6369 case EQ_EXPR:
6370 case NE_EXPR:
6371 preexpand_calls (exp);
6372 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6373 if (temp != 0)
6374 return temp;
d6a5ac33 6375
0f41302f 6376 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
6377 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6378 && original_target
6379 && GET_CODE (original_target) == REG
6380 && (GET_MODE (original_target)
6381 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6382 {
d6a5ac33
RK
6383 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6384 VOIDmode, 0);
6385
bbf6f052
RK
6386 if (temp != original_target)
6387 temp = copy_to_reg (temp);
d6a5ac33 6388
bbf6f052 6389 op1 = gen_label_rtx ();
906c4e36 6390 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
bbf6f052
RK
6391 GET_MODE (temp), unsignedp, 0);
6392 emit_jump_insn (gen_beq (op1));
6393 emit_move_insn (temp, const1_rtx);
6394 emit_label (op1);
6395 return temp;
6396 }
d6a5ac33 6397
bbf6f052
RK
6398 /* If no set-flag instruction, must generate a conditional
6399 store into a temporary variable. Drop through
6400 and handle this like && and ||. */
6401
6402 case TRUTH_ANDIF_EXPR:
6403 case TRUTH_ORIF_EXPR:
e44842fe
RK
6404 if (! ignore
6405 && (target == 0 || ! safe_from_p (target, exp)
6406 /* Make sure we don't have a hard reg (such as function's return
6407 value) live across basic blocks, if not optimizing. */
6408 || (!optimize && GET_CODE (target) == REG
6409 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 6410 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
6411
6412 if (target)
6413 emit_clr_insn (target);
6414
bbf6f052
RK
6415 op1 = gen_label_rtx ();
6416 jumpifnot (exp, op1);
e44842fe
RK
6417
6418 if (target)
6419 emit_0_to_1_insn (target);
6420
bbf6f052 6421 emit_label (op1);
e44842fe 6422 return ignore ? const0_rtx : target;
bbf6f052
RK
6423
6424 case TRUTH_NOT_EXPR:
6425 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6426 /* The parser is careful to generate TRUTH_NOT_EXPR
6427 only with operands that are always zero or one. */
906c4e36 6428 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
6429 target, 1, OPTAB_LIB_WIDEN);
6430 if (temp == 0)
6431 abort ();
6432 return temp;
6433
6434 case COMPOUND_EXPR:
6435 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6436 emit_queue ();
6437 return expand_expr (TREE_OPERAND (exp, 1),
6438 (ignore ? const0_rtx : target),
6439 VOIDmode, 0);
6440
6441 case COND_EXPR:
6442 {
5dab5552
MS
6443 rtx flag = NULL_RTX;
6444 tree left_cleanups = NULL_TREE;
6445 tree right_cleanups = NULL_TREE;
6446
6447 /* Used to save a pointer to the place to put the setting of
6448 the flag that indicates if this side of the conditional was
6449 taken. We backpatch the code, if we find out later that we
0f41302f 6450 have any conditional cleanups that need to be performed. */
5dab5552
MS
6451 rtx dest_right_flag = NULL_RTX;
6452 rtx dest_left_flag = NULL_RTX;
6453
bbf6f052
RK
6454 /* Note that COND_EXPRs whose type is a structure or union
6455 are required to be constructed to contain assignments of
6456 a temporary variable, so that we can evaluate them here
6457 for side effect only. If type is void, we must do likewise. */
6458
6459 /* If an arm of the branch requires a cleanup,
6460 only that cleanup is performed. */
6461
6462 tree singleton = 0;
6463 tree binary_op = 0, unary_op = 0;
6464 tree old_cleanups = cleanups_this_call;
bbf6f052
RK
6465
6466 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6467 convert it to our mode, if necessary. */
6468 if (integer_onep (TREE_OPERAND (exp, 1))
6469 && integer_zerop (TREE_OPERAND (exp, 2))
6470 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6471 {
dd27116b
RK
6472 if (ignore)
6473 {
6474 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6475 modifier);
6476 return const0_rtx;
6477 }
6478
bbf6f052
RK
6479 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6480 if (GET_MODE (op0) == mode)
6481 return op0;
d6a5ac33 6482
bbf6f052
RK
6483 if (target == 0)
6484 target = gen_reg_rtx (mode);
6485 convert_move (target, op0, unsignedp);
6486 return target;
6487 }
6488
bbf6f052
RK
6489 /* Check for X ? A + B : A. If we have this, we can copy
6490 A to the output and conditionally add B. Similarly for unary
6491 operations. Don't do this if X has side-effects because
6492 those side effects might affect A or B and the "?" operation is
6493 a sequence point in ANSI. (We test for side effects later.) */
6494
6495 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6496 && operand_equal_p (TREE_OPERAND (exp, 2),
6497 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6498 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6499 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6500 && operand_equal_p (TREE_OPERAND (exp, 1),
6501 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6502 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6503 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6504 && operand_equal_p (TREE_OPERAND (exp, 2),
6505 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6506 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6507 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6508 && operand_equal_p (TREE_OPERAND (exp, 1),
6509 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6510 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6511
01c8a7c8
RK
6512 /* If we are not to produce a result, we have no target. Otherwise,
6513 if a target was specified use it; it will not be used as an
6514 intermediate target unless it is safe. If no target, use a
6515 temporary. */
6516
6517 if (ignore)
6518 temp = 0;
6519 else if (original_target
6520 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6521 || (singleton && GET_CODE (original_target) == REG
6522 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6523 && original_target == var_rtx (singleton)))
6524 && GET_MODE (original_target) == mode
6525 && ! (GET_CODE (original_target) == MEM
6526 && MEM_VOLATILE_P (original_target)))
6527 temp = original_target;
6528 else if (TREE_ADDRESSABLE (type))
6529 abort ();
6530 else
6531 temp = assign_temp (type, 0, 0, 1);
6532
bbf6f052
RK
6533 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6534 operation, do this as A + (X != 0). Similarly for other simple
6535 binary operators. */
dd27116b 6536 if (temp && singleton && binary_op
bbf6f052
RK
6537 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6538 && (TREE_CODE (binary_op) == PLUS_EXPR
6539 || TREE_CODE (binary_op) == MINUS_EXPR
6540 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 6541 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
bbf6f052
RK
6542 && integer_onep (TREE_OPERAND (binary_op, 1))
6543 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6544 {
6545 rtx result;
6546 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6547 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6548 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
2d444001 6549 : xor_optab);
bbf6f052
RK
6550
6551 /* If we had X ? A : A + 1, do this as A + (X == 0).
6552
6553 We have to invert the truth value here and then put it
6554 back later if do_store_flag fails. We cannot simply copy
6555 TREE_OPERAND (exp, 0) to another variable and modify that
6556 because invert_truthvalue can modify the tree pointed to
6557 by its argument. */
6558 if (singleton == TREE_OPERAND (exp, 1))
6559 TREE_OPERAND (exp, 0)
6560 = invert_truthvalue (TREE_OPERAND (exp, 0));
6561
6562 result = do_store_flag (TREE_OPERAND (exp, 0),
906c4e36
RK
6563 (safe_from_p (temp, singleton)
6564 ? temp : NULL_RTX),
bbf6f052
RK
6565 mode, BRANCH_COST <= 1);
6566
6567 if (result)
6568 {
906c4e36 6569 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6570 return expand_binop (mode, boptab, op1, result, temp,
6571 unsignedp, OPTAB_LIB_WIDEN);
6572 }
6573 else if (singleton == TREE_OPERAND (exp, 1))
6574 TREE_OPERAND (exp, 0)
6575 = invert_truthvalue (TREE_OPERAND (exp, 0));
6576 }
6577
dabf8373 6578 do_pending_stack_adjust ();
bbf6f052
RK
6579 NO_DEFER_POP;
6580 op0 = gen_label_rtx ();
6581
5dab5552 6582 flag = gen_reg_rtx (word_mode);
bbf6f052
RK
6583 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6584 {
6585 if (temp != 0)
6586 {
6587 /* If the target conflicts with the other operand of the
6588 binary op, we can't use it. Also, we can't use the target
6589 if it is a hard register, because evaluating the condition
6590 might clobber it. */
6591 if ((binary_op
6592 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6593 || (GET_CODE (temp) == REG
6594 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6595 temp = gen_reg_rtx (mode);
6596 store_expr (singleton, temp, 0);
6597 }
6598 else
906c4e36 6599 expand_expr (singleton,
2937cf87 6600 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5dab5552 6601 dest_left_flag = get_last_insn ();
bbf6f052
RK
6602 if (singleton == TREE_OPERAND (exp, 1))
6603 jumpif (TREE_OPERAND (exp, 0), op0);
6604 else
6605 jumpifnot (TREE_OPERAND (exp, 0), op0);
6606
0f41302f 6607 /* Allows cleanups up to here. */
5dab5552 6608 old_cleanups = cleanups_this_call;
bbf6f052
RK
6609 if (binary_op && temp == 0)
6610 /* Just touch the other operand. */
6611 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 6612 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6613 else if (binary_op)
6614 store_expr (build (TREE_CODE (binary_op), type,
6615 make_tree (type, temp),
6616 TREE_OPERAND (binary_op, 1)),
6617 temp, 0);
6618 else
6619 store_expr (build1 (TREE_CODE (unary_op), type,
6620 make_tree (type, temp)),
6621 temp, 0);
6622 op1 = op0;
5dab5552 6623 dest_right_flag = get_last_insn ();
bbf6f052
RK
6624 }
6625#if 0
6626 /* This is now done in jump.c and is better done there because it
6627 produces shorter register lifetimes. */
6628
6629 /* Check for both possibilities either constants or variables
6630 in registers (but not the same as the target!). If so, can
6631 save branches by assigning one, branching, and assigning the
6632 other. */
6633 else if (temp && GET_MODE (temp) != BLKmode
6634 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6635 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6636 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6637 && DECL_RTL (TREE_OPERAND (exp, 1))
6638 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6639 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6640 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6641 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6642 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6643 && DECL_RTL (TREE_OPERAND (exp, 2))
6644 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6645 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6646 {
6647 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6648 temp = gen_reg_rtx (mode);
6649 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5dab5552 6650 dest_left_flag = get_last_insn ();
bbf6f052 6651 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6652
0f41302f 6653 /* Allows cleanups up to here. */
5dab5552 6654 old_cleanups = cleanups_this_call;
bbf6f052
RK
6655 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6656 op1 = op0;
5dab5552 6657 dest_right_flag = get_last_insn ();
bbf6f052
RK
6658 }
6659#endif
6660 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6661 comparison operator. If we have one of these cases, set the
6662 output to A, branch on A (cse will merge these two references),
6663 then set the output to FOO. */
6664 else if (temp
6665 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6666 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6667 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6668 TREE_OPERAND (exp, 1), 0)
6669 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6670 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6671 {
6672 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6673 temp = gen_reg_rtx (mode);
6674 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5dab5552 6675 dest_left_flag = get_last_insn ();
bbf6f052 6676 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 6677
0f41302f 6678 /* Allows cleanups up to here. */
5dab5552 6679 old_cleanups = cleanups_this_call;
bbf6f052
RK
6680 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6681 op1 = op0;
5dab5552 6682 dest_right_flag = get_last_insn ();
bbf6f052
RK
6683 }
6684 else if (temp
6685 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6686 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6687 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6688 TREE_OPERAND (exp, 2), 0)
6689 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6690 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6691 {
6692 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6693 temp = gen_reg_rtx (mode);
6694 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5dab5552 6695 dest_left_flag = get_last_insn ();
bbf6f052 6696 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6697
0f41302f 6698 /* Allows cleanups up to here. */
5dab5552 6699 old_cleanups = cleanups_this_call;
bbf6f052
RK
6700 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6701 op1 = op0;
5dab5552 6702 dest_right_flag = get_last_insn ();
bbf6f052
RK
6703 }
6704 else
6705 {
6706 op1 = gen_label_rtx ();
6707 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6708
0f41302f 6709 /* Allows cleanups up to here. */
5dab5552 6710 old_cleanups = cleanups_this_call;
bbf6f052
RK
6711 if (temp != 0)
6712 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6713 else
906c4e36
RK
6714 expand_expr (TREE_OPERAND (exp, 1),
6715 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5dab5552
MS
6716 dest_left_flag = get_last_insn ();
6717
0f41302f 6718 /* Handle conditional cleanups, if any. */
5dab5552 6719 left_cleanups = defer_cleanups_to (old_cleanups);
bbf6f052
RK
6720
6721 emit_queue ();
6722 emit_jump_insn (gen_jump (op1));
6723 emit_barrier ();
6724 emit_label (op0);
6725 if (temp != 0)
6726 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6727 else
906c4e36
RK
6728 expand_expr (TREE_OPERAND (exp, 2),
6729 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5dab5552 6730 dest_right_flag = get_last_insn ();
bbf6f052
RK
6731 }
6732
0f41302f 6733 /* Handle conditional cleanups, if any. */
5dab5552 6734 right_cleanups = defer_cleanups_to (old_cleanups);
bbf6f052
RK
6735
6736 emit_queue ();
6737 emit_label (op1);
6738 OK_DEFER_POP;
5dab5552 6739
0f41302f 6740 /* Add back in, any conditional cleanups. */
5dab5552
MS
6741 if (left_cleanups || right_cleanups)
6742 {
6743 tree new_cleanups;
6744 tree cond;
6745 rtx last;
6746
6747 /* Now that we know that a flag is needed, go back and add in the
0f41302f 6748 setting of the flag. */
5dab5552 6749
0f41302f 6750 /* Do the left side flag. */
5dab5552 6751 last = get_last_insn ();
0f41302f 6752 /* Flag left cleanups as needed. */
5dab5552
MS
6753 emit_move_insn (flag, const1_rtx);
6754 /* ??? deprecated, use sequences instead. */
6755 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6756
0f41302f 6757 /* Do the right side flag. */
5dab5552 6758 last = get_last_insn ();
0f41302f 6759 /* Flag left cleanups as needed. */
5dab5552
MS
6760 emit_move_insn (flag, const0_rtx);
6761 /* ??? deprecated, use sequences instead. */
6762 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6763
9ba73d38
MS
6764 /* All cleanups must be on the function_obstack. */
6765 push_obstacks_nochange ();
6766 resume_temporary_allocation ();
6767
0f41302f 6768 /* convert flag, which is an rtx, into a tree. */
5dab5552
MS
6769 cond = make_node (RTL_EXPR);
6770 TREE_TYPE (cond) = integer_type_node;
6771 RTL_EXPR_RTL (cond) = flag;
6772 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
01842234 6773 cond = save_expr (cond);
5dab5552
MS
6774
6775 if (! left_cleanups)
6776 left_cleanups = integer_zero_node;
6777 if (! right_cleanups)
6778 right_cleanups = integer_zero_node;
fd67d2b6
JM
6779 new_cleanups = build (COND_EXPR, void_type_node,
6780 truthvalue_conversion (cond),
5dab5552
MS
6781 left_cleanups, right_cleanups);
6782 new_cleanups = fold (new_cleanups);
6783
9ba73d38
MS
6784 pop_obstacks ();
6785
3d195391 6786 /* Now add in the conditionalized cleanups. */
5dab5552
MS
6787 cleanups_this_call
6788 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3d195391 6789 expand_eh_region_start ();
5dab5552 6790 }
bbf6f052
RK
6791 return temp;
6792 }
6793
6794 case TARGET_EXPR:
6795 {
6796 /* Something needs to be initialized, but we didn't know
6797 where that thing was when building the tree. For example,
6798 it could be the return value of a function, or a parameter
6799 to a function which lays down in the stack, or a temporary
6800 variable which must be passed by reference.
6801
6802 We guarantee that the expression will either be constructed
6803 or copied into our original target. */
6804
6805 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 6806 tree cleanups = NULL_TREE;
5c062816 6807 tree exp1;
61d6b1cc 6808 rtx temp;
bbf6f052
RK
6809
6810 if (TREE_CODE (slot) != VAR_DECL)
6811 abort ();
6812
9c51f375
RK
6813 if (! ignore)
6814 target = original_target;
6815
bbf6f052
RK
6816 if (target == 0)
6817 {
6818 if (DECL_RTL (slot) != 0)
ac993f4f
MS
6819 {
6820 target = DECL_RTL (slot);
5c062816 6821 /* If we have already expanded the slot, so don't do
ac993f4f 6822 it again. (mrs) */
5c062816
MS
6823 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6824 return target;
ac993f4f 6825 }
bbf6f052
RK
6826 else
6827 {
06089a8b 6828 target = assign_temp (type, 2, 1, 1);
bbf6f052
RK
6829 /* All temp slots at this level must not conflict. */
6830 preserve_temp_slots (target);
6831 DECL_RTL (slot) = target;
bbf6f052 6832
e287fd6e
RK
6833 /* Since SLOT is not known to the called function
6834 to belong to its stack frame, we must build an explicit
6835 cleanup. This case occurs when we must build up a reference
6836 to pass the reference as an argument. In this case,
6837 it is very likely that such a reference need not be
6838 built here. */
6839
6840 if (TREE_OPERAND (exp, 2) == 0)
6841 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 6842 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 6843 }
bbf6f052
RK
6844 }
6845 else
6846 {
6847 /* This case does occur, when expanding a parameter which
6848 needs to be constructed on the stack. The target
6849 is the actual stack address that we want to initialize.
6850 The function we call will perform the cleanup in this case. */
6851
8c042b47
RS
6852 /* If we have already assigned it space, use that space,
6853 not target that we were passed in, as our target
6854 parameter is only a hint. */
6855 if (DECL_RTL (slot) != 0)
6856 {
6857 target = DECL_RTL (slot);
6858 /* If we have already expanded the slot, so don't do
6859 it again. (mrs) */
6860 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6861 return target;
6862 }
6863
bbf6f052
RK
6864 DECL_RTL (slot) = target;
6865 }
6866
4847c938 6867 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
6868 /* Mark it as expanded. */
6869 TREE_OPERAND (exp, 1) = NULL_TREE;
6870
41531e5b 6871 store_expr (exp1, target, 0);
61d6b1cc 6872
2a888d4c
MS
6873 if (cleanups)
6874 {
6875 cleanups_this_call = tree_cons (NULL_TREE,
6876 cleanups,
6877 cleanups_this_call);
3d195391 6878 expand_eh_region_start ();
2a888d4c 6879 }
61d6b1cc 6880
41531e5b 6881 return target;
bbf6f052
RK
6882 }
6883
6884 case INIT_EXPR:
6885 {
6886 tree lhs = TREE_OPERAND (exp, 0);
6887 tree rhs = TREE_OPERAND (exp, 1);
6888 tree noncopied_parts = 0;
6889 tree lhs_type = TREE_TYPE (lhs);
6890
6891 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6892 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6893 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6894 TYPE_NONCOPIED_PARTS (lhs_type));
6895 while (noncopied_parts != 0)
6896 {
6897 expand_assignment (TREE_VALUE (noncopied_parts),
6898 TREE_PURPOSE (noncopied_parts), 0, 0);
6899 noncopied_parts = TREE_CHAIN (noncopied_parts);
6900 }
6901 return temp;
6902 }
6903
6904 case MODIFY_EXPR:
6905 {
6906 /* If lhs is complex, expand calls in rhs before computing it.
6907 That's so we don't compute a pointer and save it over a call.
6908 If lhs is simple, compute it first so we can give it as a
6909 target if the rhs is just a call. This avoids an extra temp and copy
6910 and that prevents a partial-subsumption which makes bad code.
6911 Actually we could treat component_ref's of vars like vars. */
6912
6913 tree lhs = TREE_OPERAND (exp, 0);
6914 tree rhs = TREE_OPERAND (exp, 1);
6915 tree noncopied_parts = 0;
6916 tree lhs_type = TREE_TYPE (lhs);
6917
6918 temp = 0;
6919
6920 if (TREE_CODE (lhs) != VAR_DECL
6921 && TREE_CODE (lhs) != RESULT_DECL
6922 && TREE_CODE (lhs) != PARM_DECL)
6923 preexpand_calls (exp);
6924
6925 /* Check for |= or &= of a bitfield of size one into another bitfield
6926 of size 1. In this case, (unless we need the result of the
6927 assignment) we can do this more efficiently with a
6928 test followed by an assignment, if necessary.
6929
6930 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6931 things change so we do, this code should be enhanced to
6932 support it. */
6933 if (ignore
6934 && TREE_CODE (lhs) == COMPONENT_REF
6935 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6936 || TREE_CODE (rhs) == BIT_AND_EXPR)
6937 && TREE_OPERAND (rhs, 0) == lhs
6938 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6939 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6940 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6941 {
6942 rtx label = gen_label_rtx ();
6943
6944 do_jump (TREE_OPERAND (rhs, 1),
6945 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6946 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6947 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6948 (TREE_CODE (rhs) == BIT_IOR_EXPR
6949 ? integer_one_node
6950 : integer_zero_node)),
6951 0, 0);
e7c33f54 6952 do_pending_stack_adjust ();
bbf6f052
RK
6953 emit_label (label);
6954 return const0_rtx;
6955 }
6956
6957 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6958 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6959 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6960 TYPE_NONCOPIED_PARTS (lhs_type));
6961
6962 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6963 while (noncopied_parts != 0)
6964 {
6965 expand_assignment (TREE_PURPOSE (noncopied_parts),
6966 TREE_VALUE (noncopied_parts), 0, 0);
6967 noncopied_parts = TREE_CHAIN (noncopied_parts);
6968 }
6969 return temp;
6970 }
6971
6972 case PREINCREMENT_EXPR:
6973 case PREDECREMENT_EXPR:
7b8b9722 6974 return expand_increment (exp, 0, ignore);
bbf6f052
RK
6975
6976 case POSTINCREMENT_EXPR:
6977 case POSTDECREMENT_EXPR:
6978 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 6979 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
6980
6981 case ADDR_EXPR:
987c71d9 6982 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 6983 be a MEM corresponding to a stack slot. */
987c71d9
RK
6984 temp = 0;
6985
bbf6f052
RK
6986 /* Are we taking the address of a nested function? */
6987 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9
JM
6988 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
6989 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
bbf6f052
RK
6990 {
6991 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6992 op0 = force_operand (op0, target);
6993 }
682ba3a6
RK
6994 /* If we are taking the address of something erroneous, just
6995 return a zero. */
6996 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6997 return const0_rtx;
bbf6f052
RK
6998 else
6999 {
e287fd6e
RK
7000 /* We make sure to pass const0_rtx down if we came in with
7001 ignore set, to avoid doing the cleanups twice for something. */
7002 op0 = expand_expr (TREE_OPERAND (exp, 0),
7003 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
7004 (modifier == EXPAND_INITIALIZER
7005 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 7006
119af78a
RK
7007 /* If we are going to ignore the result, OP0 will have been set
7008 to const0_rtx, so just return it. Don't get confused and
7009 think we are taking the address of the constant. */
7010 if (ignore)
7011 return op0;
7012
3539e816
MS
7013 op0 = protect_from_queue (op0, 0);
7014
896102d0
RK
7015 /* We would like the object in memory. If it is a constant,
7016 we can have it be statically allocated into memory. For
682ba3a6 7017 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
896102d0
RK
7018 memory and store the value into it. */
7019
7020 if (CONSTANT_P (op0))
7021 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7022 op0);
987c71d9 7023 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
7024 {
7025 mark_temp_addr_taken (op0);
7026 temp = XEXP (op0, 0);
7027 }
896102d0 7028
682ba3a6
RK
7029 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7030 || GET_CODE (op0) == CONCAT)
896102d0
RK
7031 {
7032 /* If this object is in a register, it must be not
0f41302f 7033 be BLKmode. */
896102d0 7034 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 7035 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 7036
7a0b7b9a 7037 mark_temp_addr_taken (memloc);
896102d0
RK
7038 emit_move_insn (memloc, op0);
7039 op0 = memloc;
7040 }
7041
bbf6f052
RK
7042 if (GET_CODE (op0) != MEM)
7043 abort ();
7044
7045 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
7046 {
7047 temp = XEXP (op0, 0);
7048#ifdef POINTERS_EXTEND_UNSIGNED
7049 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7050 && mode == ptr_mode)
9fcfcce7 7051 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
7052#endif
7053 return temp;
7054 }
987c71d9 7055
bbf6f052
RK
7056 op0 = force_operand (XEXP (op0, 0), target);
7057 }
987c71d9 7058
bbf6f052 7059 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
7060 op0 = force_reg (Pmode, op0);
7061
dc6d66b3
RK
7062 if (GET_CODE (op0) == REG
7063 && ! REG_USERVAR_P (op0))
7064 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
7065
7066 /* If we might have had a temp slot, add an equivalent address
7067 for it. */
7068 if (temp != 0)
7069 update_temp_slot_address (temp, op0);
7070
88f63c77
RK
7071#ifdef POINTERS_EXTEND_UNSIGNED
7072 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7073 && mode == ptr_mode)
9fcfcce7 7074 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
7075#endif
7076
bbf6f052
RK
7077 return op0;
7078
7079 case ENTRY_VALUE_EXPR:
7080 abort ();
7081
7308a047
RS
7082 /* COMPLEX type for Extended Pascal & Fortran */
7083 case COMPLEX_EXPR:
7084 {
7085 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 7086 rtx insns;
7308a047
RS
7087
7088 /* Get the rtx code of the operands. */
7089 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7090 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7091
7092 if (! target)
7093 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7094
6551fa4d 7095 start_sequence ();
7308a047
RS
7096
7097 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
7098 emit_move_insn (gen_realpart (mode, target), op0);
7099 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 7100
6551fa4d
JW
7101 insns = get_insns ();
7102 end_sequence ();
7103
7308a047 7104 /* Complex construction should appear as a single unit. */
6551fa4d
JW
7105 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7106 each with a separate pseudo as destination.
7107 It's not correct for flow to treat them as a unit. */
6d6e61ce 7108 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7109 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7110 else
7111 emit_insns (insns);
7308a047
RS
7112
7113 return target;
7114 }
7115
7116 case REALPART_EXPR:
2d7050fd
RS
7117 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7118 return gen_realpart (mode, op0);
7308a047
RS
7119
7120 case IMAGPART_EXPR:
2d7050fd
RS
7121 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7122 return gen_imagpart (mode, op0);
7308a047
RS
7123
7124 case CONJ_EXPR:
7125 {
62acb978 7126 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 7127 rtx imag_t;
6551fa4d 7128 rtx insns;
7308a047
RS
7129
7130 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7131
7132 if (! target)
d6a5ac33 7133 target = gen_reg_rtx (mode);
7308a047 7134
6551fa4d 7135 start_sequence ();
7308a047
RS
7136
7137 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
7138 emit_move_insn (gen_realpart (partmode, target),
7139 gen_realpart (partmode, op0));
7308a047 7140
62acb978
RK
7141 imag_t = gen_imagpart (partmode, target);
7142 temp = expand_unop (partmode, neg_optab,
7143 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
7144 if (temp != imag_t)
7145 emit_move_insn (imag_t, temp);
7146
6551fa4d
JW
7147 insns = get_insns ();
7148 end_sequence ();
7149
d6a5ac33
RK
7150 /* Conjugate should appear as a single unit
7151 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
7152 each with a separate pseudo as destination.
7153 It's not correct for flow to treat them as a unit. */
6d6e61ce 7154 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7155 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7156 else
7157 emit_insns (insns);
7308a047
RS
7158
7159 return target;
7160 }
7161
bbf6f052 7162 case ERROR_MARK:
66538193
RS
7163 op0 = CONST0_RTX (tmode);
7164 if (op0 != 0)
7165 return op0;
bbf6f052
RK
7166 return const0_rtx;
7167
7168 default:
90764a87 7169 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
7170 }
7171
7172 /* Here to do an ordinary binary operator, generating an instruction
7173 from the optab already placed in `this_optab'. */
7174 binop:
7175 preexpand_calls (exp);
7176 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7177 subtarget = 0;
7178 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7179 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7180 binop2:
7181 temp = expand_binop (mode, this_optab, op0, op1, target,
7182 unsignedp, OPTAB_LIB_WIDEN);
7183 if (temp == 0)
7184 abort ();
7185 return temp;
7186}
bbf6f052 7187
bbf6f052 7188
0f41302f
MS
7189/* Emit bytecode to evaluate the given expression EXP to the stack. */
7190
ca695ac9
JB
7191void
7192bc_expand_expr (exp)
7193 tree exp;
bbf6f052 7194{
ca695ac9
JB
7195 enum tree_code code;
7196 tree type, arg0;
7197 rtx r;
7198 struct binary_operator *binoptab;
7199 struct unary_operator *unoptab;
7200 struct increment_operator *incroptab;
7201 struct bc_label *lab, *lab1;
7202 enum bytecode_opcode opcode;
7203
7204
7205 code = TREE_CODE (exp);
7206
7207 switch (code)
bbf6f052 7208 {
ca695ac9
JB
7209 case PARM_DECL:
7210
7211 if (DECL_RTL (exp) == 0)
bbf6f052 7212 {
ca695ac9
JB
7213 error_with_decl (exp, "prior parameter's size depends on `%s'");
7214 return;
bbf6f052 7215 }
ca695ac9
JB
7216
7217 bc_load_parmaddr (DECL_RTL (exp));
7218 bc_load_memory (TREE_TYPE (exp), exp);
7219
7220 return;
7221
7222 case VAR_DECL:
7223
7224 if (DECL_RTL (exp) == 0)
7225 abort ();
7226
7227#if 0
e7a42772 7228 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
7229 bc_load_externaddr (DECL_RTL (exp));
7230 else
7231 bc_load_localaddr (DECL_RTL (exp));
7232#endif
7233 if (TREE_PUBLIC (exp))
e7a42772
JB
7234 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7235 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
ca695ac9
JB
7236 else
7237 bc_load_localaddr (DECL_RTL (exp));
7238
7239 bc_load_memory (TREE_TYPE (exp), exp);
7240 return;
7241
7242 case INTEGER_CST:
7243
7244#ifdef DEBUG_PRINT_CODE
7245 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7246#endif
6bd6178d 7247 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
ca695ac9 7248 ? SImode
6bd6178d 7249 : TYPE_MODE (TREE_TYPE (exp)))],
ca695ac9
JB
7250 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7251 return;
7252
7253 case REAL_CST:
7254
c02bd5d9 7255#if 0
ca695ac9
JB
7256#ifdef DEBUG_PRINT_CODE
7257 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7258#endif
c02bd5d9 7259 /* FIX THIS: find a better way to pass real_cst's. -bson */
ca695ac9
JB
7260 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7261 (double) TREE_REAL_CST (exp));
c02bd5d9
JB
7262#else
7263 abort ();
7264#endif
7265
ca695ac9
JB
7266 return;
7267
7268 case CALL_EXPR:
7269
7270 /* We build a call description vector describing the type of
7271 the return value and of the arguments; this call vector,
7272 together with a pointer to a location for the return value
7273 and the base of the argument list, is passed to the low
7274 level machine dependent call subroutine, which is responsible
7275 for putting the arguments wherever real functions expect
7276 them, as well as getting the return value back. */
7277 {
7278 tree calldesc = 0, arg;
7279 int nargs = 0, i;
7280 rtx retval;
7281
7282 /* Push the evaluated args on the evaluation stack in reverse
7283 order. Also make an entry for each arg in the calldesc
7284 vector while we're at it. */
7285
7286 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7287
7288 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7289 {
7290 ++nargs;
7291 bc_expand_expr (TREE_VALUE (arg));
7292
7293 calldesc = tree_cons ((tree) 0,
7294 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7295 calldesc);
7296 calldesc = tree_cons ((tree) 0,
7297 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7298 calldesc);
7299 }
7300
7301 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7302
7303 /* Allocate a location for the return value and push its
7304 address on the evaluation stack. Also make an entry
0f41302f 7305 at the front of the calldesc for the return value type. */
ca695ac9
JB
7306
7307 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7308 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7309 bc_load_localaddr (retval);
7310
7311 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7312 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7313
7314 /* Prepend the argument count. */
7315 calldesc = tree_cons ((tree) 0,
7316 build_int_2 (nargs, 0),
7317 calldesc);
7318
7319 /* Push the address of the call description vector on the stack. */
7320 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7321 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7322 build_index_type (build_int_2 (nargs * 2, 0)));
7323 r = output_constant_def (calldesc);
7324 bc_load_externaddr (r);
7325
0f41302f 7326 /* Push the address of the function to be called. */
ca695ac9
JB
7327 bc_expand_expr (TREE_OPERAND (exp, 0));
7328
7329 /* Call the function, popping its address and the calldesc vector
7330 address off the evaluation stack in the process. */
7331 bc_emit_instruction (call);
7332
7333 /* Pop the arguments off the stack. */
7334 bc_adjust_stack (nargs);
7335
7336 /* Load the return value onto the stack. */
7337 bc_load_localaddr (retval);
7338 bc_load_memory (type, TREE_OPERAND (exp, 0));
7339 }
7340 return;
7341
7342 case SAVE_EXPR:
7343
7344 if (!SAVE_EXPR_RTL (exp))
bbf6f052 7345 {
ca695ac9
JB
7346 /* First time around: copy to local variable */
7347 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7348 TYPE_ALIGN (TREE_TYPE(exp)));
7349 bc_expand_expr (TREE_OPERAND (exp, 0));
6d6e61ce 7350 bc_emit_instruction (duplicate);
ca695ac9
JB
7351
7352 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7353 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 7354 }
ca695ac9 7355 else
bbf6f052 7356 {
ca695ac9
JB
7357 /* Consecutive reference: use saved copy */
7358 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7359 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 7360 }
ca695ac9
JB
7361 return;
7362
7363#if 0
7364 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7365 how are they handled instead? */
7366 case LET_STMT:
7367
7368 TREE_USED (exp) = 1;
7369 bc_expand_expr (STMT_BODY (exp));
7370 return;
7371#endif
7372
7373 case NOP_EXPR:
7374 case CONVERT_EXPR:
7375
7376 bc_expand_expr (TREE_OPERAND (exp, 0));
7377 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7378 return;
7379
7380 case MODIFY_EXPR:
7381
c02bd5d9 7382 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
ca695ac9
JB
7383 return;
7384
7385 case ADDR_EXPR:
7386
7387 bc_expand_address (TREE_OPERAND (exp, 0));
7388 return;
7389
7390 case INDIRECT_REF:
7391
7392 bc_expand_expr (TREE_OPERAND (exp, 0));
7393 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7394 return;
7395
7396 case ARRAY_REF:
7397
7398 bc_expand_expr (bc_canonicalize_array_ref (exp));
7399 return;
7400
7401 case COMPONENT_REF:
7402
7403 bc_expand_component_address (exp);
7404
7405 /* If we have a bitfield, generate a proper load */
7406 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7407 return;
7408
7409 case COMPOUND_EXPR:
7410
7411 bc_expand_expr (TREE_OPERAND (exp, 0));
7412 bc_emit_instruction (drop);
7413 bc_expand_expr (TREE_OPERAND (exp, 1));
7414 return;
7415
7416 case COND_EXPR:
7417
7418 bc_expand_expr (TREE_OPERAND (exp, 0));
7419 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7420 lab = bc_get_bytecode_label ();
c02bd5d9 7421 bc_emit_bytecode (xjumpifnot);
ca695ac9
JB
7422 bc_emit_bytecode_labelref (lab);
7423
7424#ifdef DEBUG_PRINT_CODE
7425 fputc ('\n', stderr);
7426#endif
7427 bc_expand_expr (TREE_OPERAND (exp, 1));
7428 lab1 = bc_get_bytecode_label ();
7429 bc_emit_bytecode (jump);
7430 bc_emit_bytecode_labelref (lab1);
7431
7432#ifdef DEBUG_PRINT_CODE
7433 fputc ('\n', stderr);
7434#endif
7435
7436 bc_emit_bytecode_labeldef (lab);
7437 bc_expand_expr (TREE_OPERAND (exp, 2));
7438 bc_emit_bytecode_labeldef (lab1);
7439 return;
7440
7441 case TRUTH_ANDIF_EXPR:
7442
c02bd5d9 7443 opcode = xjumpifnot;
ca695ac9
JB
7444 goto andorif;
7445
7446 case TRUTH_ORIF_EXPR:
7447
c02bd5d9 7448 opcode = xjumpif;
ca695ac9
JB
7449 goto andorif;
7450
7451 case PLUS_EXPR:
7452
7453 binoptab = optab_plus_expr;
7454 goto binop;
7455
7456 case MINUS_EXPR:
7457
7458 binoptab = optab_minus_expr;
7459 goto binop;
7460
7461 case MULT_EXPR:
7462
7463 binoptab = optab_mult_expr;
7464 goto binop;
7465
7466 case TRUNC_DIV_EXPR:
7467 case FLOOR_DIV_EXPR:
7468 case CEIL_DIV_EXPR:
7469 case ROUND_DIV_EXPR:
7470 case EXACT_DIV_EXPR:
7471
7472 binoptab = optab_trunc_div_expr;
7473 goto binop;
7474
7475 case TRUNC_MOD_EXPR:
7476 case FLOOR_MOD_EXPR:
7477 case CEIL_MOD_EXPR:
7478 case ROUND_MOD_EXPR:
7479
7480 binoptab = optab_trunc_mod_expr;
7481 goto binop;
7482
7483 case FIX_ROUND_EXPR:
7484 case FIX_FLOOR_EXPR:
7485 case FIX_CEIL_EXPR:
7486 abort (); /* Not used for C. */
7487
7488 case FIX_TRUNC_EXPR:
7489 case FLOAT_EXPR:
7490 case MAX_EXPR:
7491 case MIN_EXPR:
7492 case FFS_EXPR:
7493 case LROTATE_EXPR:
7494 case RROTATE_EXPR:
7495 abort (); /* FIXME */
7496
7497 case RDIV_EXPR:
7498
7499 binoptab = optab_rdiv_expr;
7500 goto binop;
7501
7502 case BIT_AND_EXPR:
7503
7504 binoptab = optab_bit_and_expr;
7505 goto binop;
7506
7507 case BIT_IOR_EXPR:
7508
7509 binoptab = optab_bit_ior_expr;
7510 goto binop;
7511
7512 case BIT_XOR_EXPR:
7513
7514 binoptab = optab_bit_xor_expr;
7515 goto binop;
7516
7517 case LSHIFT_EXPR:
7518
7519 binoptab = optab_lshift_expr;
7520 goto binop;
7521
7522 case RSHIFT_EXPR:
7523
7524 binoptab = optab_rshift_expr;
7525 goto binop;
7526
7527 case TRUTH_AND_EXPR:
7528
7529 binoptab = optab_truth_and_expr;
7530 goto binop;
7531
7532 case TRUTH_OR_EXPR:
7533
7534 binoptab = optab_truth_or_expr;
7535 goto binop;
7536
7537 case LT_EXPR:
7538
7539 binoptab = optab_lt_expr;
7540 goto binop;
7541
7542 case LE_EXPR:
7543
7544 binoptab = optab_le_expr;
7545 goto binop;
7546
7547 case GE_EXPR:
7548
7549 binoptab = optab_ge_expr;
7550 goto binop;
7551
7552 case GT_EXPR:
7553
7554 binoptab = optab_gt_expr;
7555 goto binop;
7556
7557 case EQ_EXPR:
7558
7559 binoptab = optab_eq_expr;
7560 goto binop;
7561
7562 case NE_EXPR:
7563
7564 binoptab = optab_ne_expr;
7565 goto binop;
7566
7567 case NEGATE_EXPR:
7568
7569 unoptab = optab_negate_expr;
7570 goto unop;
7571
7572 case BIT_NOT_EXPR:
7573
7574 unoptab = optab_bit_not_expr;
7575 goto unop;
7576
7577 case TRUTH_NOT_EXPR:
7578
7579 unoptab = optab_truth_not_expr;
7580 goto unop;
7581
7582 case PREDECREMENT_EXPR:
7583
7584 incroptab = optab_predecrement_expr;
7585 goto increment;
7586
7587 case PREINCREMENT_EXPR:
7588
7589 incroptab = optab_preincrement_expr;
7590 goto increment;
7591
7592 case POSTDECREMENT_EXPR:
7593
7594 incroptab = optab_postdecrement_expr;
7595 goto increment;
7596
7597 case POSTINCREMENT_EXPR:
7598
7599 incroptab = optab_postincrement_expr;
7600 goto increment;
7601
7602 case CONSTRUCTOR:
7603
7604 bc_expand_constructor (exp);
7605 return;
7606
7607 case ERROR_MARK:
7608 case RTL_EXPR:
7609
7610 return;
7611
7612 case BIND_EXPR:
7613 {
7614 tree vars = TREE_OPERAND (exp, 0);
7615 int vars_need_expansion = 0;
7616
7617 /* Need to open a binding contour here because
7618 if there are any cleanups they most be contained here. */
7619 expand_start_bindings (0);
7620
7621 /* Mark the corresponding BLOCK for output. */
7622 if (TREE_OPERAND (exp, 2) != 0)
7623 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7624
7625 /* If VARS have not yet been expanded, expand them now. */
7626 while (vars)
7627 {
7628 if (DECL_RTL (vars) == 0)
7629 {
7630 vars_need_expansion = 1;
9bac07c3 7631 expand_decl (vars);
ca695ac9 7632 }
9bac07c3 7633 expand_decl_init (vars);
ca695ac9
JB
7634 vars = TREE_CHAIN (vars);
7635 }
7636
7637 bc_expand_expr (TREE_OPERAND (exp, 1));
7638
7639 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7640
7641 return;
7642 }
7643 }
7644
7645 abort ();
7646
7647 binop:
7648
7649 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7650 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7651 return;
7652
7653
7654 unop:
7655
7656 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7657 return;
7658
7659
7660 andorif:
7661
7662 bc_expand_expr (TREE_OPERAND (exp, 0));
7663 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7664 lab = bc_get_bytecode_label ();
7665
6d6e61ce 7666 bc_emit_instruction (duplicate);
ca695ac9
JB
7667 bc_emit_bytecode (opcode);
7668 bc_emit_bytecode_labelref (lab);
7669
7670#ifdef DEBUG_PRINT_CODE
7671 fputc ('\n', stderr);
7672#endif
7673
7674 bc_emit_instruction (drop);
7675
7676 bc_expand_expr (TREE_OPERAND (exp, 1));
7677 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7678 bc_emit_bytecode_labeldef (lab);
7679 return;
7680
7681
7682 increment:
7683
7684 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7685
7686 /* Push the quantum. */
7687 bc_expand_expr (TREE_OPERAND (exp, 1));
7688
7689 /* Convert it to the lvalue's type. */
7690 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7691
7692 /* Push the address of the lvalue */
c02bd5d9 7693 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
ca695ac9
JB
7694
7695 /* Perform actual increment */
c02bd5d9 7696 bc_expand_increment (incroptab, type);
ca695ac9
JB
7697 return;
7698}
7699\f
7700/* Return the alignment in bits of EXP, a pointer valued expression.
7701 But don't return more than MAX_ALIGN no matter what.
7702 The alignment returned is, by default, the alignment of the thing that
7703 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7704
7705 Otherwise, look at the expression to see if we can do better, i.e., if the
7706 expression is actually pointing at an object whose alignment is tighter. */
7707
7708static int
7709get_pointer_alignment (exp, max_align)
7710 tree exp;
7711 unsigned max_align;
7712{
7713 unsigned align, inner;
7714
7715 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7716 return 0;
7717
7718 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7719 align = MIN (align, max_align);
7720
7721 while (1)
7722 {
7723 switch (TREE_CODE (exp))
7724 {
7725 case NOP_EXPR:
7726 case CONVERT_EXPR:
7727 case NON_LVALUE_EXPR:
7728 exp = TREE_OPERAND (exp, 0);
7729 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7730 return align;
7731 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8dc2fbcf 7732 align = MIN (inner, max_align);
ca695ac9
JB
7733 break;
7734
7735 case PLUS_EXPR:
7736 /* If sum of pointer + int, restrict our maximum alignment to that
7737 imposed by the integer. If not, we can't do any better than
7738 ALIGN. */
7739 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7740 return align;
7741
7742 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7743 & (max_align - 1))
7744 != 0)
7745 max_align >>= 1;
7746
7747 exp = TREE_OPERAND (exp, 0);
7748 break;
7749
7750 case ADDR_EXPR:
7751 /* See what we are pointing at and look at its alignment. */
7752 exp = TREE_OPERAND (exp, 0);
7753 if (TREE_CODE (exp) == FUNCTION_DECL)
8dc2fbcf 7754 align = FUNCTION_BOUNDARY;
ca695ac9 7755 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8dc2fbcf 7756 align = DECL_ALIGN (exp);
ca695ac9
JB
7757#ifdef CONSTANT_ALIGNMENT
7758 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7759 align = CONSTANT_ALIGNMENT (exp, align);
7760#endif
7761 return MIN (align, max_align);
7762
7763 default:
7764 return align;
7765 }
7766 }
7767}
7768\f
7769/* Return the tree node and offset if a given argument corresponds to
7770 a string constant. */
7771
7772static tree
7773string_constant (arg, ptr_offset)
7774 tree arg;
7775 tree *ptr_offset;
7776{
7777 STRIP_NOPS (arg);
7778
7779 if (TREE_CODE (arg) == ADDR_EXPR
7780 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7781 {
7782 *ptr_offset = integer_zero_node;
7783 return TREE_OPERAND (arg, 0);
7784 }
7785 else if (TREE_CODE (arg) == PLUS_EXPR)
7786 {
7787 tree arg0 = TREE_OPERAND (arg, 0);
7788 tree arg1 = TREE_OPERAND (arg, 1);
7789
7790 STRIP_NOPS (arg0);
7791 STRIP_NOPS (arg1);
7792
7793 if (TREE_CODE (arg0) == ADDR_EXPR
7794 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7795 {
7796 *ptr_offset = arg1;
7797 return TREE_OPERAND (arg0, 0);
7798 }
7799 else if (TREE_CODE (arg1) == ADDR_EXPR
7800 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7801 {
7802 *ptr_offset = arg0;
7803 return TREE_OPERAND (arg1, 0);
7804 }
7805 }
7806
7807 return 0;
7808}
7809
7810/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7811 way, because it could contain a zero byte in the middle.
7812 TREE_STRING_LENGTH is the size of the character array, not the string.
7813
7814 Unfortunately, string_constant can't access the values of const char
7815 arrays with initializers, so neither can we do so here. */
7816
7817static tree
7818c_strlen (src)
7819 tree src;
7820{
7821 tree offset_node;
7822 int offset, max;
7823 char *ptr;
7824
7825 src = string_constant (src, &offset_node);
7826 if (src == 0)
7827 return 0;
7828 max = TREE_STRING_LENGTH (src);
7829 ptr = TREE_STRING_POINTER (src);
7830 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7831 {
7832 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7833 compute the offset to the following null if we don't know where to
7834 start searching for it. */
7835 int i;
7836 for (i = 0; i < max; i++)
7837 if (ptr[i] == 0)
7838 return 0;
7839 /* We don't know the starting offset, but we do know that the string
7840 has no internal zero bytes. We can assume that the offset falls
7841 within the bounds of the string; otherwise, the programmer deserves
7842 what he gets. Subtract the offset from the length of the string,
7843 and return that. */
7844 /* This would perhaps not be valid if we were dealing with named
7845 arrays in addition to literal string constants. */
7846 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7847 }
7848
7849 /* We have a known offset into the string. Start searching there for
7850 a null character. */
7851 if (offset_node == 0)
7852 offset = 0;
7853 else
7854 {
7855 /* Did we get a long long offset? If so, punt. */
7856 if (TREE_INT_CST_HIGH (offset_node) != 0)
7857 return 0;
7858 offset = TREE_INT_CST_LOW (offset_node);
7859 }
7860 /* If the offset is known to be out of bounds, warn, and call strlen at
7861 runtime. */
7862 if (offset < 0 || offset > max)
7863 {
7864 warning ("offset outside bounds of constant string");
7865 return 0;
7866 }
7867 /* Use strlen to search for the first zero byte. Since any strings
7868 constructed with build_string will have nulls appended, we win even
7869 if we get handed something like (char[4])"abcd".
7870
7871 Since OFFSET is our starting index into the string, no further
7872 calculation is needed. */
7873 return size_int (strlen (ptr + offset));
7874}
2bbf216f
RK
7875
7876rtx
7877expand_builtin_return_addr (fndecl_code, count, tem)
7878 enum built_in_function fndecl_code;
2bbf216f 7879 int count;
75bb87ea 7880 rtx tem;
2bbf216f
RK
7881{
7882 int i;
7883
7884 /* Some machines need special handling before we can access
7885 arbitrary frames. For example, on the sparc, we must first flush
7886 all register windows to the stack. */
7887#ifdef SETUP_FRAME_ADDRESSES
7888 SETUP_FRAME_ADDRESSES ();
7889#endif
7890
7891 /* On the sparc, the return address is not in the frame, it is in a
7892 register. There is no way to access it off of the current frame
7893 pointer, but it can be accessed off the previous frame pointer by
7894 reading the value from the register window save area. */
7895#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7896 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7897 count--;
7898#endif
7899
7900 /* Scan back COUNT frames to the specified frame. */
7901 for (i = 0; i < count; i++)
7902 {
7903 /* Assume the dynamic chain pointer is in the word that the
7904 frame address points to, unless otherwise specified. */
7905#ifdef DYNAMIC_CHAIN_ADDRESS
7906 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7907#endif
7908 tem = memory_address (Pmode, tem);
7909 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7910 }
7911
7912 /* For __builtin_frame_address, return what we've got. */
7913 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7914 return tem;
7915
7916 /* For __builtin_return_address, Get the return address from that
7917 frame. */
7918#ifdef RETURN_ADDR_RTX
7919 tem = RETURN_ADDR_RTX (count, tem);
7920#else
7921 tem = memory_address (Pmode,
7922 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7923 tem = gen_rtx (MEM, Pmode, tem);
7924#endif
0ebba7fc 7925 return tem;
2bbf216f 7926}
ca695ac9
JB
7927\f
7928/* Expand an expression EXP that calls a built-in function,
7929 with result going to TARGET if that's convenient
7930 (and in mode MODE if that's convenient).
7931 SUBTARGET may be used as the target for computing one of EXP's operands.
7932 IGNORE is nonzero if the value is to be ignored. */
7933
98aad286
RK
7934#define CALLED_AS_BUILT_IN(NODE) \
7935 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7936
ca695ac9
JB
7937static rtx
7938expand_builtin (exp, target, subtarget, mode, ignore)
7939 tree exp;
7940 rtx target;
7941 rtx subtarget;
7942 enum machine_mode mode;
7943 int ignore;
7944{
7945 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7946 tree arglist = TREE_OPERAND (exp, 1);
7947 rtx op0;
7948 rtx lab1, insns;
7949 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7950 optab builtin_optab;
7951
7952 switch (DECL_FUNCTION_CODE (fndecl))
7953 {
7954 case BUILT_IN_ABS:
7955 case BUILT_IN_LABS:
7956 case BUILT_IN_FABS:
7957 /* build_function_call changes these into ABS_EXPR. */
7958 abort ();
7959
7960 case BUILT_IN_SIN:
7961 case BUILT_IN_COS:
0f41302f 7962 /* Treat these like sqrt, but only if the user asks for them. */
ba558a85
RK
7963 if (! flag_fast_math)
7964 break;
ca695ac9
JB
7965 case BUILT_IN_FSQRT:
7966 /* If not optimizing, call the library function. */
7967 if (! optimize)
7968 break;
7969
7970 if (arglist == 0
7971 /* Arg could be wrong type if user redeclared this fcn wrong. */
7972 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7b073ca6 7973 break;
ca695ac9
JB
7974
7975 /* Stabilize and compute the argument. */
7976 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7977 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7978 {
7979 exp = copy_node (exp);
7980 arglist = copy_node (arglist);
7981 TREE_OPERAND (exp, 1) = arglist;
7982 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7983 }
7984 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7985
7986 /* Make a suitable register to place result in. */
7987 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7988
7989 emit_queue ();
7990 start_sequence ();
7991
7992 switch (DECL_FUNCTION_CODE (fndecl))
7993 {
7994 case BUILT_IN_SIN:
7995 builtin_optab = sin_optab; break;
7996 case BUILT_IN_COS:
7997 builtin_optab = cos_optab; break;
7998 case BUILT_IN_FSQRT:
7999 builtin_optab = sqrt_optab; break;
8000 default:
8001 abort ();
8002 }
8003
8004 /* Compute into TARGET.
8005 Set TARGET to wherever the result comes back. */
8006 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8007 builtin_optab, op0, target, 0);
8008
8009 /* If we were unable to expand via the builtin, stop the
8010 sequence (without outputting the insns) and break, causing
8011 a call the the library function. */
8012 if (target == 0)
8013 {
8014 end_sequence ();
8015 break;
8016 }
8017
8018 /* Check the results by default. But if flag_fast_math is turned on,
8019 then assume sqrt will always be called with valid arguments. */
8020
8021 if (! flag_fast_math)
8022 {
8023 /* Don't define the builtin FP instructions
8024 if your machine is not IEEE. */
8025 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8026 abort ();
8027
8028 lab1 = gen_label_rtx ();
8029
8030 /* Test the result; if it is NaN, set errno=EDOM because
8031 the argument was not in the domain. */
8032 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8033 emit_jump_insn (gen_beq (lab1));
8034
4ac09687 8035#ifdef TARGET_EDOM
ca695ac9
JB
8036 {
8037#ifdef GEN_ERRNO_RTX
8038 rtx errno_rtx = GEN_ERRNO_RTX;
8039#else
8040 rtx errno_rtx
e74a2201 8041 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
ca695ac9
JB
8042#endif
8043
8044 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8045 }
8046#else
8047 /* We can't set errno=EDOM directly; let the library call do it.
0f41302f 8048 Pop the arguments right away in case the call gets deleted. */
ca695ac9
JB
8049 NO_DEFER_POP;
8050 expand_call (exp, target, 0);
8051 OK_DEFER_POP;
8052#endif
8053
8054 emit_label (lab1);
8055 }
8056
0f41302f 8057 /* Output the entire sequence. */
ca695ac9
JB
8058 insns = get_insns ();
8059 end_sequence ();
8060 emit_insns (insns);
8061
8062 return target;
8063
8064 /* __builtin_apply_args returns block of memory allocated on
8065 the stack into which is stored the arg pointer, structure
8066 value address, static chain, and all the registers that might
8067 possibly be used in performing a function call. The code is
8068 moved to the start of the function so the incoming values are
8069 saved. */
8070 case BUILT_IN_APPLY_ARGS:
8071 /* Don't do __builtin_apply_args more than once in a function.
8072 Save the result of the first call and reuse it. */
8073 if (apply_args_value != 0)
8074 return apply_args_value;
8075 {
8076 /* When this function is called, it means that registers must be
8077 saved on entry to this function. So we migrate the
8078 call to the first insn of this function. */
8079 rtx temp;
8080 rtx seq;
8081
8082 start_sequence ();
8083 temp = expand_builtin_apply_args ();
8084 seq = get_insns ();
8085 end_sequence ();
8086
8087 apply_args_value = temp;
8088
8089 /* Put the sequence after the NOTE that starts the function.
8090 If this is inside a SEQUENCE, make the outer-level insn
8091 chain current, so the code is placed at the start of the
8092 function. */
8093 push_topmost_sequence ();
8094 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8095 pop_topmost_sequence ();
8096 return temp;
8097 }
8098
8099 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8100 FUNCTION with a copy of the parameters described by
8101 ARGUMENTS, and ARGSIZE. It returns a block of memory
8102 allocated on the stack into which is stored all the registers
8103 that might possibly be used for returning the result of a
8104 function. ARGUMENTS is the value returned by
8105 __builtin_apply_args. ARGSIZE is the number of bytes of
8106 arguments that must be copied. ??? How should this value be
8107 computed? We'll also need a safe worst case value for varargs
8108 functions. */
8109 case BUILT_IN_APPLY:
8110 if (arglist == 0
8111 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8112 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8113 || TREE_CHAIN (arglist) == 0
8114 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8115 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8116 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8117 return const0_rtx;
8118 else
8119 {
8120 int i;
8121 tree t;
8122 rtx ops[3];
8123
8124 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8125 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8126
8127 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8128 }
8129
8130 /* __builtin_return (RESULT) causes the function to return the
8131 value described by RESULT. RESULT is address of the block of
8132 memory returned by __builtin_apply. */
8133 case BUILT_IN_RETURN:
8134 if (arglist
8135 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8136 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8137 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8138 NULL_RTX, VOIDmode, 0));
8139 return const0_rtx;
8140
8141 case BUILT_IN_SAVEREGS:
8142 /* Don't do __builtin_saveregs more than once in a function.
8143 Save the result of the first call and reuse it. */
8144 if (saveregs_value != 0)
8145 return saveregs_value;
8146 {
8147 /* When this function is called, it means that registers must be
8148 saved on entry to this function. So we migrate the
8149 call to the first insn of this function. */
8150 rtx temp;
8151 rtx seq;
ca695ac9
JB
8152
8153 /* Now really call the function. `expand_call' does not call
8154 expand_builtin, so there is no danger of infinite recursion here. */
8155 start_sequence ();
8156
8157#ifdef EXPAND_BUILTIN_SAVEREGS
8158 /* Do whatever the machine needs done in this case. */
8159 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8160#else
8161 /* The register where the function returns its value
8162 is likely to have something else in it, such as an argument.
8163 So preserve that register around the call. */
d0c76654 8164
ca695ac9
JB
8165 if (value_mode != VOIDmode)
8166 {
d0c76654
RK
8167 rtx valreg = hard_libcall_value (value_mode);
8168 rtx saved_valreg = gen_reg_rtx (value_mode);
8169
ca695ac9 8170 emit_move_insn (saved_valreg, valreg);
d0c76654
RK
8171 temp = expand_call (exp, target, ignore);
8172 emit_move_insn (valreg, saved_valreg);
ca695ac9 8173 }
d0c76654
RK
8174 else
8175 /* Generate the call, putting the value in a pseudo. */
8176 temp = expand_call (exp, target, ignore);
ca695ac9
JB
8177#endif
8178
8179 seq = get_insns ();
8180 end_sequence ();
8181
8182 saveregs_value = temp;
8183
8184 /* Put the sequence after the NOTE that starts the function.
8185 If this is inside a SEQUENCE, make the outer-level insn
8186 chain current, so the code is placed at the start of the
8187 function. */
8188 push_topmost_sequence ();
8189 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8190 pop_topmost_sequence ();
8191 return temp;
8192 }
8193
8194 /* __builtin_args_info (N) returns word N of the arg space info
8195 for the current function. The number and meanings of words
8196 is controlled by the definition of CUMULATIVE_ARGS. */
8197 case BUILT_IN_ARGS_INFO:
8198 {
8199 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8200 int i;
8201 int *word_ptr = (int *) &current_function_args_info;
8202 tree type, elts, result;
8203
8204 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8205 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8206 __FILE__, __LINE__);
8207
8208 if (arglist != 0)
8209 {
8210 tree arg = TREE_VALUE (arglist);
8211 if (TREE_CODE (arg) != INTEGER_CST)
8212 error ("argument of `__builtin_args_info' must be constant");
8213 else
8214 {
8215 int wordnum = TREE_INT_CST_LOW (arg);
8216
8217 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8218 error ("argument of `__builtin_args_info' out of range");
8219 else
8220 return GEN_INT (word_ptr[wordnum]);
8221 }
8222 }
8223 else
8224 error ("missing argument in `__builtin_args_info'");
8225
8226 return const0_rtx;
8227
8228#if 0
8229 for (i = 0; i < nwords; i++)
8230 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8231
8232 type = build_array_type (integer_type_node,
8233 build_index_type (build_int_2 (nwords, 0)));
8234 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8235 TREE_CONSTANT (result) = 1;
8236 TREE_STATIC (result) = 1;
8237 result = build (INDIRECT_REF, build_pointer_type (type), result);
8238 TREE_CONSTANT (result) = 1;
8239 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8240#endif
8241 }
8242
17bbab26 8243 /* Return the address of the first anonymous stack arg. */
ca695ac9
JB
8244 case BUILT_IN_NEXT_ARG:
8245 {
8246 tree fntype = TREE_TYPE (current_function_decl);
c4dfe0fc 8247
33162beb
DE
8248 if ((TYPE_ARG_TYPES (fntype) == 0
8249 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8250 == void_type_node))
8251 && ! current_function_varargs)
ca695ac9
JB
8252 {
8253 error ("`va_start' used in function with fixed args");
8254 return const0_rtx;
8255 }
c4dfe0fc 8256
e4493c04
RK
8257 if (arglist)
8258 {
8259 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8260 tree arg = TREE_VALUE (arglist);
8261
8262 /* Strip off all nops for the sake of the comparison. This
6692a31f
RK
8263 is not quite the same as STRIP_NOPS. It does more.
8264 We must also strip off INDIRECT_EXPR for C++ reference
8265 parameters. */
e4493c04
RK
8266 while (TREE_CODE (arg) == NOP_EXPR
8267 || TREE_CODE (arg) == CONVERT_EXPR
6692a31f
RK
8268 || TREE_CODE (arg) == NON_LVALUE_EXPR
8269 || TREE_CODE (arg) == INDIRECT_REF)
e4493c04
RK
8270 arg = TREE_OPERAND (arg, 0);
8271 if (arg != last_parm)
8272 warning ("second parameter of `va_start' not last named argument");
8273 }
5b4ff0de 8274 else if (! current_function_varargs)
e4493c04
RK
8275 /* Evidently an out of date version of <stdarg.h>; can't validate
8276 va_start's second argument, but can still work as intended. */
8277 warning ("`__builtin_next_arg' called without an argument");
ca695ac9
JB
8278 }
8279
8280 return expand_binop (Pmode, add_optab,
8281 current_function_internal_arg_pointer,
8282 current_function_arg_offset_rtx,
8283 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8284
8285 case BUILT_IN_CLASSIFY_TYPE:
8286 if (arglist != 0)
8287 {
8288 tree type = TREE_TYPE (TREE_VALUE (arglist));
8289 enum tree_code code = TREE_CODE (type);
8290 if (code == VOID_TYPE)
8291 return GEN_INT (void_type_class);
8292 if (code == INTEGER_TYPE)
8293 return GEN_INT (integer_type_class);
8294 if (code == CHAR_TYPE)
8295 return GEN_INT (char_type_class);
8296 if (code == ENUMERAL_TYPE)
8297 return GEN_INT (enumeral_type_class);
8298 if (code == BOOLEAN_TYPE)
8299 return GEN_INT (boolean_type_class);
8300 if (code == POINTER_TYPE)
8301 return GEN_INT (pointer_type_class);
8302 if (code == REFERENCE_TYPE)
8303 return GEN_INT (reference_type_class);
8304 if (code == OFFSET_TYPE)
8305 return GEN_INT (offset_type_class);
8306 if (code == REAL_TYPE)
8307 return GEN_INT (real_type_class);
8308 if (code == COMPLEX_TYPE)
8309 return GEN_INT (complex_type_class);
8310 if (code == FUNCTION_TYPE)
8311 return GEN_INT (function_type_class);
8312 if (code == METHOD_TYPE)
8313 return GEN_INT (method_type_class);
8314 if (code == RECORD_TYPE)
8315 return GEN_INT (record_type_class);
8316 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8317 return GEN_INT (union_type_class);
8318 if (code == ARRAY_TYPE)
4042d440
PB
8319 {
8320 if (TYPE_STRING_FLAG (type))
8321 return GEN_INT (string_type_class);
8322 else
8323 return GEN_INT (array_type_class);
8324 }
ca695ac9
JB
8325 if (code == SET_TYPE)
8326 return GEN_INT (set_type_class);
8327 if (code == FILE_TYPE)
8328 return GEN_INT (file_type_class);
8329 if (code == LANG_TYPE)
8330 return GEN_INT (lang_type_class);
8331 }
8332 return GEN_INT (no_type_class);
8333
8334 case BUILT_IN_CONSTANT_P:
8335 if (arglist == 0)
8336 return const0_rtx;
8337 else
33cf5823
RK
8338 {
8339 tree arg = TREE_VALUE (arglist);
8340
8341 STRIP_NOPS (arg);
8342 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8343 || (TREE_CODE (arg) == ADDR_EXPR
8344 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8345 ? const1_rtx : const0_rtx);
8346 }
ca695ac9
JB
8347
8348 case BUILT_IN_FRAME_ADDRESS:
8349 /* The argument must be a nonnegative integer constant.
8350 It counts the number of frames to scan up the stack.
8351 The value is the address of that frame. */
8352 case BUILT_IN_RETURN_ADDRESS:
8353 /* The argument must be a nonnegative integer constant.
8354 It counts the number of frames to scan up the stack.
8355 The value is the return address saved in that frame. */
8356 if (arglist == 0)
8357 /* Warning about missing arg was already issued. */
8358 return const0_rtx;
8359 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8360 {
8361 error ("invalid arg to `__builtin_return_address'");
8362 return const0_rtx;
8363 }
153c149b 8364 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
ca695ac9
JB
8365 {
8366 error ("invalid arg to `__builtin_return_address'");
8367 return const0_rtx;
8368 }
8369 else
8370 {
2bbf216f
RK
8371 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8372 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8373 hard_frame_pointer_rtx);
ca695ac9
JB
8374
8375 /* For __builtin_frame_address, return what we've got. */
8376 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8377 return tem;
8378
2bbf216f
RK
8379 if (GET_CODE (tem) != REG)
8380 tem = copy_to_reg (tem);
8381 return tem;
ca695ac9
JB
8382 }
8383
8384 case BUILT_IN_ALLOCA:
8385 if (arglist == 0
8386 /* Arg could be non-integer if user redeclared this fcn wrong. */
8387 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 8388 break;
1ee86d15 8389
ca695ac9
JB
8390 /* Compute the argument. */
8391 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8392
8393 /* Allocate the desired space. */
1ee86d15 8394 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
ca695ac9
JB
8395
8396 case BUILT_IN_FFS:
8397 /* If not optimizing, call the library function. */
98aad286 8398 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8399 break;
8400
8401 if (arglist == 0
8402 /* Arg could be non-integer if user redeclared this fcn wrong. */
8403 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 8404 break;
ca695ac9
JB
8405
8406 /* Compute the argument. */
8407 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8408 /* Compute ffs, into TARGET if possible.
8409 Set TARGET to wherever the result comes back. */
8410 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8411 ffs_optab, op0, target, 1);
8412 if (target == 0)
8413 abort ();
8414 return target;
8415
8416 case BUILT_IN_STRLEN:
8417 /* If not optimizing, call the library function. */
98aad286 8418 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8419 break;
8420
8421 if (arglist == 0
8422 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8423 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7b073ca6 8424 break;
ca695ac9
JB
8425 else
8426 {
8427 tree src = TREE_VALUE (arglist);
8428 tree len = c_strlen (src);
8429
8430 int align
8431 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8432
8433 rtx result, src_rtx, char_rtx;
8434 enum machine_mode insn_mode = value_mode, char_mode;
8435 enum insn_code icode;
8436
0f41302f 8437 /* If the length is known, just return it. */
ca695ac9
JB
8438 if (len != 0)
8439 return expand_expr (len, target, mode, 0);
8440
0f41302f 8441 /* If SRC is not a pointer type, don't do this operation inline. */
ca695ac9
JB
8442 if (align == 0)
8443 break;
8444
0f41302f 8445 /* Call a function if we can't compute strlen in the right mode. */
ca695ac9
JB
8446
8447 while (insn_mode != VOIDmode)
8448 {
8449 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8450 if (icode != CODE_FOR_nothing)
8451 break;
bbf6f052 8452
ca695ac9
JB
8453 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8454 }
8455 if (insn_mode == VOIDmode)
8456 break;
bbf6f052 8457
ca695ac9
JB
8458 /* Make a place to write the result of the instruction. */
8459 result = target;
8460 if (! (result != 0
8461 && GET_CODE (result) == REG
8462 && GET_MODE (result) == insn_mode
8463 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8464 result = gen_reg_rtx (insn_mode);
bbf6f052 8465
ca695ac9
JB
8466 /* Make sure the operands are acceptable to the predicates. */
8467
8468 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8469 result = gen_reg_rtx (insn_mode);
8470
8471 src_rtx = memory_address (BLKmode,
88f63c77 8472 expand_expr (src, NULL_RTX, ptr_mode,
ca695ac9
JB
8473 EXPAND_NORMAL));
8474 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8475 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8476
8477 char_rtx = const0_rtx;
8478 char_mode = insn_operand_mode[(int)icode][2];
8479 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8480 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8481
8482 emit_insn (GEN_FCN (icode) (result,
8483 gen_rtx (MEM, BLKmode, src_rtx),
8484 char_rtx, GEN_INT (align)));
8485
8486 /* Return the value in the proper mode for this function. */
8487 if (GET_MODE (result) == value_mode)
8488 return result;
8489 else if (target != 0)
8490 {
8491 convert_move (target, result, 0);
8492 return target;
8493 }
8494 else
8495 return convert_to_mode (value_mode, result, 0);
8496 }
8497
8498 case BUILT_IN_STRCPY:
e87b4f3f 8499 /* If not optimizing, call the library function. */
98aad286 8500 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
e87b4f3f
RS
8501 break;
8502
8503 if (arglist == 0
ca695ac9
JB
8504 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8505 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8506 || TREE_CHAIN (arglist) == 0
8507 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 8508 break;
ca695ac9 8509 else
db0e6d01 8510 {
ca695ac9 8511 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
e7c33f54 8512
ca695ac9
JB
8513 if (len == 0)
8514 break;
e7c33f54 8515
ca695ac9 8516 len = size_binop (PLUS_EXPR, len, integer_one_node);
e7c33f54 8517
ca695ac9 8518 chainon (arglist, build_tree_list (NULL_TREE, len));
1bbddf11
JVA
8519 }
8520
ca695ac9
JB
8521 /* Drops in. */
8522 case BUILT_IN_MEMCPY:
8523 /* If not optimizing, call the library function. */
98aad286 8524 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9 8525 break;
e7c33f54 8526
ca695ac9
JB
8527 if (arglist == 0
8528 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8529 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8530 || TREE_CHAIN (arglist) == 0
8531 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8532 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8533 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 8534 break;
ca695ac9 8535 else
e7c33f54 8536 {
ca695ac9
JB
8537 tree dest = TREE_VALUE (arglist);
8538 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8539 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e9cf6a97 8540 tree type;
e87b4f3f 8541
ca695ac9
JB
8542 int src_align
8543 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8544 int dest_align
8545 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8546 rtx dest_rtx, dest_mem, src_mem;
60bac6ea 8547
ca695ac9
JB
8548 /* If either SRC or DEST is not a pointer type, don't do
8549 this operation in-line. */
8550 if (src_align == 0 || dest_align == 0)
8551 {
8552 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8553 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8554 break;
8555 }
8556
88f63c77 8557 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
ca695ac9
JB
8558 dest_mem = gen_rtx (MEM, BLKmode,
8559 memory_address (BLKmode, dest_rtx));
e9cf6a97 8560 /* There could be a void* cast on top of the object. */
5480a90c
RK
8561 while (TREE_CODE (dest) == NOP_EXPR)
8562 dest = TREE_OPERAND (dest, 0);
8563 type = TREE_TYPE (TREE_TYPE (dest));
e9cf6a97 8564 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
ca695ac9
JB
8565 src_mem = gen_rtx (MEM, BLKmode,
8566 memory_address (BLKmode,
8567 expand_expr (src, NULL_RTX,
88f63c77
RK
8568 ptr_mode,
8569 EXPAND_SUM)));
e9cf6a97 8570 /* There could be a void* cast on top of the object. */
5480a90c
RK
8571 while (TREE_CODE (src) == NOP_EXPR)
8572 src = TREE_OPERAND (src, 0);
8573 type = TREE_TYPE (TREE_TYPE (src));
e9cf6a97 8574 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
ca695ac9
JB
8575
8576 /* Copy word part most expediently. */
8577 emit_block_move (dest_mem, src_mem,
8578 expand_expr (len, NULL_RTX, VOIDmode, 0),
8579 MIN (src_align, dest_align));
85c53d24 8580 return force_operand (dest_rtx, NULL_RTX);
ca695ac9
JB
8581 }
8582
d7f21d63
RK
8583 case BUILT_IN_MEMSET:
8584 /* If not optimizing, call the library function. */
8585 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8586 break;
8587
8588 if (arglist == 0
8589 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8590 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8591 || TREE_CHAIN (arglist) == 0
8592 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8593 != INTEGER_TYPE)
8594 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8595 || (INTEGER_CST
8596 != (TREE_CODE (TREE_TYPE
8597 (TREE_VALUE
8598 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8599 break;
8600 else
8601 {
8602 tree dest = TREE_VALUE (arglist);
8603 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8604 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8605 tree type;
8606
8607 int dest_align
8608 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8609 rtx dest_rtx, dest_mem;
8610
8611 /* If DEST is not a pointer type, don't do this
8612 operation in-line. */
8613 if (dest_align == 0)
8614 break;
8615
8616 /* If VAL is not 0, don't do this operation in-line. */
8617 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8618 break;
8619
8620 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8621 dest_mem = gen_rtx (MEM, BLKmode,
8622 memory_address (BLKmode, dest_rtx));
8623 /* There could be a void* cast on top of the object. */
8624 while (TREE_CODE (dest) == NOP_EXPR)
8625 dest = TREE_OPERAND (dest, 0);
8626 type = TREE_TYPE (TREE_TYPE (dest));
8627 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8628
8629 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8630 dest_align);
8631
8632 return force_operand (dest_rtx, NULL_RTX);
8633 }
8634
ca695ac9
JB
8635/* These comparison functions need an instruction that returns an actual
8636 index. An ordinary compare that just sets the condition codes
8637 is not enough. */
8638#ifdef HAVE_cmpstrsi
8639 case BUILT_IN_STRCMP:
8640 /* If not optimizing, call the library function. */
98aad286 8641 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8642 break;
8643
8644 if (arglist == 0
8645 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8646 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8647 || TREE_CHAIN (arglist) == 0
8648 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 8649 break;
ca695ac9
JB
8650 else if (!HAVE_cmpstrsi)
8651 break;
8652 {
8653 tree arg1 = TREE_VALUE (arglist);
8654 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8655 tree offset;
8656 tree len, len2;
8657
8658 len = c_strlen (arg1);
8659 if (len)
8660 len = size_binop (PLUS_EXPR, integer_one_node, len);
8661 len2 = c_strlen (arg2);
8662 if (len2)
8663 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8664
8665 /* If we don't have a constant length for the first, use the length
8666 of the second, if we know it. We don't require a constant for
8667 this case; some cost analysis could be done if both are available
8668 but neither is constant. For now, assume they're equally cheap.
8669
8670 If both strings have constant lengths, use the smaller. This
8671 could arise if optimization results in strcpy being called with
8672 two fixed strings, or if the code was machine-generated. We should
8673 add some code to the `memcmp' handler below to deal with such
8674 situations, someday. */
8675 if (!len || TREE_CODE (len) != INTEGER_CST)
8676 {
8677 if (len2)
8678 len = len2;
8679 else if (len == 0)
8680 break;
8681 }
8682 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8683 {
8684 if (tree_int_cst_lt (len2, len))
8685 len = len2;
8686 }
8687
8688 chainon (arglist, build_tree_list (NULL_TREE, len));
8689 }
8690
8691 /* Drops in. */
8692 case BUILT_IN_MEMCMP:
8693 /* If not optimizing, call the library function. */
98aad286 8694 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8695 break;
8696
8697 if (arglist == 0
8698 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8699 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8700 || TREE_CHAIN (arglist) == 0
8701 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8702 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8703 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 8704 break;
ca695ac9
JB
8705 else if (!HAVE_cmpstrsi)
8706 break;
8707 {
8708 tree arg1 = TREE_VALUE (arglist);
8709 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8710 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8711 rtx result;
8712
8713 int arg1_align
8714 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8715 int arg2_align
8716 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8717 enum machine_mode insn_mode
8718 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
60bac6ea 8719
ca695ac9
JB
8720 /* If we don't have POINTER_TYPE, call the function. */
8721 if (arg1_align == 0 || arg2_align == 0)
8722 {
8723 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8724 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8725 break;
8726 }
60bac6ea 8727
ca695ac9
JB
8728 /* Make a place to write the result of the instruction. */
8729 result = target;
8730 if (! (result != 0
8731 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8732 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8733 result = gen_reg_rtx (insn_mode);
60bac6ea 8734
ca695ac9
JB
8735 emit_insn (gen_cmpstrsi (result,
8736 gen_rtx (MEM, BLKmode,
88f63c77
RK
8737 expand_expr (arg1, NULL_RTX,
8738 ptr_mode,
ca695ac9
JB
8739 EXPAND_NORMAL)),
8740 gen_rtx (MEM, BLKmode,
88f63c77
RK
8741 expand_expr (arg2, NULL_RTX,
8742 ptr_mode,
ca695ac9
JB
8743 EXPAND_NORMAL)),
8744 expand_expr (len, NULL_RTX, VOIDmode, 0),
8745 GEN_INT (MIN (arg1_align, arg2_align))));
60bac6ea 8746
ca695ac9
JB
8747 /* Return the value in the proper mode for this function. */
8748 mode = TYPE_MODE (TREE_TYPE (exp));
8749 if (GET_MODE (result) == mode)
8750 return result;
8751 else if (target != 0)
8752 {
8753 convert_move (target, result, 0);
8754 return target;
60bac6ea 8755 }
ca695ac9
JB
8756 else
8757 return convert_to_mode (mode, result, 0);
8758 }
60bac6ea 8759#else
ca695ac9
JB
8760 case BUILT_IN_STRCMP:
8761 case BUILT_IN_MEMCMP:
8762 break;
60bac6ea
RS
8763#endif
8764
4ed67205
RK
8765 /* __builtin_setjmp is passed a pointer to an array of five words
8766 (not all will be used on all machines). It operates similarly to
8767 the C library function of the same name, but is more efficient.
8768 Much of the code below (and for longjmp) is copied from the handling
8769 of non-local gotos.
8770
8771 NOTE: This is intended for use by GNAT and will only work in
8772 the method used by it. This code will likely NOT survive to
8773 the GCC 2.8.0 release. */
8774 case BUILT_IN_SETJMP:
8775 if (arglist == 0
8776 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8777 break;
8778
8779 {
85ab4aaa
RK
8780 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8781 VOIDmode, 0);
4ed67205
RK
8782 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8783 enum machine_mode sa_mode = Pmode;
8784 rtx stack_save;
7565a035
RK
8785 int old_inhibit_defer_pop = inhibit_defer_pop;
8786 int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8787 get_identifier ("__dummy"), 0);
8788 rtx next_arg_reg;
8789 CUMULATIVE_ARGS args_so_far;
a8a8cbb7 8790 int i;
4ed67205 8791
85ab4aaa
RK
8792#ifdef POINTERS_EXTEND_UNSIGNED
8793 buf_addr = convert_memory_address (Pmode, buf_addr);
8794#endif
8795
8796 buf_addr = force_reg (Pmode, buf_addr);
8797
4ed67205
RK
8798 if (target == 0 || GET_CODE (target) != REG
8799 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8800 target = gen_reg_rtx (value_mode);
8801
8802 emit_queue ();
8803
0dddb42d 8804 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
4ed67205
RK
8805 current_function_calls_setjmp = 1;
8806
8807 /* We store the frame pointer and the address of lab1 in the buffer
8808 and use the rest of it for the stack save area, which is
8809 machine-dependent. */
8810 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8811 virtual_stack_vars_rtx);
8812 emit_move_insn
8813 (validize_mem (gen_rtx (MEM, Pmode,
8814 plus_constant (buf_addr,
8815 GET_MODE_SIZE (Pmode)))),
8816 gen_rtx (LABEL_REF, Pmode, lab1));
8817
8818#ifdef HAVE_save_stack_nonlocal
8819 if (HAVE_save_stack_nonlocal)
8820 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8821#endif
8822
8823 stack_save = gen_rtx (MEM, sa_mode,
8824 plus_constant (buf_addr,
8825 2 * GET_MODE_SIZE (Pmode)));
8826 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8827
7565a035
RK
8828#ifdef HAVE_setjmp
8829 if (HAVE_setjmp)
8830 emit_insn (gen_setjmp ());
8831#endif
8832
4ed67205
RK
8833 /* Set TARGET to zero and branch around the other case. */
8834 emit_move_insn (target, const0_rtx);
8835 emit_jump_insn (gen_jump (lab2));
8836 emit_barrier ();
8837 emit_label (lab1);
8838
a8a8cbb7 8839 /* Note that setjmp clobbers FP when we get here, so we have to
0dddb42d 8840 make sure it's marked as used by this function. */
a8a8cbb7
RK
8841 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8842
477efd50
RK
8843 /* Mark the static chain as clobbered here so life information
8844 doesn't get messed up for it. */
8845 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8846
4ed67205
RK
8847 /* Now put in the code to restore the frame pointer, and argument
8848 pointer, if needed. The code below is from expand_end_bindings
8849 in stmt.c; see detailed documentation there. */
8850#ifdef HAVE_nonlocal_goto
8851 if (! HAVE_nonlocal_goto)
8852#endif
8853 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8854
a8a8cbb7
RK
8855 current_function_has_nonlocal_goto = 1;
8856
4ed67205
RK
8857#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8858 if (fixed_regs[ARG_POINTER_REGNUM])
8859 {
8860#ifdef ELIMINABLE_REGS
8861 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
4ed67205
RK
8862
8863 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8864 if (elim_regs[i].from == ARG_POINTER_REGNUM
8865 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8866 break;
8867
8868 if (i == sizeof elim_regs / sizeof elim_regs [0])
8869#endif
8870 {
8871 /* Now restore our arg pointer from the address at which it
8872 was saved in our stack frame.
8873 If there hasn't be space allocated for it yet, make
8874 some now. */
8875 if (arg_pointer_save_area == 0)
8876 arg_pointer_save_area
8877 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8878 emit_move_insn (virtual_incoming_args_rtx,
8879 copy_to_reg (arg_pointer_save_area));
8880 }
8881 }
8882#endif
8883
32ccb0ac
RK
8884#ifdef HAVE_nonlocal_goto_receiver
8885 if (HAVE_nonlocal_goto_receiver)
8886 emit_insn (gen_nonlocal_goto_receiver ());
8887#endif
7565a035
RK
8888 /* The static chain pointer contains the address of dummy function.
8889 We need to call it here to handle some PIC cases of restoring
8890 a global pointer. Then return 1. */
8891 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8892
8893 /* We can't actually call emit_library_call here, so do everything
8894 it does, which isn't much for a libfunc with no args. */
8895 op0 = memory_address (FUNCTION_MODE, op0);
8896
8897 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
2c7ee1a6 8898 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
7565a035
RK
8899 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8900
8901#ifndef ACCUMULATE_OUTGOING_ARGS
8902#ifdef HAVE_call_pop
8903 if (HAVE_call_pop)
8904 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8905 const0_rtx, next_arg_reg,
8906 GEN_INT (return_pops)));
8907 else
8908#endif
8909#endif
8910
8911#ifdef HAVE_call
8912 if (HAVE_call)
8913 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8914 const0_rtx, next_arg_reg, const0_rtx));
4ed67205 8915 else
7565a035
RK
8916#endif
8917 abort ();
4ed67205 8918
7565a035 8919 emit_move_insn (target, const1_rtx);
4ed67205
RK
8920 emit_label (lab2);
8921 return target;
8922 }
8923
8924 /* __builtin_longjmp is passed a pointer to an array of five words
7565a035 8925 and a value, which is a dummy. It's similar to the C library longjmp
4ed67205
RK
8926 function but works with __builtin_setjmp above. */
8927 case BUILT_IN_LONGJMP:
8928 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8929 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8930 break;
8931
8932 {
b089937a
RK
8933 tree dummy_id = get_identifier ("__dummy");
8934 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
8935 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
85ab4aaa 8936#ifdef POINTERS_EXTEND_UNSIGNED
4ed67205 8937 rtx buf_addr
0fedef28 8938 = force_reg (Pmode,
85ab4aaa
RK
8939 convert_memory_address
8940 (Pmode,
8941 expand_expr (TREE_VALUE (arglist),
8942 NULL_RTX, VOIDmode, 0)));
8943#else
8944 rtx buf_addr
8945 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
8946 NULL_RTX,
8947 VOIDmode, 0));
8948#endif
4ed67205
RK
8949 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8950 rtx lab = gen_rtx (MEM, Pmode,
8951 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8952 enum machine_mode sa_mode
8953#ifdef HAVE_save_stack_nonlocal
8954 = (HAVE_save_stack_nonlocal
8955 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8956 : Pmode);
8957#else
8958 = Pmode;
8959#endif
8960 rtx stack = gen_rtx (MEM, sa_mode,
8961 plus_constant (buf_addr,
8962 2 * GET_MODE_SIZE (Pmode)));
b089937a
RK
8963
8964 DECL_EXTERNAL (dummy_decl) = 1;
8965 TREE_PUBLIC (dummy_decl) = 1;
8966 make_decl_rtl (dummy_decl, NULL_PTR, 1);
7565a035
RK
8967
8968 /* Expand the second expression just for side-effects. */
8969 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8970 const0_rtx, VOIDmode, 0);
8971
b089937a 8972 assemble_external (dummy_decl);
4ed67205
RK
8973
8974 /* Pick up FP, label, and SP from the block and jump. This code is
8975 from expand_goto in stmt.c; see there for detailed comments. */
8976#if HAVE_nonlocal_goto
8977 if (HAVE_nonlocal_goto)
b089937a
RK
8978 emit_insn (gen_nonlocal_goto (fp, lab, stack,
8979 XEXP (DECL_RTL (dummy_decl), 0)));
4ed67205
RK
8980 else
8981#endif
8982 {
7565a035 8983 lab = copy_to_reg (lab);
4ed67205
RK
8984 emit_move_insn (hard_frame_pointer_rtx, fp);
8985 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8986
7565a035
RK
8987 /* Put in the static chain register the address of the dummy
8988 function. */
b089937a 8989 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
4ed67205
RK
8990 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8991 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
8992 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
7565a035 8993 emit_indirect_jump (lab);
4ed67205
RK
8994 }
8995
8996 return const0_rtx;
8997 }
8998
ca695ac9
JB
8999 default: /* just do library call, if unknown builtin */
9000 error ("built-in function `%s' not currently supported",
9001 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9002 }
e87b4f3f 9003
ca695ac9
JB
9004 /* The switch statement above can drop through to cause the function
9005 to be called normally. */
e7c33f54 9006
ca695ac9
JB
9007 return expand_call (exp, target, ignore);
9008}
9009\f
9010/* Built-in functions to perform an untyped call and return. */
0006469d 9011
ca695ac9
JB
9012/* For each register that may be used for calling a function, this
9013 gives a mode used to copy the register's value. VOIDmode indicates
9014 the register is not used for calling a function. If the machine
9015 has register windows, this gives only the outbound registers.
9016 INCOMING_REGNO gives the corresponding inbound register. */
9017static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 9018
ca695ac9
JB
9019/* For each register that may be used for returning values, this gives
9020 a mode used to copy the register's value. VOIDmode indicates the
9021 register is not used for returning values. If the machine has
9022 register windows, this gives only the outbound registers.
9023 INCOMING_REGNO gives the corresponding inbound register. */
9024static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 9025
ca695ac9
JB
9026/* For each register that may be used for calling a function, this
9027 gives the offset of that register into the block returned by
9faa82d8 9028 __builtin_apply_args. 0 indicates that the register is not
0f41302f 9029 used for calling a function. */
ca695ac9 9030static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
0006469d 9031
ca695ac9
JB
9032/* Return the offset of register REGNO into the block returned by
9033 __builtin_apply_args. This is not declared static, since it is
0f41302f 9034 needed in objc-act.c. */
0006469d 9035
ca695ac9
JB
9036int
9037apply_args_register_offset (regno)
9038 int regno;
9039{
9040 apply_args_size ();
0006469d 9041
ca695ac9 9042 /* Arguments are always put in outgoing registers (in the argument
0f41302f 9043 block) if such make sense. */
ca695ac9
JB
9044#ifdef OUTGOING_REGNO
9045 regno = OUTGOING_REGNO(regno);
9046#endif
9047 return apply_args_reg_offset[regno];
9048}
0006469d 9049
ca695ac9
JB
9050/* Return the size required for the block returned by __builtin_apply_args,
9051 and initialize apply_args_mode. */
0006469d 9052
ca695ac9
JB
9053static int
9054apply_args_size ()
9055{
9056 static int size = -1;
9057 int align, regno;
9058 enum machine_mode mode;
bbf6f052 9059
ca695ac9
JB
9060 /* The values computed by this function never change. */
9061 if (size < 0)
9062 {
9063 /* The first value is the incoming arg-pointer. */
9064 size = GET_MODE_SIZE (Pmode);
bbf6f052 9065
ca695ac9
JB
9066 /* The second value is the structure value address unless this is
9067 passed as an "invisible" first argument. */
9068 if (struct_value_rtx)
9069 size += GET_MODE_SIZE (Pmode);
9070
9071 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9072 if (FUNCTION_ARG_REGNO_P (regno))
bbf6f052 9073 {
ca695ac9
JB
9074 /* Search for the proper mode for copying this register's
9075 value. I'm not sure this is right, but it works so far. */
9076 enum machine_mode best_mode = VOIDmode;
9077
9078 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9079 mode != VOIDmode;
9080 mode = GET_MODE_WIDER_MODE (mode))
9081 if (HARD_REGNO_MODE_OK (regno, mode)
9082 && HARD_REGNO_NREGS (regno, mode) == 1)
9083 best_mode = mode;
9084
9085 if (best_mode == VOIDmode)
9086 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9087 mode != VOIDmode;
9088 mode = GET_MODE_WIDER_MODE (mode))
9089 if (HARD_REGNO_MODE_OK (regno, mode)
9090 && (mov_optab->handlers[(int) mode].insn_code
9091 != CODE_FOR_nothing))
9092 best_mode = mode;
9093
9094 mode = best_mode;
9095 if (mode == VOIDmode)
9096 abort ();
9097
9098 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9099 if (size % align != 0)
9100 size = CEIL (size, align) * align;
9101 apply_args_reg_offset[regno] = size;
9102 size += GET_MODE_SIZE (mode);
9103 apply_args_mode[regno] = mode;
9104 }
9105 else
9106 {
9107 apply_args_mode[regno] = VOIDmode;
9108 apply_args_reg_offset[regno] = 0;
bbf6f052 9109 }
ca695ac9
JB
9110 }
9111 return size;
9112}
bbf6f052 9113
ca695ac9
JB
9114/* Return the size required for the block returned by __builtin_apply,
9115 and initialize apply_result_mode. */
bbf6f052 9116
ca695ac9
JB
9117static int
9118apply_result_size ()
9119{
9120 static int size = -1;
9121 int align, regno;
9122 enum machine_mode mode;
bbf6f052 9123
ca695ac9
JB
9124 /* The values computed by this function never change. */
9125 if (size < 0)
9126 {
9127 size = 0;
bbf6f052 9128
ca695ac9
JB
9129 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9130 if (FUNCTION_VALUE_REGNO_P (regno))
9131 {
9132 /* Search for the proper mode for copying this register's
9133 value. I'm not sure this is right, but it works so far. */
9134 enum machine_mode best_mode = VOIDmode;
bbf6f052 9135
ca695ac9
JB
9136 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9137 mode != TImode;
9138 mode = GET_MODE_WIDER_MODE (mode))
9139 if (HARD_REGNO_MODE_OK (regno, mode))
9140 best_mode = mode;
bbf6f052 9141
ca695ac9
JB
9142 if (best_mode == VOIDmode)
9143 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9144 mode != VOIDmode;
9145 mode = GET_MODE_WIDER_MODE (mode))
9146 if (HARD_REGNO_MODE_OK (regno, mode)
9147 && (mov_optab->handlers[(int) mode].insn_code
9148 != CODE_FOR_nothing))
9149 best_mode = mode;
bbf6f052 9150
ca695ac9
JB
9151 mode = best_mode;
9152 if (mode == VOIDmode)
9153 abort ();
bbf6f052 9154
ca695ac9
JB
9155 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9156 if (size % align != 0)
9157 size = CEIL (size, align) * align;
9158 size += GET_MODE_SIZE (mode);
9159 apply_result_mode[regno] = mode;
bbf6f052
RK
9160 }
9161 else
ca695ac9 9162 apply_result_mode[regno] = VOIDmode;
bbf6f052 9163
ca695ac9
JB
9164 /* Allow targets that use untyped_call and untyped_return to override
9165 the size so that machine-specific information can be stored here. */
9166#ifdef APPLY_RESULT_SIZE
9167 size = APPLY_RESULT_SIZE;
9168#endif
9169 }
9170 return size;
9171}
bbf6f052 9172
ca695ac9
JB
9173#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9174/* Create a vector describing the result block RESULT. If SAVEP is true,
9175 the result block is used to save the values; otherwise it is used to
9176 restore the values. */
bbf6f052 9177
ca695ac9
JB
9178static rtx
9179result_vector (savep, result)
9180 int savep;
9181 rtx result;
9182{
9183 int regno, size, align, nelts;
9184 enum machine_mode mode;
9185 rtx reg, mem;
9186 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9187
9188 size = nelts = 0;
9189 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9190 if ((mode = apply_result_mode[regno]) != VOIDmode)
9191 {
9192 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9193 if (size % align != 0)
9194 size = CEIL (size, align) * align;
18992995 9195 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
ca695ac9
JB
9196 mem = change_address (result, mode,
9197 plus_constant (XEXP (result, 0), size));
9198 savevec[nelts++] = (savep
9199 ? gen_rtx (SET, VOIDmode, mem, reg)
9200 : gen_rtx (SET, VOIDmode, reg, mem));
9201 size += GET_MODE_SIZE (mode);
bbf6f052 9202 }
ca695ac9
JB
9203 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9204}
9205#endif /* HAVE_untyped_call or HAVE_untyped_return */
bbf6f052 9206
ca695ac9
JB
9207/* Save the state required to perform an untyped call with the same
9208 arguments as were passed to the current function. */
9209
9210static rtx
9211expand_builtin_apply_args ()
9212{
9213 rtx registers;
9214 int size, align, regno;
9215 enum machine_mode mode;
9216
9217 /* Create a block where the arg-pointer, structure value address,
9218 and argument registers can be saved. */
9219 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9220
9221 /* Walk past the arg-pointer and structure value address. */
9222 size = GET_MODE_SIZE (Pmode);
9223 if (struct_value_rtx)
9224 size += GET_MODE_SIZE (Pmode);
9225
c816db88
RK
9226 /* Save each register used in calling a function to the block. */
9227 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
ca695ac9 9228 if ((mode = apply_args_mode[regno]) != VOIDmode)
bbf6f052 9229 {
ee33823f
RK
9230 rtx tem;
9231
ca695ac9
JB
9232 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9233 if (size % align != 0)
9234 size = CEIL (size, align) * align;
ee33823f
RK
9235
9236 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9237
9238#ifdef STACK_REGS
9239 /* For reg-stack.c's stack register household.
9240 Compare with a similar piece of code in function.c. */
9241
9242 emit_insn (gen_rtx (USE, mode, tem));
9243#endif
9244
ca695ac9
JB
9245 emit_move_insn (change_address (registers, mode,
9246 plus_constant (XEXP (registers, 0),
9247 size)),
ee33823f 9248 tem);
ca695ac9 9249 size += GET_MODE_SIZE (mode);
bbf6f052
RK
9250 }
9251
ca695ac9
JB
9252 /* Save the arg pointer to the block. */
9253 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9254 copy_to_reg (virtual_incoming_args_rtx));
9255 size = GET_MODE_SIZE (Pmode);
bbf6f052 9256
ca695ac9
JB
9257 /* Save the structure value address unless this is passed as an
9258 "invisible" first argument. */
9259 if (struct_value_incoming_rtx)
9260 {
9261 emit_move_insn (change_address (registers, Pmode,
9262 plus_constant (XEXP (registers, 0),
9263 size)),
9264 copy_to_reg (struct_value_incoming_rtx));
9265 size += GET_MODE_SIZE (Pmode);
9266 }
9267
9268 /* Return the address of the block. */
9269 return copy_addr_to_reg (XEXP (registers, 0));
9270}
9271
9272/* Perform an untyped call and save the state required to perform an
9273 untyped return of whatever value was returned by the given function. */
9274
9275static rtx
9276expand_builtin_apply (function, arguments, argsize)
9277 rtx function, arguments, argsize;
9278{
9279 int size, align, regno;
9280 enum machine_mode mode;
9281 rtx incoming_args, result, reg, dest, call_insn;
9282 rtx old_stack_level = 0;
b3f8cf4a 9283 rtx call_fusage = 0;
bbf6f052 9284
ca695ac9
JB
9285 /* Create a block where the return registers can be saved. */
9286 result = assign_stack_local (BLKmode, apply_result_size (), -1);
bbf6f052 9287
ca695ac9 9288 /* ??? The argsize value should be adjusted here. */
bbf6f052 9289
ca695ac9
JB
9290 /* Fetch the arg pointer from the ARGUMENTS block. */
9291 incoming_args = gen_reg_rtx (Pmode);
9292 emit_move_insn (incoming_args,
9293 gen_rtx (MEM, Pmode, arguments));
9294#ifndef STACK_GROWS_DOWNWARD
9295 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9296 incoming_args, 0, OPTAB_LIB_WIDEN);
46b68a37
JW
9297#endif
9298
ca695ac9
JB
9299 /* Perform postincrements before actually calling the function. */
9300 emit_queue ();
46b68a37 9301
ca695ac9
JB
9302 /* Push a new argument block and copy the arguments. */
9303 do_pending_stack_adjust ();
9304 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
bbf6f052 9305
ca695ac9
JB
9306 /* Push a block of memory onto the stack to store the memory arguments.
9307 Save the address in a register, and copy the memory arguments. ??? I
9308 haven't figured out how the calling convention macros effect this,
9309 but it's likely that the source and/or destination addresses in
9310 the block copy will need updating in machine specific ways. */
9311 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9312 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9313 gen_rtx (MEM, BLKmode, incoming_args),
9314 argsize,
9315 PARM_BOUNDARY / BITS_PER_UNIT);
bbf6f052 9316
ca695ac9
JB
9317 /* Refer to the argument block. */
9318 apply_args_size ();
9319 arguments = gen_rtx (MEM, BLKmode, arguments);
9320
9321 /* Walk past the arg-pointer and structure value address. */
9322 size = GET_MODE_SIZE (Pmode);
9323 if (struct_value_rtx)
9324 size += GET_MODE_SIZE (Pmode);
9325
9326 /* Restore each of the registers previously saved. Make USE insns
c816db88
RK
9327 for each of these registers for use in making the call. */
9328 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
ca695ac9
JB
9329 if ((mode = apply_args_mode[regno]) != VOIDmode)
9330 {
9331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9332 if (size % align != 0)
9333 size = CEIL (size, align) * align;
9334 reg = gen_rtx (REG, mode, regno);
9335 emit_move_insn (reg,
9336 change_address (arguments, mode,
9337 plus_constant (XEXP (arguments, 0),
9338 size)));
9339
b3f8cf4a 9340 use_reg (&call_fusage, reg);
ca695ac9
JB
9341 size += GET_MODE_SIZE (mode);
9342 }
9343
9344 /* Restore the structure value address unless this is passed as an
9345 "invisible" first argument. */
9346 size = GET_MODE_SIZE (Pmode);
9347 if (struct_value_rtx)
9348 {
9349 rtx value = gen_reg_rtx (Pmode);
9350 emit_move_insn (value,
9351 change_address (arguments, Pmode,
9352 plus_constant (XEXP (arguments, 0),
9353 size)));
9354 emit_move_insn (struct_value_rtx, value);
9355 if (GET_CODE (struct_value_rtx) == REG)
b3f8cf4a 9356 use_reg (&call_fusage, struct_value_rtx);
ca695ac9
JB
9357 size += GET_MODE_SIZE (Pmode);
9358 }
bbf6f052 9359
ca695ac9 9360 /* All arguments and registers used for the call are set up by now! */
b3f8cf4a 9361 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
bbf6f052 9362
ca695ac9
JB
9363 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9364 and we don't want to load it into a register as an optimization,
9365 because prepare_call_address already did it if it should be done. */
9366 if (GET_CODE (function) != SYMBOL_REF)
9367 function = memory_address (FUNCTION_MODE, function);
bbf6f052 9368
ca695ac9
JB
9369 /* Generate the actual call instruction and save the return value. */
9370#ifdef HAVE_untyped_call
9371 if (HAVE_untyped_call)
9372 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9373 result, result_vector (1, result)));
9374 else
9375#endif
9376#ifdef HAVE_call_value
9377 if (HAVE_call_value)
9378 {
9379 rtx valreg = 0;
bbf6f052 9380
ca695ac9
JB
9381 /* Locate the unique return register. It is not possible to
9382 express a call that sets more than one return register using
9383 call_value; use untyped_call for that. In fact, untyped_call
9384 only needs to save the return registers in the given block. */
9385 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9386 if ((mode = apply_result_mode[regno]) != VOIDmode)
9387 {
9388 if (valreg)
9389 abort (); /* HAVE_untyped_call required. */
9390 valreg = gen_rtx (REG, mode, regno);
9391 }
bbf6f052 9392
ca695ac9
JB
9393 emit_call_insn (gen_call_value (valreg,
9394 gen_rtx (MEM, FUNCTION_MODE, function),
9395 const0_rtx, NULL_RTX, const0_rtx));
bbf6f052 9396
ca695ac9
JB
9397 emit_move_insn (change_address (result, GET_MODE (valreg),
9398 XEXP (result, 0)),
9399 valreg);
9400 }
9401 else
9402#endif
9403 abort ();
bbf6f052 9404
b3f8cf4a 9405 /* Find the CALL insn we just emitted. */
ca695ac9
JB
9406 for (call_insn = get_last_insn ();
9407 call_insn && GET_CODE (call_insn) != CALL_INSN;
9408 call_insn = PREV_INSN (call_insn))
9409 ;
bbf6f052 9410
ca695ac9
JB
9411 if (! call_insn)
9412 abort ();
bbf6f052 9413
6d100794
RK
9414 /* Put the register usage information on the CALL. If there is already
9415 some usage information, put ours at the end. */
9416 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9417 {
9418 rtx link;
9419
9420 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9421 link = XEXP (link, 1))
9422 ;
9423
9424 XEXP (link, 1) = call_fusage;
9425 }
9426 else
9427 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
e7c33f54 9428
ca695ac9
JB
9429 /* Restore the stack. */
9430 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
e7c33f54 9431
ca695ac9
JB
9432 /* Return the address of the result block. */
9433 return copy_addr_to_reg (XEXP (result, 0));
9434}
e7c33f54 9435
ca695ac9 9436/* Perform an untyped return. */
e7c33f54 9437
ca695ac9
JB
9438static void
9439expand_builtin_return (result)
9440 rtx result;
9441{
9442 int size, align, regno;
9443 enum machine_mode mode;
9444 rtx reg;
b3f8cf4a 9445 rtx call_fusage = 0;
e7c33f54 9446
ca695ac9
JB
9447 apply_result_size ();
9448 result = gen_rtx (MEM, BLKmode, result);
e7c33f54 9449
ca695ac9
JB
9450#ifdef HAVE_untyped_return
9451 if (HAVE_untyped_return)
9452 {
9453 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9454 emit_barrier ();
9455 return;
9456 }
9457#endif
e7c33f54 9458
ca695ac9
JB
9459 /* Restore the return value and note that each value is used. */
9460 size = 0;
9461 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9462 if ((mode = apply_result_mode[regno]) != VOIDmode)
9463 {
9464 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9465 if (size % align != 0)
9466 size = CEIL (size, align) * align;
9467 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9468 emit_move_insn (reg,
9469 change_address (result, mode,
9470 plus_constant (XEXP (result, 0),
9471 size)));
e7c33f54 9472
b3f8cf4a 9473 push_to_sequence (call_fusage);
ca695ac9 9474 emit_insn (gen_rtx (USE, VOIDmode, reg));
b3f8cf4a 9475 call_fusage = get_insns ();
ca695ac9
JB
9476 end_sequence ();
9477 size += GET_MODE_SIZE (mode);
9478 }
e7c33f54 9479
ca695ac9 9480 /* Put the USE insns before the return. */
b3f8cf4a 9481 emit_insns (call_fusage);
e7c33f54 9482
ca695ac9
JB
9483 /* Return whatever values was restored by jumping directly to the end
9484 of the function. */
9485 expand_null_return ();
9486}
9487\f
9488/* Expand code for a post- or pre- increment or decrement
9489 and return the RTX for the result.
9490 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
e7c33f54 9491
ca695ac9 9492static rtx
7b8b9722 9493expand_increment (exp, post, ignore)
ca695ac9 9494 register tree exp;
7b8b9722 9495 int post, ignore;
ca695ac9
JB
9496{
9497 register rtx op0, op1;
9498 register rtx temp, value;
9499 register tree incremented = TREE_OPERAND (exp, 0);
9500 optab this_optab = add_optab;
9501 int icode;
9502 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9503 int op0_is_copy = 0;
9504 int single_insn = 0;
a97f5a86
RS
9505 /* 1 means we can't store into OP0 directly,
9506 because it is a subreg narrower than a word,
9507 and we don't dare clobber the rest of the word. */
9508 int bad_subreg = 0;
e7c33f54 9509
ca695ac9 9510 if (output_bytecode)
c02bd5d9
JB
9511 {
9512 bc_expand_expr (exp);
9513 return NULL_RTX;
9514 }
e7c33f54 9515
ca695ac9
JB
9516 /* Stabilize any component ref that might need to be
9517 evaluated more than once below. */
9518 if (!post
9519 || TREE_CODE (incremented) == BIT_FIELD_REF
9520 || (TREE_CODE (incremented) == COMPONENT_REF
9521 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9522 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9523 incremented = stabilize_reference (incremented);
9524 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9525 ones into save exprs so that they don't accidentally get evaluated
9526 more than once by the code below. */
9527 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9528 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9529 incremented = save_expr (incremented);
bbf6f052 9530
ca695ac9
JB
9531 /* Compute the operands as RTX.
9532 Note whether OP0 is the actual lvalue or a copy of it:
9533 I believe it is a copy iff it is a register or subreg
9534 and insns were generated in computing it. */
bbf6f052 9535
ca695ac9
JB
9536 temp = get_last_insn ();
9537 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
bbf6f052 9538
ca695ac9 9539 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9faa82d8 9540 in place but instead must do sign- or zero-extension during assignment,
ca695ac9
JB
9541 so we copy it into a new register and let the code below use it as
9542 a copy.
bbf6f052 9543
ca695ac9
JB
9544 Note that we can safely modify this SUBREG since it is know not to be
9545 shared (it was made by the expand_expr call above). */
bbf6f052 9546
ca695ac9 9547 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
3e073e72
RK
9548 {
9549 if (post)
9550 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9551 else
9552 bad_subreg = 1;
9553 }
a97f5a86
RS
9554 else if (GET_CODE (op0) == SUBREG
9555 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
79777b79
RK
9556 {
9557 /* We cannot increment this SUBREG in place. If we are
9558 post-incrementing, get a copy of the old value. Otherwise,
9559 just mark that we cannot increment in place. */
9560 if (post)
9561 op0 = copy_to_reg (op0);
9562 else
9563 bad_subreg = 1;
9564 }
bbf6f052 9565
ca695ac9
JB
9566 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9567 && temp != get_last_insn ());
9568 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052 9569
ca695ac9
JB
9570 /* Decide whether incrementing or decrementing. */
9571 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9572 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9573 this_optab = sub_optab;
bbf6f052 9574
ca695ac9
JB
9575 /* Convert decrement by a constant into a negative increment. */
9576 if (this_optab == sub_optab
9577 && GET_CODE (op1) == CONST_INT)
9578 {
9579 op1 = GEN_INT (- INTVAL (op1));
9580 this_optab = add_optab;
9581 }
bbf6f052 9582
ca695ac9
JB
9583 /* For a preincrement, see if we can do this with a single instruction. */
9584 if (!post)
9585 {
9586 icode = (int) this_optab->handlers[(int) mode].insn_code;
9587 if (icode != (int) CODE_FOR_nothing
9588 /* Make sure that OP0 is valid for operands 0 and 1
9589 of the insn we want to queue. */
9590 && (*insn_operand_predicate[icode][0]) (op0, mode)
9591 && (*insn_operand_predicate[icode][1]) (op0, mode)
9592 && (*insn_operand_predicate[icode][2]) (op1, mode))
9593 single_insn = 1;
9594 }
bbf6f052 9595
ca695ac9
JB
9596 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9597 then we cannot just increment OP0. We must therefore contrive to
9598 increment the original value. Then, for postincrement, we can return
9599 OP0 since it is a copy of the old value. For preincrement, expand here
a97f5a86
RS
9600 unless we can do it with a single insn.
9601
9602 Likewise if storing directly into OP0 would clobber high bits
9603 we need to preserve (bad_subreg). */
9604 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
ca695ac9
JB
9605 {
9606 /* This is the easiest way to increment the value wherever it is.
9607 Problems with multiple evaluation of INCREMENTED are prevented
9608 because either (1) it is a component_ref or preincrement,
9609 in which case it was stabilized above, or (2) it is an array_ref
9610 with constant index in an array in a register, which is
9611 safe to reevaluate. */
9612 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9613 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9614 ? MINUS_EXPR : PLUS_EXPR),
9615 TREE_TYPE (exp),
9616 incremented,
9617 TREE_OPERAND (exp, 1));
e9cdf6e4
RK
9618
9619 while (TREE_CODE (incremented) == NOP_EXPR
9620 || TREE_CODE (incremented) == CONVERT_EXPR)
9621 {
9622 newexp = convert (TREE_TYPE (incremented), newexp);
9623 incremented = TREE_OPERAND (incremented, 0);
9624 }
9625
7b8b9722 9626 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
ca695ac9
JB
9627 return post ? op0 : temp;
9628 }
bbf6f052 9629
ca695ac9
JB
9630 if (post)
9631 {
9632 /* We have a true reference to the value in OP0.
9633 If there is an insn to add or subtract in this mode, queue it.
9634 Queueing the increment insn avoids the register shuffling
9635 that often results if we must increment now and first save
9636 the old value for subsequent use. */
bbf6f052 9637
ca695ac9
JB
9638#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9639 op0 = stabilize (op0);
9640#endif
bbf6f052 9641
ca695ac9
JB
9642 icode = (int) this_optab->handlers[(int) mode].insn_code;
9643 if (icode != (int) CODE_FOR_nothing
9644 /* Make sure that OP0 is valid for operands 0 and 1
9645 of the insn we want to queue. */
9646 && (*insn_operand_predicate[icode][0]) (op0, mode)
9647 && (*insn_operand_predicate[icode][1]) (op0, mode))
9648 {
9649 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9650 op1 = force_reg (mode, op1);
bbf6f052 9651
ca695ac9
JB
9652 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9653 }
0842a179
RK
9654 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9655 {
9656 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9657 rtx temp, result;
9658
9659 op0 = change_address (op0, VOIDmode, addr);
9660 temp = force_reg (GET_MODE (op0), op0);
9661 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9662 op1 = force_reg (mode, op1);
9663
9664 /* The increment queue is LIFO, thus we have to `queue'
9665 the instructions in reverse order. */
9666 enqueue_insn (op0, gen_move_insn (op0, temp));
9667 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9668 return result;
9669 }
ca695ac9 9670 }
bbf6f052 9671
ca695ac9
JB
9672 /* Preincrement, or we can't increment with one simple insn. */
9673 if (post)
9674 /* Save a copy of the value before inc or dec, to return it later. */
9675 temp = value = copy_to_reg (op0);
9676 else
9677 /* Arrange to return the incremented value. */
9678 /* Copy the rtx because expand_binop will protect from the queue,
9679 and the results of that would be invalid for us to return
9680 if our caller does emit_queue before using our result. */
9681 temp = copy_rtx (value = op0);
bbf6f052 9682
ca695ac9
JB
9683 /* Increment however we can. */
9684 op1 = expand_binop (mode, this_optab, value, op1, op0,
9685 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9686 /* Make sure the value is stored into OP0. */
9687 if (op1 != op0)
9688 emit_move_insn (op0, op1);
bbf6f052 9689
ca695ac9
JB
9690 return temp;
9691}
9692\f
9693/* Expand all function calls contained within EXP, innermost ones first.
9694 But don't look within expressions that have sequence points.
9695 For each CALL_EXPR, record the rtx for its value
9696 in the CALL_EXPR_RTL field. */
bbf6f052 9697
ca695ac9
JB
9698static void
9699preexpand_calls (exp)
9700 tree exp;
9701{
9702 register int nops, i;
9703 int type = TREE_CODE_CLASS (TREE_CODE (exp));
bbf6f052 9704
ca695ac9
JB
9705 if (! do_preexpand_calls)
9706 return;
bbf6f052 9707
ca695ac9 9708 /* Only expressions and references can contain calls. */
bbf6f052 9709
ca695ac9
JB
9710 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9711 return;
bbf6f052 9712
ca695ac9
JB
9713 switch (TREE_CODE (exp))
9714 {
9715 case CALL_EXPR:
9716 /* Do nothing if already expanded. */
43198be7
RK
9717 if (CALL_EXPR_RTL (exp) != 0
9718 /* Do nothing if the call returns a variable-sized object. */
9719 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9720 /* Do nothing to built-in functions. */
9721 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9722 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9723 == FUNCTION_DECL)
9724 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
ca695ac9 9725 return;
bbf6f052 9726
43198be7 9727 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
ca695ac9 9728 return;
bbf6f052 9729
ca695ac9
JB
9730 case COMPOUND_EXPR:
9731 case COND_EXPR:
9732 case TRUTH_ANDIF_EXPR:
9733 case TRUTH_ORIF_EXPR:
9734 /* If we find one of these, then we can be sure
9735 the adjust will be done for it (since it makes jumps).
9736 Do it now, so that if this is inside an argument
9737 of a function, we don't get the stack adjustment
9738 after some other args have already been pushed. */
9739 do_pending_stack_adjust ();
9740 return;
bbf6f052 9741
ca695ac9
JB
9742 case BLOCK:
9743 case RTL_EXPR:
9744 case WITH_CLEANUP_EXPR:
402c7311 9745 case CLEANUP_POINT_EXPR:
ca695ac9 9746 return;
bbf6f052 9747
ca695ac9
JB
9748 case SAVE_EXPR:
9749 if (SAVE_EXPR_RTL (exp) != 0)
9750 return;
9751 }
bbf6f052 9752
ca695ac9
JB
9753 nops = tree_code_length[(int) TREE_CODE (exp)];
9754 for (i = 0; i < nops; i++)
9755 if (TREE_OPERAND (exp, i) != 0)
9756 {
9757 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9758 if (type == 'e' || type == '<' || type == '1' || type == '2'
9759 || type == 'r')
9760 preexpand_calls (TREE_OPERAND (exp, i));
9761 }
bbf6f052
RK
9762}
9763\f
ca695ac9
JB
9764/* At the start of a function, record that we have no previously-pushed
9765 arguments waiting to be popped. */
0006469d 9766
ca695ac9
JB
9767void
9768init_pending_stack_adjust ()
9769{
9770 pending_stack_adjust = 0;
9771}
fb2ca25a 9772
ca695ac9
JB
9773/* When exiting from function, if safe, clear out any pending stack adjust
9774 so the adjustment won't get done. */
904762c8 9775
ca695ac9
JB
9776void
9777clear_pending_stack_adjust ()
fb2ca25a 9778{
ca695ac9 9779#ifdef EXIT_IGNORE_STACK
b7c2e1e2
RK
9780 if (optimize > 0
9781 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
ca695ac9
JB
9782 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9783 && ! flag_inline_functions)
9784 pending_stack_adjust = 0;
fb2ca25a 9785#endif
fb2ca25a
KKT
9786}
9787
ca695ac9
JB
9788/* Pop any previously-pushed arguments that have not been popped yet. */
9789
9790void
9791do_pending_stack_adjust ()
9792{
9793 if (inhibit_defer_pop == 0)
9794 {
9795 if (pending_stack_adjust != 0)
9796 adjust_stack (GEN_INT (pending_stack_adjust));
9797 pending_stack_adjust = 0;
9798 }
9799}
9800
5dab5552
MS
9801/* Defer the expansion all cleanups up to OLD_CLEANUPS.
9802 Returns the cleanups to be performed. */
9803
9804static tree
9805defer_cleanups_to (old_cleanups)
9806 tree old_cleanups;
9807{
9808 tree new_cleanups = NULL_TREE;
9809 tree cleanups = cleanups_this_call;
9810 tree last = NULL_TREE;
9811
9812 while (cleanups_this_call != old_cleanups)
9813 {
3d195391 9814 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
4ea8537b 9815 last = cleanups_this_call;
5dab5552
MS
9816 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9817 }
9818
9819 if (last)
9820 {
9821 /* Remove the list from the chain of cleanups. */
9822 TREE_CHAIN (last) = NULL_TREE;
9823
9824 /* reverse them so that we can build them in the right order. */
9825 cleanups = nreverse (cleanups);
9826
9ba73d38
MS
9827 /* All cleanups must be on the function_obstack. */
9828 push_obstacks_nochange ();
9829 resume_temporary_allocation ();
9830
5dab5552
MS
9831 while (cleanups)
9832 {
9833 if (new_cleanups)
9834 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9835 TREE_VALUE (cleanups), new_cleanups);
9836 else
9837 new_cleanups = TREE_VALUE (cleanups);
9838
9839 cleanups = TREE_CHAIN (cleanups);
9840 }
9ba73d38
MS
9841
9842 pop_obstacks ();
5dab5552
MS
9843 }
9844
9845 return new_cleanups;
9846}
9847
ca695ac9
JB
9848/* Expand all cleanups up to OLD_CLEANUPS.
9849 Needed here, and also for language-dependent calls. */
904762c8 9850
ca695ac9
JB
9851void
9852expand_cleanups_to (old_cleanups)
9853 tree old_cleanups;
0006469d 9854{
ca695ac9 9855 while (cleanups_this_call != old_cleanups)
0006469d 9856 {
3d195391 9857 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
d3158f1a 9858 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
ca695ac9
JB
9859 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9860 }
9861}
9862\f
9863/* Expand conditional expressions. */
0006469d 9864
ca695ac9
JB
9865/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9866 LABEL is an rtx of code CODE_LABEL, in this function and all the
9867 functions here. */
0006469d 9868
ca695ac9
JB
9869void
9870jumpifnot (exp, label)
9871 tree exp;
9872 rtx label;
9873{
9874 do_jump (exp, label, NULL_RTX);
9875}
0006469d 9876
ca695ac9 9877/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
0006469d 9878
ca695ac9
JB
9879void
9880jumpif (exp, label)
9881 tree exp;
9882 rtx label;
9883{
9884 do_jump (exp, NULL_RTX, label);
9885}
0006469d 9886
ca695ac9
JB
9887/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9888 the result is zero, or IF_TRUE_LABEL if the result is one.
9889 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9890 meaning fall through in that case.
0006469d 9891
ca695ac9
JB
9892 do_jump always does any pending stack adjust except when it does not
9893 actually perform a jump. An example where there is no jump
9894 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
0006469d 9895
ca695ac9
JB
9896 This function is responsible for optimizing cases such as
9897 &&, || and comparison operators in EXP. */
904762c8 9898
ca695ac9
JB
9899void
9900do_jump (exp, if_false_label, if_true_label)
9901 tree exp;
9902 rtx if_false_label, if_true_label;
0006469d 9903{
ca695ac9
JB
9904 register enum tree_code code = TREE_CODE (exp);
9905 /* Some cases need to create a label to jump to
9906 in order to properly fall through.
9907 These cases set DROP_THROUGH_LABEL nonzero. */
9908 rtx drop_through_label = 0;
9909 rtx temp;
9910 rtx comparison = 0;
9911 int i;
9912 tree type;
2f6e6d22 9913 enum machine_mode mode;
0006469d 9914
ca695ac9 9915 emit_queue ();
0006469d 9916
ca695ac9
JB
9917 switch (code)
9918 {
9919 case ERROR_MARK:
9920 break;
0006469d 9921
ca695ac9
JB
9922 case INTEGER_CST:
9923 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9924 if (temp)
9925 emit_jump (temp);
9926 break;
0006469d 9927
ca695ac9
JB
9928#if 0
9929 /* This is not true with #pragma weak */
9930 case ADDR_EXPR:
9931 /* The address of something can never be zero. */
9932 if (if_true_label)
9933 emit_jump (if_true_label);
9934 break;
9935#endif
0006469d 9936
ca695ac9
JB
9937 case NOP_EXPR:
9938 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9939 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9940 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9941 goto normal;
9942 case CONVERT_EXPR:
9943 /* If we are narrowing the operand, we have to do the compare in the
9944 narrower mode. */
9945 if ((TYPE_PRECISION (TREE_TYPE (exp))
9946 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9947 goto normal;
9948 case NON_LVALUE_EXPR:
9949 case REFERENCE_EXPR:
9950 case ABS_EXPR:
9951 case NEGATE_EXPR:
9952 case LROTATE_EXPR:
9953 case RROTATE_EXPR:
9954 /* These cannot change zero->non-zero or vice versa. */
9955 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9956 break;
0006469d 9957
ca695ac9
JB
9958#if 0
9959 /* This is never less insns than evaluating the PLUS_EXPR followed by
9960 a test and can be longer if the test is eliminated. */
9961 case PLUS_EXPR:
9962 /* Reduce to minus. */
9963 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9964 TREE_OPERAND (exp, 0),
9965 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9966 TREE_OPERAND (exp, 1))));
9967 /* Process as MINUS. */
0006469d 9968#endif
0006469d 9969
ca695ac9
JB
9970 case MINUS_EXPR:
9971 /* Non-zero iff operands of minus differ. */
9972 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9973 TREE_OPERAND (exp, 0),
9974 TREE_OPERAND (exp, 1)),
9975 NE, NE);
9976 break;
904762c8 9977
ca695ac9
JB
9978 case BIT_AND_EXPR:
9979 /* If we are AND'ing with a small constant, do this comparison in the
9980 smallest type that fits. If the machine doesn't have comparisons
9981 that small, it will be converted back to the wider comparison.
9982 This helps if we are testing the sign bit of a narrower object.
9983 combine can't do this for us because it can't know whether a
9984 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
0006469d 9985
ca695ac9
JB
9986 if (! SLOW_BYTE_ACCESS
9987 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9988 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9989 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
2f6e6d22
RK
9990 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9991 && (type = type_for_mode (mode, 1)) != 0
ca695ac9
JB
9992 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9993 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9994 != CODE_FOR_nothing))
9995 {
9996 do_jump (convert (type, exp), if_false_label, if_true_label);
9997 break;
9998 }
9999 goto normal;
904762c8 10000
ca695ac9
JB
10001 case TRUTH_NOT_EXPR:
10002 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10003 break;
0006469d 10004
ca695ac9 10005 case TRUTH_ANDIF_EXPR:
7ee055f4
MS
10006 {
10007 rtx seq1, seq2;
10008 tree cleanups, old_cleanups;
10009
10010 if (if_false_label == 0)
10011 if_false_label = drop_through_label = gen_label_rtx ();
10012 start_sequence ();
10013 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10014 seq1 = get_insns ();
10015 end_sequence ();
10016
10017 old_cleanups = cleanups_this_call;
10018 start_sequence ();
10019 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10020 seq2 = get_insns ();
26657dca 10021 cleanups = defer_cleanups_to (old_cleanups);
7ee055f4
MS
10022 end_sequence ();
10023
7ee055f4
MS
10024 if (cleanups)
10025 {
10026 rtx flag = gen_reg_rtx (word_mode);
10027 tree new_cleanups;
10028 tree cond;
10029
0f41302f 10030 /* Flag cleanups as not needed. */
7ee055f4
MS
10031 emit_move_insn (flag, const0_rtx);
10032 emit_insns (seq1);
10033
0f41302f 10034 /* Flag cleanups as needed. */
7ee055f4
MS
10035 emit_move_insn (flag, const1_rtx);
10036 emit_insns (seq2);
10037
9ba73d38
MS
10038 /* All cleanups must be on the function_obstack. */
10039 push_obstacks_nochange ();
10040 resume_temporary_allocation ();
10041
0f41302f 10042 /* convert flag, which is an rtx, into a tree. */
7ee055f4
MS
10043 cond = make_node (RTL_EXPR);
10044 TREE_TYPE (cond) = integer_type_node;
10045 RTL_EXPR_RTL (cond) = flag;
10046 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
01842234 10047 cond = save_expr (cond);
7ee055f4
MS
10048
10049 new_cleanups = build (COND_EXPR, void_type_node,
10050 truthvalue_conversion (cond),
10051 cleanups, integer_zero_node);
10052 new_cleanups = fold (new_cleanups);
10053
9ba73d38
MS
10054 pop_obstacks ();
10055
3d195391 10056 /* Now add in the conditionalized cleanups. */
7ee055f4
MS
10057 cleanups_this_call
10058 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3d195391 10059 expand_eh_region_start ();
7ee055f4
MS
10060 }
10061 else
10062 {
10063 emit_insns (seq1);
10064 emit_insns (seq2);
10065 }
10066 }
ca695ac9 10067 break;
0006469d 10068
ca695ac9 10069 case TRUTH_ORIF_EXPR:
7ee055f4
MS
10070 {
10071 rtx seq1, seq2;
10072 tree cleanups, old_cleanups;
10073
10074 if (if_true_label == 0)
10075 if_true_label = drop_through_label = gen_label_rtx ();
10076 start_sequence ();
10077 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10078 seq1 = get_insns ();
10079 end_sequence ();
10080
10081 old_cleanups = cleanups_this_call;
10082 start_sequence ();
10083 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10084 seq2 = get_insns ();
26657dca 10085 cleanups = defer_cleanups_to (old_cleanups);
7ee055f4
MS
10086 end_sequence ();
10087
7ee055f4
MS
10088 if (cleanups)
10089 {
10090 rtx flag = gen_reg_rtx (word_mode);
10091 tree new_cleanups;
10092 tree cond;
10093
0f41302f 10094 /* Flag cleanups as not needed. */
7ee055f4
MS
10095 emit_move_insn (flag, const0_rtx);
10096 emit_insns (seq1);
10097
0f41302f 10098 /* Flag cleanups as needed. */
7ee055f4
MS
10099 emit_move_insn (flag, const1_rtx);
10100 emit_insns (seq2);
10101
9ba73d38
MS
10102 /* All cleanups must be on the function_obstack. */
10103 push_obstacks_nochange ();
10104 resume_temporary_allocation ();
10105
0f41302f 10106 /* convert flag, which is an rtx, into a tree. */
7ee055f4
MS
10107 cond = make_node (RTL_EXPR);
10108 TREE_TYPE (cond) = integer_type_node;
10109 RTL_EXPR_RTL (cond) = flag;
10110 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
01842234 10111 cond = save_expr (cond);
7ee055f4
MS
10112
10113 new_cleanups = build (COND_EXPR, void_type_node,
10114 truthvalue_conversion (cond),
10115 cleanups, integer_zero_node);
10116 new_cleanups = fold (new_cleanups);
10117
9ba73d38
MS
10118 pop_obstacks ();
10119
3d195391 10120 /* Now add in the conditionalized cleanups. */
7ee055f4
MS
10121 cleanups_this_call
10122 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3d195391 10123 expand_eh_region_start ();
7ee055f4
MS
10124 }
10125 else
10126 {
10127 emit_insns (seq1);
10128 emit_insns (seq2);
10129 }
10130 }
ca695ac9 10131 break;
0006469d 10132
ca695ac9 10133 case COMPOUND_EXPR:
0088fcb1 10134 push_temp_slots ();
ca695ac9 10135 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
d80f96e9 10136 preserve_temp_slots (NULL_RTX);
ca695ac9 10137 free_temp_slots ();
0088fcb1 10138 pop_temp_slots ();
ca695ac9
JB
10139 emit_queue ();
10140 do_pending_stack_adjust ();
10141 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10142 break;
0006469d 10143
ca695ac9
JB
10144 case COMPONENT_REF:
10145 case BIT_FIELD_REF:
10146 case ARRAY_REF:
10147 {
10148 int bitsize, bitpos, unsignedp;
10149 enum machine_mode mode;
10150 tree type;
10151 tree offset;
10152 int volatilep = 0;
0006469d 10153
ca695ac9
JB
10154 /* Get description of this reference. We don't actually care
10155 about the underlying object here. */
10156 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10157 &mode, &unsignedp, &volatilep);
0006469d 10158
ca695ac9
JB
10159 type = type_for_size (bitsize, unsignedp);
10160 if (! SLOW_BYTE_ACCESS
10161 && type != 0 && bitsize >= 0
10162 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10163 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10164 != CODE_FOR_nothing))
10165 {
10166 do_jump (convert (type, exp), if_false_label, if_true_label);
10167 break;
10168 }
10169 goto normal;
10170 }
0006469d 10171
ca695ac9
JB
10172 case COND_EXPR:
10173 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10174 if (integer_onep (TREE_OPERAND (exp, 1))
10175 && integer_zerop (TREE_OPERAND (exp, 2)))
10176 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
904762c8 10177
ca695ac9
JB
10178 else if (integer_zerop (TREE_OPERAND (exp, 1))
10179 && integer_onep (TREE_OPERAND (exp, 2)))
10180 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0006469d 10181
ca695ac9
JB
10182 else
10183 {
0cb1d109
DE
10184 rtx seq1, seq2;
10185 tree cleanups_left_side, cleanups_right_side, old_cleanups;
10186
ca695ac9
JB
10187 register rtx label1 = gen_label_rtx ();
10188 drop_through_label = gen_label_rtx ();
0cb1d109 10189
ca695ac9 10190 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
0cb1d109
DE
10191
10192 /* We need to save the cleanups for the lhs and rhs separately.
10193 Keep track of the cleanups seen before the lhs. */
10194 old_cleanups = cleanups_this_call;
10195 start_sequence ();
ca695ac9
JB
10196 /* Now the THEN-expression. */
10197 do_jump (TREE_OPERAND (exp, 1),
10198 if_false_label ? if_false_label : drop_through_label,
10199 if_true_label ? if_true_label : drop_through_label);
10200 /* In case the do_jump just above never jumps. */
10201 do_pending_stack_adjust ();
10202 emit_label (label1);
0cb1d109
DE
10203 seq1 = get_insns ();
10204 /* Now grab the cleanups for the lhs. */
10205 cleanups_left_side = defer_cleanups_to (old_cleanups);
10206 end_sequence ();
10207
10208 /* And keep track of where we start before the rhs. */
10209 old_cleanups = cleanups_this_call;
10210 start_sequence ();
ca695ac9
JB
10211 /* Now the ELSE-expression. */
10212 do_jump (TREE_OPERAND (exp, 2),
10213 if_false_label ? if_false_label : drop_through_label,
10214 if_true_label ? if_true_label : drop_through_label);
0cb1d109
DE
10215 seq2 = get_insns ();
10216 /* Grab the cleanups for the rhs. */
10217 cleanups_right_side = defer_cleanups_to (old_cleanups);
10218 end_sequence ();
10219
10220 if (cleanups_left_side || cleanups_right_side)
10221 {
10222 /* Make the cleanups for the THEN and ELSE clauses
10223 conditional based on which half is executed. */
10224 rtx flag = gen_reg_rtx (word_mode);
10225 tree new_cleanups;
10226 tree cond;
10227
10228 /* Set the flag to 0 so that we know we executed the lhs. */
10229 emit_move_insn (flag, const0_rtx);
10230 emit_insns (seq1);
10231
10232 /* Set the flag to 1 so that we know we executed the rhs. */
10233 emit_move_insn (flag, const1_rtx);
10234 emit_insns (seq2);
10235
10236 /* Make sure the cleanup lives on the function_obstack. */
10237 push_obstacks_nochange ();
10238 resume_temporary_allocation ();
10239
10240 /* Now, build up a COND_EXPR that tests the value of the
10241 flag, and then either do the cleanups for the lhs or the
10242 rhs. */
10243 cond = make_node (RTL_EXPR);
10244 TREE_TYPE (cond) = integer_type_node;
10245 RTL_EXPR_RTL (cond) = flag;
10246 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10247 cond = save_expr (cond);
10248
10249 new_cleanups = build (COND_EXPR, void_type_node,
10250 truthvalue_conversion (cond),
10251 cleanups_right_side, cleanups_left_side);
10252 new_cleanups = fold (new_cleanups);
10253
10254 pop_obstacks ();
10255
10256 /* Now add in the conditionalized cleanups. */
10257 cleanups_this_call
10258 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10259 expand_eh_region_start ();
10260 }
10261 else
10262 {
10263 /* No cleanups were needed, so emit the two sequences
10264 directly. */
10265 emit_insns (seq1);
10266 emit_insns (seq2);
10267 }
ca695ac9
JB
10268 }
10269 break;
0006469d 10270
ca695ac9 10271 case EQ_EXPR:
0e8c9172
RK
10272 {
10273 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10274
10275 if (integer_zerop (TREE_OPERAND (exp, 1)))
10276 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10277 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
201012cb 10278 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
0e8c9172
RK
10279 do_jump
10280 (fold
10281 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10282 fold (build (EQ_EXPR, TREE_TYPE (exp),
c8465d86
RK
10283 fold (build1 (REALPART_EXPR,
10284 TREE_TYPE (inner_type),
0e8c9172 10285 TREE_OPERAND (exp, 0))),
c8465d86
RK
10286 fold (build1 (REALPART_EXPR,
10287 TREE_TYPE (inner_type),
0e8c9172
RK
10288 TREE_OPERAND (exp, 1))))),
10289 fold (build (EQ_EXPR, TREE_TYPE (exp),
c8465d86
RK
10290 fold (build1 (IMAGPART_EXPR,
10291 TREE_TYPE (inner_type),
0e8c9172 10292 TREE_OPERAND (exp, 0))),
c8465d86
RK
10293 fold (build1 (IMAGPART_EXPR,
10294 TREE_TYPE (inner_type),
0e8c9172
RK
10295 TREE_OPERAND (exp, 1))))))),
10296 if_false_label, if_true_label);
10297 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10298 && !can_compare_p (TYPE_MODE (inner_type)))
10299 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10300 else
10301 comparison = compare (exp, EQ, EQ);
10302 break;
10303 }
0006469d 10304
ca695ac9 10305 case NE_EXPR:
0e8c9172
RK
10306 {
10307 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10308
10309 if (integer_zerop (TREE_OPERAND (exp, 1)))
10310 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10311 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
201012cb 10312 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
0e8c9172
RK
10313 do_jump
10314 (fold
10315 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10316 fold (build (NE_EXPR, TREE_TYPE (exp),
c8465d86
RK
10317 fold (build1 (REALPART_EXPR,
10318 TREE_TYPE (inner_type),
0e8c9172 10319 TREE_OPERAND (exp, 0))),
c8465d86
RK
10320 fold (build1 (REALPART_EXPR,
10321 TREE_TYPE (inner_type),
0e8c9172
RK
10322 TREE_OPERAND (exp, 1))))),
10323 fold (build (NE_EXPR, TREE_TYPE (exp),
c8465d86
RK
10324 fold (build1 (IMAGPART_EXPR,
10325 TREE_TYPE (inner_type),
0e8c9172 10326 TREE_OPERAND (exp, 0))),
c8465d86
RK
10327 fold (build1 (IMAGPART_EXPR,
10328 TREE_TYPE (inner_type),
0e8c9172
RK
10329 TREE_OPERAND (exp, 1))))))),
10330 if_false_label, if_true_label);
10331 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10332 && !can_compare_p (TYPE_MODE (inner_type)))
10333 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10334 else
10335 comparison = compare (exp, NE, NE);
10336 break;
10337 }
0006469d 10338
ca695ac9
JB
10339 case LT_EXPR:
10340 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10341 == MODE_INT)
10342 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10343 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10344 else
10345 comparison = compare (exp, LT, LTU);
10346 break;
0006469d 10347
ca695ac9
JB
10348 case LE_EXPR:
10349 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10350 == MODE_INT)
10351 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10352 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10353 else
10354 comparison = compare (exp, LE, LEU);
10355 break;
0006469d 10356
ca695ac9
JB
10357 case GT_EXPR:
10358 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10359 == MODE_INT)
10360 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10361 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10362 else
10363 comparison = compare (exp, GT, GTU);
10364 break;
0006469d 10365
ca695ac9
JB
10366 case GE_EXPR:
10367 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10368 == MODE_INT)
10369 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10370 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10371 else
10372 comparison = compare (exp, GE, GEU);
10373 break;
0006469d 10374
ca695ac9
JB
10375 default:
10376 normal:
10377 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10378#if 0
10379 /* This is not needed any more and causes poor code since it causes
10380 comparisons and tests from non-SI objects to have different code
10381 sequences. */
10382 /* Copy to register to avoid generating bad insns by cse
10383 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10384 if (!cse_not_expected && GET_CODE (temp) == MEM)
10385 temp = copy_to_reg (temp);
10386#endif
10387 do_pending_stack_adjust ();
10388 if (GET_CODE (temp) == CONST_INT)
10389 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10390 else if (GET_CODE (temp) == LABEL_REF)
10391 comparison = const_true_rtx;
10392 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10393 && !can_compare_p (GET_MODE (temp)))
10394 /* Note swapping the labels gives us not-equal. */
10395 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10396 else if (GET_MODE (temp) != VOIDmode)
10397 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10398 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10399 GET_MODE (temp), NULL_RTX, 0);
10400 else
10401 abort ();
10402 }
0006469d 10403
ca695ac9
JB
10404 /* Do any postincrements in the expression that was tested. */
10405 emit_queue ();
0006469d 10406
ca695ac9
JB
10407 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10408 straight into a conditional jump instruction as the jump condition.
10409 Otherwise, all the work has been done already. */
0006469d 10410
ca695ac9 10411 if (comparison == const_true_rtx)
0006469d 10412 {
ca695ac9
JB
10413 if (if_true_label)
10414 emit_jump (if_true_label);
0006469d 10415 }
ca695ac9
JB
10416 else if (comparison == const0_rtx)
10417 {
10418 if (if_false_label)
10419 emit_jump (if_false_label);
10420 }
10421 else if (comparison)
10422 do_jump_for_compare (comparison, if_false_label, if_true_label);
0006469d 10423
ca695ac9 10424 if (drop_through_label)
0006469d 10425 {
ca695ac9
JB
10426 /* If do_jump produces code that might be jumped around,
10427 do any stack adjusts from that code, before the place
10428 where control merges in. */
10429 do_pending_stack_adjust ();
10430 emit_label (drop_through_label);
10431 }
10432}
10433\f
10434/* Given a comparison expression EXP for values too wide to be compared
10435 with one insn, test the comparison and jump to the appropriate label.
10436 The code of EXP is ignored; we always test GT if SWAP is 0,
10437 and LT if SWAP is 1. */
0006469d 10438
ca695ac9
JB
10439static void
10440do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10441 tree exp;
10442 int swap;
10443 rtx if_false_label, if_true_label;
10444{
10445 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10446 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10447 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10448 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10449 rtx drop_through_label = 0;
10450 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10451 int i;
0006469d 10452
ca695ac9
JB
10453 if (! if_true_label || ! if_false_label)
10454 drop_through_label = gen_label_rtx ();
10455 if (! if_true_label)
10456 if_true_label = drop_through_label;
10457 if (! if_false_label)
10458 if_false_label = drop_through_label;
0006469d 10459
ca695ac9
JB
10460 /* Compare a word at a time, high order first. */
10461 for (i = 0; i < nwords; i++)
10462 {
10463 rtx comp;
10464 rtx op0_word, op1_word;
0006469d 10465
ca695ac9
JB
10466 if (WORDS_BIG_ENDIAN)
10467 {
10468 op0_word = operand_subword_force (op0, i, mode);
10469 op1_word = operand_subword_force (op1, i, mode);
10470 }
10471 else
10472 {
10473 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10474 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10475 }
0006469d 10476
ca695ac9
JB
10477 /* All but high-order word must be compared as unsigned. */
10478 comp = compare_from_rtx (op0_word, op1_word,
10479 (unsignedp || i > 0) ? GTU : GT,
10480 unsignedp, word_mode, NULL_RTX, 0);
10481 if (comp == const_true_rtx)
10482 emit_jump (if_true_label);
10483 else if (comp != const0_rtx)
10484 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 10485
ca695ac9
JB
10486 /* Consider lower words only if these are equal. */
10487 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10488 NULL_RTX, 0);
10489 if (comp == const_true_rtx)
10490 emit_jump (if_false_label);
10491 else if (comp != const0_rtx)
10492 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10493 }
0006469d 10494
ca695ac9
JB
10495 if (if_false_label)
10496 emit_jump (if_false_label);
10497 if (drop_through_label)
10498 emit_label (drop_through_label);
0006469d
TW
10499}
10500
ca695ac9
JB
10501/* Compare OP0 with OP1, word at a time, in mode MODE.
10502 UNSIGNEDP says to do unsigned comparison.
10503 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
904762c8 10504
2e5ec6cf 10505void
ca695ac9
JB
10506do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10507 enum machine_mode mode;
10508 int unsignedp;
10509 rtx op0, op1;
10510 rtx if_false_label, if_true_label;
0006469d 10511{
ca695ac9
JB
10512 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10513 rtx drop_through_label = 0;
10514 int i;
0006469d 10515
ca695ac9
JB
10516 if (! if_true_label || ! if_false_label)
10517 drop_through_label = gen_label_rtx ();
10518 if (! if_true_label)
10519 if_true_label = drop_through_label;
10520 if (! if_false_label)
10521 if_false_label = drop_through_label;
0006469d 10522
ca695ac9
JB
10523 /* Compare a word at a time, high order first. */
10524 for (i = 0; i < nwords; i++)
0006469d 10525 {
ca695ac9
JB
10526 rtx comp;
10527 rtx op0_word, op1_word;
0006469d 10528
ca695ac9
JB
10529 if (WORDS_BIG_ENDIAN)
10530 {
10531 op0_word = operand_subword_force (op0, i, mode);
10532 op1_word = operand_subword_force (op1, i, mode);
10533 }
10534 else
10535 {
10536 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10537 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10538 }
0006469d 10539
ca695ac9
JB
10540 /* All but high-order word must be compared as unsigned. */
10541 comp = compare_from_rtx (op0_word, op1_word,
10542 (unsignedp || i > 0) ? GTU : GT,
10543 unsignedp, word_mode, NULL_RTX, 0);
10544 if (comp == const_true_rtx)
10545 emit_jump (if_true_label);
10546 else if (comp != const0_rtx)
10547 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 10548
ca695ac9
JB
10549 /* Consider lower words only if these are equal. */
10550 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10551 NULL_RTX, 0);
10552 if (comp == const_true_rtx)
10553 emit_jump (if_false_label);
10554 else if (comp != const0_rtx)
10555 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10556 }
0006469d 10557
ca695ac9
JB
10558 if (if_false_label)
10559 emit_jump (if_false_label);
10560 if (drop_through_label)
10561 emit_label (drop_through_label);
0006469d 10562}
bbf6f052 10563
ca695ac9
JB
10564/* Given an EQ_EXPR expression EXP for values too wide to be compared
10565 with one insn, test the comparison and jump to the appropriate label. */
10566
10567static void
10568do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10569 tree exp;
10570 rtx if_false_label, if_true_label;
bbf6f052 10571{
ca695ac9
JB
10572 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10573 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10574 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10575 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10576 int i;
10577 rtx drop_through_label = 0;
bbf6f052 10578
ca695ac9
JB
10579 if (! if_false_label)
10580 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10581
ca695ac9
JB
10582 for (i = 0; i < nwords; i++)
10583 {
10584 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10585 operand_subword_force (op1, i, mode),
10586 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10587 word_mode, NULL_RTX, 0);
10588 if (comp == const_true_rtx)
10589 emit_jump (if_false_label);
10590 else if (comp != const0_rtx)
10591 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10592 }
1499e0a8 10593
ca695ac9
JB
10594 if (if_true_label)
10595 emit_jump (if_true_label);
10596 if (drop_through_label)
10597 emit_label (drop_through_label);
10598}
10599\f
10600/* Jump according to whether OP0 is 0.
10601 We assume that OP0 has an integer mode that is too wide
10602 for the available compare insns. */
1499e0a8 10603
ca695ac9
JB
10604static void
10605do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10606 rtx op0;
10607 rtx if_false_label, if_true_label;
10608{
10609 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10610 int i;
10611 rtx drop_through_label = 0;
1499e0a8 10612
ca695ac9
JB
10613 if (! if_false_label)
10614 drop_through_label = if_false_label = gen_label_rtx ();
1499e0a8 10615
ca695ac9
JB
10616 for (i = 0; i < nwords; i++)
10617 {
10618 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10619 GET_MODE (op0)),
10620 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10621 if (comp == const_true_rtx)
10622 emit_jump (if_false_label);
10623 else if (comp != const0_rtx)
10624 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10625 }
1499e0a8 10626
ca695ac9
JB
10627 if (if_true_label)
10628 emit_jump (if_true_label);
10629 if (drop_through_label)
10630 emit_label (drop_through_label);
10631}
bbf6f052 10632
ca695ac9
JB
10633/* Given a comparison expression in rtl form, output conditional branches to
10634 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
bbf6f052 10635
ca695ac9
JB
10636static void
10637do_jump_for_compare (comparison, if_false_label, if_true_label)
10638 rtx comparison, if_false_label, if_true_label;
10639{
10640 if (if_true_label)
a358cee0 10641 {
ca695ac9
JB
10642 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10643 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10644 else
10645 abort ();
a358cee0 10646
ca695ac9
JB
10647 if (if_false_label)
10648 emit_jump (if_false_label);
c980ac49 10649 }
ca695ac9 10650 else if (if_false_label)
bbf6f052 10651 {
ca695ac9 10652 rtx insn;
f12f485a 10653 rtx prev = get_last_insn ();
ca695ac9 10654 rtx branch = 0;
bbf6f052 10655
ca695ac9
JB
10656 /* Output the branch with the opposite condition. Then try to invert
10657 what is generated. If more than one insn is a branch, or if the
10658 branch is not the last insn written, abort. If we can't invert
10659 the branch, emit make a true label, redirect this jump to that,
10660 emit a jump to the false label and define the true label. */
bbf6f052 10661
ca695ac9 10662 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
34661f5c 10663 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
ca695ac9
JB
10664 else
10665 abort ();
bbf6f052 10666
41dfd40c
RK
10667 /* Here we get the first insn that was just emitted. It used to be the
10668 case that, on some machines, emitting the branch would discard
10669 the previous compare insn and emit a replacement. This isn't
10670 done anymore, but abort if we see that PREV is deleted. */
10671
ca695ac9 10672 if (prev == 0)
ca695ac9 10673 insn = get_insns ();
41dfd40c
RK
10674 else if (INSN_DELETED_P (prev))
10675 abort ();
ca695ac9 10676 else
41dfd40c 10677 insn = NEXT_INSN (prev);
bbf6f052 10678
34661f5c 10679 for (; insn; insn = NEXT_INSN (insn))
ca695ac9
JB
10680 if (GET_CODE (insn) == JUMP_INSN)
10681 {
10682 if (branch)
10683 abort ();
10684 branch = insn;
10685 }
10686
10687 if (branch != get_last_insn ())
10688 abort ();
10689
127e4d19 10690 JUMP_LABEL (branch) = if_false_label;
ca695ac9
JB
10691 if (! invert_jump (branch, if_false_label))
10692 {
10693 if_true_label = gen_label_rtx ();
10694 redirect_jump (branch, if_true_label);
10695 emit_jump (if_false_label);
10696 emit_label (if_true_label);
bbf6f052
RK
10697 }
10698 }
ca695ac9
JB
10699}
10700\f
10701/* Generate code for a comparison expression EXP
10702 (including code to compute the values to be compared)
10703 and set (CC0) according to the result.
10704 SIGNED_CODE should be the rtx operation for this comparison for
10705 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10706
10707 We force a stack adjustment unless there are currently
10708 things pushed on the stack that aren't yet used. */
10709
10710static rtx
10711compare (exp, signed_code, unsigned_code)
10712 register tree exp;
10713 enum rtx_code signed_code, unsigned_code;
10714{
10715 register rtx op0
10716 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10717 register rtx op1
10718 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10719 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10720 register enum machine_mode mode = TYPE_MODE (type);
10721 int unsignedp = TREE_UNSIGNED (type);
10722 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
bbf6f052 10723
5718612f
JL
10724#ifdef HAVE_canonicalize_funcptr_for_compare
10725 /* If function pointers need to be "canonicalized" before they can
10726 be reliably compared, then canonicalize them. */
10727 if (HAVE_canonicalize_funcptr_for_compare
10728 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10729 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10730 == FUNCTION_TYPE))
10731 {
10732 rtx new_op0 = gen_reg_rtx (mode);
10733
10734 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10735 op0 = new_op0;
10736 }
10737
10738 if (HAVE_canonicalize_funcptr_for_compare
10739 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10740 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10741 == FUNCTION_TYPE))
10742 {
10743 rtx new_op1 = gen_reg_rtx (mode);
10744
10745 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10746 op1 = new_op1;
10747 }
10748#endif
10749
ca695ac9
JB
10750 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10751 ((mode == BLKmode)
10752 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10753 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10754}
bbf6f052 10755
ca695ac9
JB
10756/* Like compare but expects the values to compare as two rtx's.
10757 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 10758
ca695ac9
JB
10759 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10760 compared.
bbf6f052 10761
ca695ac9
JB
10762 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10763 size of MODE should be used. */
bbf6f052 10764
ca695ac9
JB
10765rtx
10766compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10767 register rtx op0, op1;
10768 enum rtx_code code;
10769 int unsignedp;
10770 enum machine_mode mode;
10771 rtx size;
10772 int align;
10773{
10774 rtx tem;
bbf6f052 10775
ca695ac9
JB
10776 /* If one operand is constant, make it the second one. Only do this
10777 if the other operand is not constant as well. */
bbf6f052 10778
ca695ac9
JB
10779 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10780 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10781 {
10782 tem = op0;
10783 op0 = op1;
10784 op1 = tem;
10785 code = swap_condition (code);
10786 }
bbf6f052 10787
ca695ac9 10788 if (flag_force_mem)
bbf6f052 10789 {
ca695ac9
JB
10790 op0 = force_not_mem (op0);
10791 op1 = force_not_mem (op1);
10792 }
bbf6f052 10793
ca695ac9 10794 do_pending_stack_adjust ();
bbf6f052 10795
ca695ac9
JB
10796 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10797 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10798 return tem;
bbf6f052 10799
ca695ac9
JB
10800#if 0
10801 /* There's no need to do this now that combine.c can eliminate lots of
10802 sign extensions. This can be less efficient in certain cases on other
0f41302f 10803 machines. */
bbf6f052 10804
ca695ac9
JB
10805 /* If this is a signed equality comparison, we can do it as an
10806 unsigned comparison since zero-extension is cheaper than sign
10807 extension and comparisons with zero are done as unsigned. This is
10808 the case even on machines that can do fast sign extension, since
10809 zero-extension is easier to combine with other operations than
10810 sign-extension is. If we are comparing against a constant, we must
10811 convert it to what it would look like unsigned. */
10812 if ((code == EQ || code == NE) && ! unsignedp
10813 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10814 {
10815 if (GET_CODE (op1) == CONST_INT
10816 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10817 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10818 unsignedp = 1;
bbf6f052 10819 }
ca695ac9
JB
10820#endif
10821
10822 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
bbf6f052 10823
ca695ac9 10824 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
bbf6f052
RK
10825}
10826\f
ca695ac9
JB
10827/* Generate code to calculate EXP using a store-flag instruction
10828 and return an rtx for the result. EXP is either a comparison
10829 or a TRUTH_NOT_EXPR whose operand is a comparison.
bbf6f052 10830
ca695ac9 10831 If TARGET is nonzero, store the result there if convenient.
bbf6f052 10832
ca695ac9
JB
10833 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10834 cheap.
bbf6f052 10835
ca695ac9
JB
10836 Return zero if there is no suitable set-flag instruction
10837 available on this machine.
bbf6f052 10838
ca695ac9
JB
10839 Once expand_expr has been called on the arguments of the comparison,
10840 we are committed to doing the store flag, since it is not safe to
10841 re-evaluate the expression. We emit the store-flag insn by calling
10842 emit_store_flag, but only expand the arguments if we have a reason
10843 to believe that emit_store_flag will be successful. If we think that
10844 it will, but it isn't, we have to simulate the store-flag with a
10845 set/jump/set sequence. */
bbf6f052 10846
ca695ac9
JB
10847static rtx
10848do_store_flag (exp, target, mode, only_cheap)
10849 tree exp;
10850 rtx target;
10851 enum machine_mode mode;
10852 int only_cheap;
bbf6f052 10853{
ca695ac9
JB
10854 enum rtx_code code;
10855 tree arg0, arg1, type;
10856 tree tem;
10857 enum machine_mode operand_mode;
10858 int invert = 0;
10859 int unsignedp;
10860 rtx op0, op1;
10861 enum insn_code icode;
10862 rtx subtarget = target;
10863 rtx result, label, pattern, jump_pat;
bbf6f052 10864
ca695ac9
JB
10865 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10866 result at the end. We can't simply invert the test since it would
10867 have already been inverted if it were valid. This case occurs for
10868 some floating-point comparisons. */
10869
10870 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10871 invert = 1, exp = TREE_OPERAND (exp, 0);
10872
10873 arg0 = TREE_OPERAND (exp, 0);
10874 arg1 = TREE_OPERAND (exp, 1);
10875 type = TREE_TYPE (arg0);
10876 operand_mode = TYPE_MODE (type);
10877 unsignedp = TREE_UNSIGNED (type);
10878
10879 /* We won't bother with BLKmode store-flag operations because it would mean
10880 passing a lot of information to emit_store_flag. */
10881 if (operand_mode == BLKmode)
10882 return 0;
10883
5718612f
JL
10884 /* We won't bother with store-flag operations involving function pointers
10885 when function pointers must be canonicalized before comparisons. */
10886#ifdef HAVE_canonicalize_funcptr_for_compare
10887 if (HAVE_canonicalize_funcptr_for_compare
10888 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10889 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10890 == FUNCTION_TYPE))
10891 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10892 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10893 == FUNCTION_TYPE))))
10894 return 0;
10895#endif
10896
ca695ac9
JB
10897 STRIP_NOPS (arg0);
10898 STRIP_NOPS (arg1);
10899
10900 /* Get the rtx comparison code to use. We know that EXP is a comparison
10901 operation of some type. Some comparisons against 1 and -1 can be
10902 converted to comparisons with zero. Do so here so that the tests
10903 below will be aware that we have a comparison with zero. These
10904 tests will not catch constants in the first operand, but constants
10905 are rarely passed as the first operand. */
10906
10907 switch (TREE_CODE (exp))
10908 {
10909 case EQ_EXPR:
10910 code = EQ;
10911 break;
10912 case NE_EXPR:
10913 code = NE;
10914 break;
10915 case LT_EXPR:
10916 if (integer_onep (arg1))
10917 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10918 else
10919 code = unsignedp ? LTU : LT;
10920 break;
10921 case LE_EXPR:
10922 if (! unsignedp && integer_all_onesp (arg1))
10923 arg1 = integer_zero_node, code = LT;
10924 else
10925 code = unsignedp ? LEU : LE;
10926 break;
10927 case GT_EXPR:
10928 if (! unsignedp && integer_all_onesp (arg1))
10929 arg1 = integer_zero_node, code = GE;
10930 else
10931 code = unsignedp ? GTU : GT;
10932 break;
10933 case GE_EXPR:
10934 if (integer_onep (arg1))
10935 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10936 else
10937 code = unsignedp ? GEU : GE;
10938 break;
10939 default:
10940 abort ();
10941 }
bbf6f052 10942
ca695ac9
JB
10943 /* Put a constant second. */
10944 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
bbf6f052 10945 {
ca695ac9
JB
10946 tem = arg0; arg0 = arg1; arg1 = tem;
10947 code = swap_condition (code);
bbf6f052 10948 }
bbf6f052 10949
ca695ac9
JB
10950 /* If this is an equality or inequality test of a single bit, we can
10951 do this by shifting the bit being tested to the low-order bit and
10952 masking the result with the constant 1. If the condition was EQ,
10953 we xor it with 1. This does not require an scc insn and is faster
10954 than an scc insn even if we have it. */
bbf6f052 10955
ca695ac9
JB
10956 if ((code == NE || code == EQ)
10957 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10958 && integer_pow2p (TREE_OPERAND (arg0, 1))
10959 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10960 {
10961 tree inner = TREE_OPERAND (arg0, 0);
21b2a157
JW
10962 HOST_WIDE_INT tem;
10963 int bitnum;
ca695ac9 10964 int ops_unsignedp;
bbf6f052 10965
21b2a157
JW
10966 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10967 NULL_RTX, VOIDmode, 0));
10968 /* In this case, immed_double_const will sign extend the value to make
10969 it look the same on the host and target. We must remove the
10970 sign-extension before calling exact_log2, since exact_log2 will
10971 fail for negative values. */
10972 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
10973 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
8b295000
JW
10974 /* We don't use the obvious constant shift to generate the mask,
10975 because that generates compiler warnings when BITS_PER_WORD is
10976 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10977 code is unreachable in that case. */
10978 tem = tem & GET_MODE_MASK (word_mode);
21b2a157
JW
10979 bitnum = exact_log2 (tem);
10980
ca695ac9
JB
10981 /* If INNER is a right shift of a constant and it plus BITNUM does
10982 not overflow, adjust BITNUM and INNER. */
bbf6f052 10983
ca695ac9
JB
10984 if (TREE_CODE (inner) == RSHIFT_EXPR
10985 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10986 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10987 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10988 < TYPE_PRECISION (type)))
10989 {
10990 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10991 inner = TREE_OPERAND (inner, 0);
10992 }
bbf6f052 10993
ca695ac9
JB
10994 /* If we are going to be able to omit the AND below, we must do our
10995 operations as unsigned. If we must use the AND, we have a choice.
10996 Normally unsigned is faster, but for some machines signed is. */
10997 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
ad92c826
RK
10998#ifdef LOAD_EXTEND_OP
10999 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
ca695ac9
JB
11000#else
11001 : 1
11002#endif
11003 );
bbf6f052 11004
ca695ac9
JB
11005 if (subtarget == 0 || GET_CODE (subtarget) != REG
11006 || GET_MODE (subtarget) != operand_mode
11007 || ! safe_from_p (subtarget, inner))
11008 subtarget = 0;
e7c33f54 11009
ca695ac9 11010 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 11011
ca695ac9
JB
11012 if (bitnum != 0)
11013 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
0c316b20 11014 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 11015
ca695ac9
JB
11016 if (GET_MODE (op0) != mode)
11017 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 11018
ca695ac9 11019 if ((code == EQ && ! invert) || (code == NE && invert))
0c316b20 11020 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
ca695ac9 11021 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 11022
ca695ac9
JB
11023 /* Put the AND last so it can combine with more things. */
11024 if (bitnum != TYPE_PRECISION (type) - 1)
0c316b20 11025 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 11026
ca695ac9
JB
11027 return op0;
11028 }
bbf6f052 11029
ca695ac9
JB
11030 /* Now see if we are likely to be able to do this. Return if not. */
11031 if (! can_compare_p (operand_mode))
11032 return 0;
11033 icode = setcc_gen_code[(int) code];
11034 if (icode == CODE_FOR_nothing
11035 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11036 {
11037 /* We can only do this if it is one of the special cases that
11038 can be handled without an scc insn. */
11039 if ((code == LT && integer_zerop (arg1))
11040 || (! only_cheap && code == GE && integer_zerop (arg1)))
11041 ;
11042 else if (BRANCH_COST >= 0
11043 && ! only_cheap && (code == NE || code == EQ)
11044 && TREE_CODE (type) != REAL_TYPE
11045 && ((abs_optab->handlers[(int) operand_mode].insn_code
11046 != CODE_FOR_nothing)
11047 || (ffs_optab->handlers[(int) operand_mode].insn_code
11048 != CODE_FOR_nothing)))
11049 ;
11050 else
11051 return 0;
11052 }
11053
11054 preexpand_calls (exp);
11055 if (subtarget == 0 || GET_CODE (subtarget) != REG
11056 || GET_MODE (subtarget) != operand_mode
11057 || ! safe_from_p (subtarget, arg1))
11058 subtarget = 0;
bbf6f052 11059
ca695ac9
JB
11060 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11061 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
bbf6f052 11062
ca695ac9
JB
11063 if (target == 0)
11064 target = gen_reg_rtx (mode);
bbf6f052 11065
ca695ac9
JB
11066 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11067 because, if the emit_store_flag does anything it will succeed and
11068 OP0 and OP1 will not be used subsequently. */
bbf6f052 11069
ca695ac9
JB
11070 result = emit_store_flag (target, code,
11071 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11072 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11073 operand_mode, unsignedp, 1);
bbf6f052 11074
ca695ac9
JB
11075 if (result)
11076 {
11077 if (invert)
11078 result = expand_binop (mode, xor_optab, result, const1_rtx,
11079 result, 0, OPTAB_LIB_WIDEN);
11080 return result;
11081 }
bbf6f052 11082
ca695ac9 11083 /* If this failed, we have to do this with set/compare/jump/set code. */
bb60ac63 11084 if (GET_CODE (target) != REG
ca695ac9
JB
11085 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11086 target = gen_reg_rtx (GET_MODE (target));
bbf6f052 11087
ca695ac9
JB
11088 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11089 result = compare_from_rtx (op0, op1, code, unsignedp,
11090 operand_mode, NULL_RTX, 0);
11091 if (GET_CODE (result) == CONST_INT)
11092 return (((result == const0_rtx && ! invert)
11093 || (result != const0_rtx && invert))
11094 ? const0_rtx : const1_rtx);
bbf6f052 11095
ca695ac9
JB
11096 label = gen_label_rtx ();
11097 if (bcc_gen_fctn[(int) code] == 0)
11098 abort ();
bbf6f052 11099
ca695ac9
JB
11100 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11101 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11102 emit_label (label);
bbf6f052 11103
ca695ac9
JB
11104 return target;
11105}
11106\f
11107/* Generate a tablejump instruction (used for switch statements). */
bbf6f052 11108
ca695ac9 11109#ifdef HAVE_tablejump
bbf6f052 11110
ca695ac9
JB
11111/* INDEX is the value being switched on, with the lowest value
11112 in the table already subtracted.
11113 MODE is its expected mode (needed if INDEX is constant).
11114 RANGE is the length of the jump table.
11115 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
bbf6f052 11116
ca695ac9
JB
11117 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11118 index value is out of range. */
bbf6f052 11119
ca695ac9
JB
11120void
11121do_tablejump (index, mode, range, table_label, default_label)
11122 rtx index, range, table_label, default_label;
11123 enum machine_mode mode;
11124{
11125 register rtx temp, vector;
bbf6f052 11126
ca695ac9
JB
11127 /* Do an unsigned comparison (in the proper mode) between the index
11128 expression and the value which represents the length of the range.
11129 Since we just finished subtracting the lower bound of the range
11130 from the index expression, this comparison allows us to simultaneously
11131 check that the original index expression value is both greater than
11132 or equal to the minimum value of the range and less than or equal to
11133 the maximum value of the range. */
bbf6f052 11134
bf500664
RK
11135 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11136 emit_jump_insn (gen_bgtu (default_label));
bbf6f052 11137
ca695ac9
JB
11138 /* If index is in range, it must fit in Pmode.
11139 Convert to Pmode so we can index with it. */
11140 if (mode != Pmode)
11141 index = convert_to_mode (Pmode, index, 1);
bbf6f052 11142
ca695ac9
JB
11143 /* Don't let a MEM slip thru, because then INDEX that comes
11144 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11145 and break_out_memory_refs will go to work on it and mess it up. */
11146#ifdef PIC_CASE_VECTOR_ADDRESS
11147 if (flag_pic && GET_CODE (index) != REG)
11148 index = copy_to_mode_reg (Pmode, index);
11149#endif
bbf6f052 11150
ca695ac9
JB
11151 /* If flag_force_addr were to affect this address
11152 it could interfere with the tricky assumptions made
11153 about addresses that contain label-refs,
11154 which may be valid only very near the tablejump itself. */
11155 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11156 GET_MODE_SIZE, because this indicates how large insns are. The other
11157 uses should all be Pmode, because they are addresses. This code
11158 could fail if addresses and insns are not the same size. */
11159 index = gen_rtx (PLUS, Pmode,
11160 gen_rtx (MULT, Pmode, index,
11161 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11162 gen_rtx (LABEL_REF, Pmode, table_label));
11163#ifdef PIC_CASE_VECTOR_ADDRESS
11164 if (flag_pic)
11165 index = PIC_CASE_VECTOR_ADDRESS (index);
11166 else
11167#endif
11168 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11169 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11170 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11171 RTX_UNCHANGING_P (vector) = 1;
11172 convert_move (temp, vector, 0);
bbf6f052 11173
ca695ac9 11174 emit_jump_insn (gen_tablejump (temp, table_label));
bbf6f052 11175
ca695ac9
JB
11176#ifndef CASE_VECTOR_PC_RELATIVE
11177 /* If we are generating PIC code or if the table is PC-relative, the
11178 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11179 if (! flag_pic)
11180 emit_barrier ();
bbf6f052 11181#endif
ca695ac9 11182}
bbf6f052 11183
ca695ac9 11184#endif /* HAVE_tablejump */
bbf6f052 11185
bbf6f052 11186
ca695ac9
JB
11187/* Emit a suitable bytecode to load a value from memory, assuming a pointer
11188 to that value is on the top of the stack. The resulting type is TYPE, and
0f41302f 11189 the source declaration is DECL. */
bbf6f052 11190
ca695ac9
JB
11191void
11192bc_load_memory (type, decl)
11193 tree type, decl;
11194{
11195 enum bytecode_opcode opcode;
11196
11197
11198 /* Bit fields are special. We only know about signed and
11199 unsigned ints, and enums. The latter are treated as
0f41302f 11200 signed integers. */
ca695ac9
JB
11201
11202 if (DECL_BIT_FIELD (decl))
11203 if (TREE_CODE (type) == ENUMERAL_TYPE
11204 || TREE_CODE (type) == INTEGER_TYPE)
11205 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11206 else
11207 abort ();
11208 else
0f41302f 11209 /* See corresponding comment in bc_store_memory(). */
ca695ac9
JB
11210 if (TYPE_MODE (type) == BLKmode
11211 || TYPE_MODE (type) == VOIDmode)
11212 return;
11213 else
6bd6178d 11214 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
bbf6f052 11215
ca695ac9
JB
11216 if (opcode == neverneverland)
11217 abort ();
11218
11219 bc_emit_bytecode (opcode);
11220
11221#ifdef DEBUG_PRINT_CODE
11222 fputc ('\n', stderr);
11223#endif
bbf6f052 11224}
bbf6f052 11225
bbf6f052 11226
ca695ac9
JB
11227/* Store the contents of the second stack slot to the address in the
11228 top stack slot. DECL is the declaration of the destination and is used
0f41302f 11229 to determine whether we're dealing with a bitfield. */
bbf6f052 11230
ca695ac9
JB
11231void
11232bc_store_memory (type, decl)
11233 tree type, decl;
11234{
11235 enum bytecode_opcode opcode;
11236
11237
11238 if (DECL_BIT_FIELD (decl))
f81497d9 11239 {
ca695ac9
JB
11240 if (TREE_CODE (type) == ENUMERAL_TYPE
11241 || TREE_CODE (type) == INTEGER_TYPE)
11242 opcode = sstoreBI;
f81497d9 11243 else
ca695ac9 11244 abort ();
f81497d9 11245 }
ca695ac9
JB
11246 else
11247 if (TYPE_MODE (type) == BLKmode)
11248 {
11249 /* Copy structure. This expands to a block copy instruction, storeBLK.
11250 In addition to the arguments expected by the other store instructions,
11251 it also expects a type size (SImode) on top of the stack, which is the
11252 structure size in size units (usually bytes). The two first arguments
11253 are already on the stack; so we just put the size on level 1. For some
11254 other languages, the size may be variable, this is why we don't encode
0f41302f 11255 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
ca695ac9
JB
11256
11257 bc_expand_expr (TYPE_SIZE (type));
11258 opcode = storeBLK;
11259 }
11260 else
6bd6178d 11261 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
f81497d9 11262
ca695ac9
JB
11263 if (opcode == neverneverland)
11264 abort ();
11265
11266 bc_emit_bytecode (opcode);
11267
11268#ifdef DEBUG_PRINT_CODE
11269 fputc ('\n', stderr);
11270#endif
f81497d9
RS
11271}
11272
f81497d9 11273
ca695ac9
JB
11274/* Allocate local stack space sufficient to hold a value of the given
11275 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11276 integral power of 2. A special case is locals of type VOID, which
11277 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11278 remapped into the corresponding attribute of SI. */
11279
11280rtx
11281bc_allocate_local (size, alignment)
11282 int size, alignment;
f81497d9 11283{
ca695ac9
JB
11284 rtx retval;
11285 int byte_alignment;
f81497d9 11286
ca695ac9
JB
11287 if (size < 0)
11288 abort ();
f81497d9 11289
ca695ac9
JB
11290 /* Normalize size and alignment */
11291 if (!size)
11292 size = UNITS_PER_WORD;
bbf6f052 11293
ca695ac9
JB
11294 if (alignment < BITS_PER_UNIT)
11295 byte_alignment = 1 << (INT_ALIGN - 1);
11296 else
11297 /* Align */
11298 byte_alignment = alignment / BITS_PER_UNIT;
bbf6f052 11299
ca695ac9
JB
11300 if (local_vars_size & (byte_alignment - 1))
11301 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
bbf6f052 11302
ca695ac9
JB
11303 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11304 local_vars_size += size;
bbf6f052 11305
ca695ac9 11306 return retval;
bbf6f052
RK
11307}
11308
bbf6f052 11309
ca695ac9 11310/* Allocate variable-sized local array. Variable-sized arrays are
0f41302f 11311 actually pointers to the address in memory where they are stored. */
ca695ac9
JB
11312
11313rtx
11314bc_allocate_variable_array (size)
11315 tree size;
bbf6f052 11316{
ca695ac9
JB
11317 rtx retval;
11318 const int ptralign = (1 << (PTR_ALIGN - 1));
bbf6f052 11319
ca695ac9
JB
11320 /* Align pointer */
11321 if (local_vars_size & ptralign)
11322 local_vars_size += ptralign - (local_vars_size & ptralign);
bbf6f052 11323
ca695ac9
JB
11324 /* Note down local space needed: pointer to block; also return
11325 dummy rtx */
bbf6f052 11326
ca695ac9
JB
11327 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11328 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11329 return retval;
bbf6f052 11330}
bbf6f052 11331
bbf6f052 11332
ca695ac9 11333/* Push the machine address for the given external variable offset. */
0f41302f 11334
ca695ac9
JB
11335void
11336bc_load_externaddr (externaddr)
11337 rtx externaddr;
11338{
11339 bc_emit_bytecode (constP);
e7a42772
JB
11340 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11341 BYTECODE_BC_LABEL (externaddr)->offset);
bbf6f052 11342
ca695ac9
JB
11343#ifdef DEBUG_PRINT_CODE
11344 fputc ('\n', stderr);
11345#endif
bbf6f052
RK
11346}
11347
bbf6f052 11348
ca695ac9 11349/* Like above, but expects an IDENTIFIER. */
0f41302f 11350
ca695ac9
JB
11351void
11352bc_load_externaddr_id (id, offset)
11353 tree id;
11354 int offset;
11355{
11356 if (!IDENTIFIER_POINTER (id))
11357 abort ();
bbf6f052 11358
ca695ac9 11359 bc_emit_bytecode (constP);
3d8e9bc2 11360 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
bbf6f052 11361
ca695ac9
JB
11362#ifdef DEBUG_PRINT_CODE
11363 fputc ('\n', stderr);
11364#endif
11365}
bbf6f052 11366
bbf6f052 11367
ca695ac9 11368/* Push the machine address for the given local variable offset. */
0f41302f 11369
ca695ac9
JB
11370void
11371bc_load_localaddr (localaddr)
11372 rtx localaddr;
11373{
e7a42772 11374 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
bbf6f052 11375}
bbf6f052 11376
bbf6f052 11377
ca695ac9 11378/* Push the machine address for the given parameter offset.
0f41302f
MS
11379 NOTE: offset is in bits. */
11380
ca695ac9
JB
11381void
11382bc_load_parmaddr (parmaddr)
11383 rtx parmaddr;
bbf6f052 11384{
e7a42772
JB
11385 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11386 / BITS_PER_UNIT));
ca695ac9 11387}
bbf6f052 11388
ca695ac9
JB
11389
11390/* Convert a[i] into *(a + i). */
0f41302f 11391
ca695ac9
JB
11392tree
11393bc_canonicalize_array_ref (exp)
11394 tree exp;
11395{
11396 tree type = TREE_TYPE (exp);
11397 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11398 TREE_OPERAND (exp, 0));
11399 tree index = TREE_OPERAND (exp, 1);
11400
11401
11402 /* Convert the integer argument to a type the same size as a pointer
11403 so the multiply won't overflow spuriously. */
11404
11405 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11406 index = convert (type_for_size (POINTER_SIZE, 0), index);
11407
11408 /* The array address isn't volatile even if the array is.
11409 (Of course this isn't terribly relevant since the bytecode
11410 translator treats nearly everything as volatile anyway.) */
11411 TREE_THIS_VOLATILE (array_adr) = 0;
11412
11413 return build1 (INDIRECT_REF, type,
11414 fold (build (PLUS_EXPR,
11415 TYPE_POINTER_TO (type),
11416 array_adr,
11417 fold (build (MULT_EXPR,
11418 TYPE_POINTER_TO (type),
11419 index,
11420 size_in_bytes (type))))));
bbf6f052
RK
11421}
11422
bbf6f052 11423
ca695ac9
JB
11424/* Load the address of the component referenced by the given
11425 COMPONENT_REF expression.
bbf6f052 11426
0f41302f 11427 Returns innermost lvalue. */
bbf6f052 11428
ca695ac9
JB
11429tree
11430bc_expand_component_address (exp)
11431 tree exp;
bbf6f052 11432{
ca695ac9
JB
11433 tree tem, chain;
11434 enum machine_mode mode;
11435 int bitpos = 0;
11436 HOST_WIDE_INT SIval;
a7c5971a 11437
bbf6f052 11438
ca695ac9
JB
11439 tem = TREE_OPERAND (exp, 1);
11440 mode = DECL_MODE (tem);
bbf6f052 11441
ca695ac9
JB
11442
11443 /* Compute cumulative bit offset for nested component refs
11444 and array refs, and find the ultimate containing object. */
11445
11446 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
bbf6f052 11447 {
ca695ac9
JB
11448 if (TREE_CODE (tem) == COMPONENT_REF)
11449 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11450 else
11451 if (TREE_CODE (tem) == ARRAY_REF
11452 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11453 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
bbf6f052 11454
ca695ac9
JB
11455 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11456 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11457 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11458 else
11459 break;
11460 }
bbf6f052 11461
c02bd5d9 11462 bc_expand_expr (tem);
bbf6f052 11463
cd1b4b44 11464
ca695ac9
JB
11465 /* For bitfields also push their offset and size */
11466 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11467 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
11468 else
11469 if (SIval = bitpos / BITS_PER_UNIT)
11470 bc_emit_instruction (addconstPSI, SIval);
bbf6f052 11471
ca695ac9 11472 return (TREE_OPERAND (exp, 1));
bbf6f052 11473}
e7c33f54 11474
bbf6f052 11475
ca695ac9 11476/* Emit code to push two SI constants */
0f41302f 11477
ca695ac9
JB
11478void
11479bc_push_offset_and_size (offset, size)
11480 HOST_WIDE_INT offset, size;
11481{
11482 bc_emit_instruction (constSI, offset);
11483 bc_emit_instruction (constSI, size);
11484}
bbf6f052 11485
bbf6f052 11486
ca695ac9
JB
11487/* Emit byte code to push the address of the given lvalue expression to
11488 the stack. If it's a bit field, we also push offset and size info.
bbf6f052 11489
ca695ac9 11490 Returns innermost component, which allows us to determine not only
0f41302f 11491 its type, but also whether it's a bitfield. */
ca695ac9
JB
11492
11493tree
11494bc_expand_address (exp)
bbf6f052 11495 tree exp;
bbf6f052 11496{
ca695ac9
JB
11497 /* Safeguard */
11498 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11499 return (exp);
bbf6f052 11500
e7c33f54 11501
ca695ac9
JB
11502 switch (TREE_CODE (exp))
11503 {
11504 case ARRAY_REF:
e7c33f54 11505
ca695ac9 11506 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
e7c33f54 11507
ca695ac9 11508 case COMPONENT_REF:
bbf6f052 11509
ca695ac9 11510 return (bc_expand_component_address (exp));
bbf6f052 11511
ca695ac9 11512 case INDIRECT_REF:
bbf6f052 11513
ca695ac9
JB
11514 bc_expand_expr (TREE_OPERAND (exp, 0));
11515
11516 /* For variable-sized types: retrieve pointer. Sometimes the
11517 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
0f41302f 11518 also make sure we have an operand, just in case... */
ca695ac9
JB
11519
11520 if (TREE_OPERAND (exp, 0)
11521 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11522 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11523 bc_emit_instruction (loadP);
11524
11525 /* If packed, also return offset and size */
11526 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11527
11528 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11529 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11530
11531 return (TREE_OPERAND (exp, 0));
11532
11533 case FUNCTION_DECL:
11534
e7a42772
JB
11535 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11536 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
bbf6f052 11537 break;
ca695ac9
JB
11538
11539 case PARM_DECL:
11540
11541 bc_load_parmaddr (DECL_RTL (exp));
11542
11543 /* For variable-sized types: retrieve pointer */
11544 if (TYPE_SIZE (TREE_TYPE (exp))
11545 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11546 bc_emit_instruction (loadP);
11547
11548 /* If packed, also return offset and size */
11549 if (DECL_BIT_FIELD (exp))
11550 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11551 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11552
bbf6f052 11553 break;
ca695ac9
JB
11554
11555 case RESULT_DECL:
11556
11557 bc_emit_instruction (returnP);
bbf6f052 11558 break;
ca695ac9
JB
11559
11560 case VAR_DECL:
11561
11562#if 0
e7a42772 11563 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
11564 bc_load_externaddr (DECL_RTL (exp));
11565#endif
11566
11567 if (DECL_EXTERNAL (exp))
e7a42772 11568 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
eb862a37 11569 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
bbf6f052 11570 else
ca695ac9
JB
11571 bc_load_localaddr (DECL_RTL (exp));
11572
11573 /* For variable-sized types: retrieve pointer */
11574 if (TYPE_SIZE (TREE_TYPE (exp))
11575 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11576 bc_emit_instruction (loadP);
11577
11578 /* If packed, also return offset and size */
11579 if (DECL_BIT_FIELD (exp))
11580 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11581 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11582
bbf6f052 11583 break;
ca695ac9
JB
11584
11585 case STRING_CST:
11586 {
11587 rtx r;
11588
11589 bc_emit_bytecode (constP);
11590 r = output_constant_def (exp);
e7a42772 11591 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
ca695ac9
JB
11592
11593#ifdef DEBUG_PRINT_CODE
11594 fputc ('\n', stderr);
11595#endif
11596 }
bbf6f052 11597 break;
ca695ac9 11598
bbf6f052 11599 default:
bbf6f052 11600
ca695ac9
JB
11601 abort();
11602 break;
bbf6f052
RK
11603 }
11604
0f41302f 11605 /* Most lvalues don't have components. */
ca695ac9
JB
11606 return (exp);
11607}
bbf6f052 11608
ca695ac9
JB
11609
11610/* Emit a type code to be used by the runtime support in handling
11611 parameter passing. The type code consists of the machine mode
11612 plus the minimal alignment shifted left 8 bits. */
11613
11614tree
11615bc_runtime_type_code (type)
11616 tree type;
11617{
11618 int val;
11619
11620 switch (TREE_CODE (type))
bbf6f052 11621 {
ca695ac9
JB
11622 case VOID_TYPE:
11623 case INTEGER_TYPE:
11624 case REAL_TYPE:
11625 case COMPLEX_TYPE:
11626 case ENUMERAL_TYPE:
11627 case POINTER_TYPE:
11628 case RECORD_TYPE:
11629
6bd6178d 11630 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
ca695ac9
JB
11631 break;
11632
11633 case ERROR_MARK:
11634
11635 val = 0;
11636 break;
11637
11638 default:
af508edd 11639
ca695ac9
JB
11640 abort ();
11641 }
11642 return build_int_2 (val, 0);
11643}
af508edd 11644
af508edd 11645
ca695ac9 11646/* Generate constructor label */
0f41302f 11647
ca695ac9
JB
11648char *
11649bc_gen_constr_label ()
11650{
11651 static int label_counter;
11652 static char label[20];
bbf6f052 11653
ca695ac9 11654 sprintf (label, "*LR%d", label_counter++);
bbf6f052 11655
ca695ac9
JB
11656 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11657}
bbf6f052 11658
bbf6f052 11659
ca695ac9
JB
11660/* Evaluate constructor CONSTR and return pointer to it on level one. We
11661 expand the constructor data as static data, and push a pointer to it.
11662 The pointer is put in the pointer table and is retrieved by a constP
11663 bytecode instruction. We then loop and store each constructor member in
11664 the corresponding component. Finally, we return the original pointer on
0f41302f 11665 the stack. */
af508edd 11666
ca695ac9
JB
11667void
11668bc_expand_constructor (constr)
11669 tree constr;
11670{
11671 char *l;
11672 HOST_WIDE_INT ptroffs;
11673 rtx constr_rtx;
bbf6f052 11674
ca695ac9
JB
11675
11676 /* Literal constructors are handled as constants, whereas
11677 non-literals are evaluated and stored element by element
0f41302f 11678 into the data segment. */
ca695ac9
JB
11679
11680 /* Allocate space in proper segment and push pointer to space on stack.
11681 */
bbf6f052 11682
ca695ac9 11683 l = bc_gen_constr_label ();
bbf6f052 11684
ca695ac9 11685 if (TREE_CONSTANT (constr))
bbf6f052 11686 {
ca695ac9
JB
11687 text_section ();
11688
11689 bc_emit_const_labeldef (l);
11690 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
bbf6f052 11691 }
ca695ac9
JB
11692 else
11693 {
11694 data_section ();
bbf6f052 11695
ca695ac9
JB
11696 bc_emit_data_labeldef (l);
11697 bc_output_data_constructor (constr);
11698 }
bbf6f052 11699
ca695ac9
JB
11700
11701 /* Add reference to pointer table and recall pointer to stack;
11702 this code is common for both types of constructors: literals
0f41302f 11703 and non-literals. */
bbf6f052 11704
de7d9320
JB
11705 ptroffs = bc_define_pointer (l);
11706 bc_emit_instruction (constP, ptroffs);
d39985fa 11707
0f41302f 11708 /* This is all that has to be done if it's a literal. */
ca695ac9
JB
11709 if (TREE_CONSTANT (constr))
11710 return;
bbf6f052 11711
ca695ac9
JB
11712
11713 /* At this point, we have the pointer to the structure on top of the stack.
0f41302f 11714 Generate sequences of store_memory calls for the constructor. */
ca695ac9
JB
11715
11716 /* constructor type is structure */
11717 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
e7c33f54 11718 {
ca695ac9
JB
11719 register tree elt;
11720
11721 /* If the constructor has fewer fields than the structure,
11722 clear the whole structure first. */
11723
11724 if (list_length (CONSTRUCTOR_ELTS (constr))
11725 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11726 {
6d6e61ce 11727 bc_emit_instruction (duplicate);
ca695ac9
JB
11728 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11729 bc_emit_instruction (clearBLK);
11730 }
11731
11732 /* Store each element of the constructor into the corresponding
11733 field of TARGET. */
11734
11735 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11736 {
11737 register tree field = TREE_PURPOSE (elt);
11738 register enum machine_mode mode;
11739 int bitsize;
11740 int bitpos;
11741 int unsignedp;
11742
11743 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11744 mode = DECL_MODE (field);
11745 unsignedp = TREE_UNSIGNED (field);
11746
11747 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11748
11749 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11750 /* The alignment of TARGET is
11751 at least what its type requires. */
11752 VOIDmode, 0,
11753 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11754 int_size_in_bytes (TREE_TYPE (constr)));
11755 }
e7c33f54 11756 }
ca695ac9
JB
11757 else
11758
11759 /* Constructor type is array */
11760 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11761 {
11762 register tree elt;
11763 register int i;
11764 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11765 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11766 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11767 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11768
11769 /* If the constructor has fewer fields than the structure,
11770 clear the whole structure first. */
11771
11772 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11773 {
6d6e61ce 11774 bc_emit_instruction (duplicate);
ca695ac9
JB
11775 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11776 bc_emit_instruction (clearBLK);
11777 }
11778
11779
11780 /* Store each element of the constructor into the corresponding
0f41302f 11781 element of TARGET, determined by counting the elements. */
ca695ac9
JB
11782
11783 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11784 elt;
11785 elt = TREE_CHAIN (elt), i++)
11786 {
11787 register enum machine_mode mode;
11788 int bitsize;
11789 int bitpos;
11790 int unsignedp;
11791
11792 mode = TYPE_MODE (elttype);
11793 bitsize = GET_MODE_BITSIZE (mode);
11794 unsignedp = TREE_UNSIGNED (elttype);
11795
11796 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11797 /* * TYPE_SIZE_UNIT (elttype) */ );
11798
11799 bc_store_field (elt, bitsize, bitpos, mode,
11800 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11801 /* The alignment of TARGET is
11802 at least what its type requires. */
11803 VOIDmode, 0,
11804 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11805 int_size_in_bytes (TREE_TYPE (constr)));
11806 }
11807
11808 }
11809}
bbf6f052 11810
bbf6f052 11811
ca695ac9
JB
11812/* Store the value of EXP (an expression tree) into member FIELD of
11813 structure at address on stack, which has type TYPE, mode MODE and
11814 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11815 structure.
bbf6f052 11816
ca695ac9
JB
11817 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11818 TOTAL_SIZE is its size in bytes, or -1 if variable. */
bbf6f052 11819
ca695ac9
JB
11820void
11821bc_store_field (field, bitsize, bitpos, mode, exp, type,
11822 value_mode, unsignedp, align, total_size)
11823 int bitsize, bitpos;
11824 enum machine_mode mode;
11825 tree field, exp, type;
11826 enum machine_mode value_mode;
11827 int unsignedp;
11828 int align;
11829 int total_size;
11830{
bbf6f052 11831
ca695ac9
JB
11832 /* Expand expression and copy pointer */
11833 bc_expand_expr (exp);
11834 bc_emit_instruction (over);
bbf6f052 11835
bbf6f052 11836
ca695ac9
JB
11837 /* If the component is a bit field, we cannot use addressing to access
11838 it. Use bit-field techniques to store in it. */
bbf6f052 11839
ca695ac9
JB
11840 if (DECL_BIT_FIELD (field))
11841 {
11842 bc_store_bit_field (bitpos, bitsize, unsignedp);
11843 return;
11844 }
11845 else
11846 /* Not bit field */
11847 {
11848 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11849
11850 /* Advance pointer to the desired member */
11851 if (offset)
11852 bc_emit_instruction (addconstPSI, offset);
11853
11854 /* Store */
11855 bc_store_memory (type, field);
11856 }
11857}
bbf6f052 11858
ca695ac9
JB
11859
11860/* Store SI/SU in bitfield */
0f41302f 11861
bbf6f052 11862void
ca695ac9
JB
11863bc_store_bit_field (offset, size, unsignedp)
11864 int offset, size, unsignedp;
bbf6f052 11865{
ca695ac9
JB
11866 /* Push bitfield offset and size */
11867 bc_push_offset_and_size (offset, size);
bbf6f052 11868
ca695ac9
JB
11869 /* Store */
11870 bc_emit_instruction (sstoreBI);
11871}
e87b4f3f 11872
88d3b7f0 11873
ca695ac9 11874/* Load SI/SU from bitfield */
0f41302f 11875
ca695ac9
JB
11876void
11877bc_load_bit_field (offset, size, unsignedp)
11878 int offset, size, unsignedp;
11879{
11880 /* Push bitfield offset and size */
11881 bc_push_offset_and_size (offset, size);
88d3b7f0 11882
ca695ac9
JB
11883 /* Load: sign-extend if signed, else zero-extend */
11884 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11885}
709f5be1 11886
bbf6f052 11887
ca695ac9
JB
11888/* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11889 (adjust stack pointer upwards), negative means add that number of
11890 levels (adjust the stack pointer downwards). Only positive values
0f41302f 11891 normally make sense. */
bbf6f052 11892
ca695ac9
JB
11893void
11894bc_adjust_stack (nlevels)
11895 int nlevels;
11896{
11897 switch (nlevels)
11898 {
11899 case 0:
11900 break;
11901
11902 case 2:
11903 bc_emit_instruction (drop);
11904
11905 case 1:
11906 bc_emit_instruction (drop);
11907 break;
11908
11909 default:
11910
11911 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11912 stack_depth -= nlevels;
11913 }
11914
a68c7608
RS
11915#if defined (VALIDATE_STACK_FOR_BC)
11916 VALIDATE_STACK_FOR_BC ();
bbf6f052
RK
11917#endif
11918}
This page took 2.016366 seconds and 5 git commands to generate.