/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"


#define CEIL(x,y) (((x) + (y) - 1) / (y))
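/* Editor's note: for example, CEIL (11, 4) == 3 -- eleven bytes occupy
   three four-byte units, the last one only partly filled.  */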

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
void bc_expand_increment	PROTO((struct increment_operator *, tree));
rtx bc_allocate_local		PROTO((int, int));
void bc_store_memory		PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address		PROTO((tree));
void bc_expand_constructor	PROTO((tree));
void bc_adjust_stack		PROTO((int));
tree bc_canonicalize_array_ref	PROTO((tree));
void bc_load_memory		PROTO((tree, tree));
void bc_load_externaddr		PROTO((rtx));
void bc_load_externaddr_id	PROTO((tree, int));
void bc_load_localaddr		PROTO((rtx));
void bc_load_parmaddr		PROTO((rtx));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to	PROTO((tree));
extern tree truthvalue_conversion	PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
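
/* Editor's illustration (an assumption, not part of the original source):
   a caller with possibly-QUEUED operands filters each one first,

     op0 = protect_from_queue (op0, 0);        -- read access
     target = protect_from_queue (target, 1);  -- will be stored into
     emit_move_insn (target, op0);

   and only then builds insns, so no QUEUED rtx ever survives into an
   emitted pattern.  */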

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
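
/* Editor's illustration (an assumption, not original text): widening a
   SImode value into a fresh DImode register might be requested as

     rtx wide = gen_reg_rtx (DImode);
     convert_move (wide, narrow_si, 0);   -- 0 = sign-extend, 1 = zero-extend

   where narrow_si is a hypothetical SImode rtx; the function then picks a
   direct extend insn, an intermediate mode, or a library call, whatever
   the target provides for the mode pair.  */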

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
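
/* Editor's illustration (an assumption, not original text): unlike
   convert_move, this returns a value and may emit no insns at all, e.g.

     rtx h = convert_modes (HImode, SImode, GEN_INT (0x12345), 1);

   can fold the constant to its HImode form directly, while a REG or MEM
   operand may come back as a new pseudo initialized via convert_move.  */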

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

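/* Editor's worked example (an assumption about a typical strict-alignment
   target): with ALIGN == 4, an 11-byte copy decomposes into two SImode
   moves, one HImode move, and one QImode move, since the loop below always
   takes the widest mode no larger than both the remaining length and the
   known alignment.  */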
static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */
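
/* Editor's illustration (an assumption, not original text): a 32-byte
   word-aligned structure copy might be requested as

     emit_block_move (dst_mem, src_mem, GEN_INT (32), UNITS_PER_WORD);

   and be satisfied by a movstr pattern, a by-pieces sequence, or a
   memcpy/bcopy library call, whichever applies first.  */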

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }
}
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

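/* Editor's worked example (an assumption): with UNITS_PER_WORD == 4 on a
   BYTES_BIG_ENDIAN machine, a 1-byte value must be shifted left by
   (4 - 1) * 8 = 24 bits so it occupies the most significant byte of the
   word before being stored, which is what the expand_shift call below
   arranges.  */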

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */
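
/* Editor's note (an illustration, not original text): each element of the
   PARALLEL is an EXPR_LIST whose XEXP 0 is a register and whose XEXP 1 is
   a CONST_INT byte offset into Y, so a two-register value might arrive as

     (parallel [(expr_list (reg:SI 3) (const_int 0))
                (expr_list (reg:SI 4) (const_int 4))])

   which is how the loop below interprets XEXP (element, 0) and
   XEXP (element, 1).  */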
1756
1757void
1758emit_group_load (x, y)
1759 rtx x, y;
1760{
1761 rtx target_reg, source;
1762 int i;
1763
1764 if (GET_CODE (x) != PARALLEL)
1765 abort ();
1766
1767 /* Check for a NULL entry, used to indicate that the parameter goes
1768 both on the stack and in registers. */
1769 if (XEXP (XVECEXP (x, 0, 0), 0))
1770 i = 0;
1771 else
1772 i = 1;
1773
1774 for (; i < XVECLEN (x, 0); i++)
1775 {
1776 rtx element = XVECEXP (x, 0, i);
1777
1778 target_reg = XEXP (element, 0);
1779
1780 if (GET_CODE (y) == MEM)
1781 source = change_address (y, GET_MODE (target_reg),
1782 plus_constant (XEXP (y, 0),
1783 INTVAL (XEXP (element, 1))));
1784 else if (XEXP (element, 1) == const0_rtx)
1785 {
1786 if (GET_MODE (target_reg) == GET_MODE (y))
1787 source = y;
eaa9b4d9
MM
1788 /* Allow for the target_reg to be smaller than the input register
1789 to allow for AIX with 4 DF arguments after a single SI arg. The
1790 last DF argument will only load 1 word into the integer registers,
1791 but load a DF value into the float registers. */
fffa9c1d 1792 else if (GET_MODE_SIZE (GET_MODE (target_reg))
eaa9b4d9 1793 <= GET_MODE_SIZE (GET_MODE (y)))
fffa9c1d
JW
1794 source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
1795 else
1796 abort ();
1797 }
1798 else
1799 abort ();
1800
1801 emit_move_insn (target_reg, source);
1802 }
1803}

/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	target = x;
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (regs, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (regs, 0); i++)
    use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
}
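
/* A minimal caller-side sketch (hypothetical, never compiled; register
   number 3 is invented).  The resulting list would typically be
   attached to the CALL_INSN as its function usage by the call
   emission code.  */
#if 0
static void
example_use_regs ()
{
  rtx call_fusage = 0;
  use_regs (&call_fusage, 3, 2);
  /* call_fusage is now
     (expr_list (use (reg 4)) (expr_list (use (reg 3)) (nil)))
     since each use_reg call prepends to the list.  */
}
#endif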
\f
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
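
/* Worked example (hypothetical parameters): with MOVE_MAX == 8,
   LEN == 11 and ALIGN == 4 on a machine where SLOW_UNALIGNED_ACCESS is
   false, ALIGN is first widened to 8; the loop then clears 8 bytes in
   DImode, skips SImode (only 3 bytes remain), clears 2 in HImode and
   the last 1 in QImode -- three stores in all.  */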

/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume, measured in bytes.  */

void
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
	clear_by_pieces (object, INTVAL (size), align);

      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= GET_MODE_MASK (mode)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								   VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memset_libfunc, 0,
			     VOIDmode, 3,
			     XEXP (object, 0), Pmode,
			     const0_rtx, TYPE_MODE (integer_type_node),
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, const0_rtx);
}
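
/* A minimal usage sketch (hypothetical, never compiled): zero a
   24-byte BLKmode stack temporary with word alignment.  */
#if 0
static void
example_clear_storage ()
{
  rtx slot = assign_stack_temp (BLKmode, 24, 0);
  clear_storage (slot, GEN_INT (24), UNITS_PER_WORD);
}
#endif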

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx insns;

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx insns;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      /* Show the output dies here.  */
      if (x != y)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      return last_insn;
    }
  else
    abort ();
}
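
/* Worked example (hypothetical target): moving a DImode pseudo on a
   32-bit machine with no movdi pattern emits roughly

     (clobber (reg:DI 100))
     (set (subreg:SI (reg:DI 100) 0) ...)
     (set (subreg:SI (reg:DI 100) 1) ...)

   i.e. one word move per UNITS_PER_WORD chunk, after the CLOBBER that
   tells flow the whole register is being set.  */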
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
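
/* Worked example (hypothetical numbers): on a STACK_GROWS_DOWNWARD
   target, push_block (GEN_INT (16), 4, 1) adjusts the stack by 20
   bytes and returns virtual_outgoing_args_rtx + 4, so the 4 padding
   bytes sit below the 16-byte block.  With BELOW == 0 the same
   adjustment is made but the block's address is returned unchanged,
   leaving the padding above it.  */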

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* If we're placing part of X into a register and part of X onto
     the stack, indicate that the entire register is clobbered to
     keep flow from thinking the unused part of the register is live.  */
  if (partial > 0 && reg != 0)
    emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr, args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  if (HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	    {
	      rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrhi
	  if (HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	    {
	      rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrsi
	  if (HAVE_movstrsi)
	    {
	      rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrdi
	  if (HAVE_movstrdi)
	    {
	      rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	if (GET_CODE (args_so_far) == CONST_INT)
	  addr
	    = memory_address (mode,
			      plus_constant (args_addr, INTVAL (args_so_far)));
	else
	  addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x);
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
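
/* Worked example (hypothetical ABI, assuming PARM_BOUNDARY equals the
   word size): pushing a 3-word BLKmode argument with PARTIAL == 1 and
   REG == (reg 3) first clobbers r3, copies only the last two words to
   the stack (USED == UNITS_PER_WORD bytes are skipped), and only at
   `ret' loads the first word into r3, after any mem-to-mem copying
   that might have required function calls.  */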
\f
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  if (output_bytecode)
    {
      tree dest_innermost;

      bc_expand_expr (from);
      bc_emit_instruction (duplicate);

      dest_innermost = bc_expand_address (to);

      /* Can't deduce from TYPE that we're dealing with a bitfield, so
	 take care of it here.  */

      bc_store_memory (TREE_TYPE (to), dest_innermost);
      return NULL;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, have the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
	  && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
	      || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				 &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
					    force_reg (ptr_mode, offset_rtx)));
	  /* If we have a variable offset, the known alignment
	     is only that of the innermost structure containing the field.
	     (Actually, we could sometimes do better by using the
	     align of an element of the innermost array, but no need.)  */
	  if (TREE_CODE (to) == COMPONENT_REF
	      || TREE_CODE (to) == BIT_FIELD_REF)
	    alignment
	      = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
	}
      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    {
	      /* When the offset is zero, to_rtx is the address of the
		 structure we are storing into, and hence may be shared.
		 We must make a new MEM before setting the volatile bit.  */
	      if (offset == 0)
		to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
	      MEM_VOLATILE_P (to_rtx) = 1;
	    }
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    alignment,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
	 Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					  TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TREE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
     a promoted variable where the zero- or sign- extension needs to be done.
     Handling this in the normal way is safe because no computation is done
     before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value);
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from),
			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
      else
	emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node),
					  size, TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
      rtx flag = NULL_RTX;
      tree left_cleanups = NULL_TREE;
      tree right_cleanups = NULL_TREE;
      tree old_cleanups = cleanups_this_call;

      /* Used to save a pointer to the place to put the setting of
	 the flag that indicates if this side of the conditional was
	 taken.  We backpatch the code, if we find out later that we
	 have any conditional cleanups that need to be performed.  */
      rtx dest_right_flag = NULL_RTX;
      rtx dest_left_flag = NULL_RTX;

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      dest_left_flag = get_last_insn ();
      /* Handle conditional cleanups, if any.  */
      left_cleanups = defer_cleanups_to (old_cleanups);
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      dest_right_flag = get_last_insn ();
      /* Handle conditional cleanups, if any.  */
      right_cleanups = defer_cleanups_to (old_cleanups);
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      /* Add back in any conditional cleanups.  */
      if (left_cleanups || right_cleanups)
	{
	  tree new_cleanups;
	  tree cond;
	  rtx last;

	  /* Now that we know that a flag is needed, go back and add in the
	     setting of the flag.  */

	  flag = gen_reg_rtx (word_mode);

	  /* Do the left side flag.  */
	  last = get_last_insn ();
	  /* Flag left cleanups as needed.  */
	  emit_move_insn (flag, const1_rtx);
	  /* ??? deprecated, use sequences instead.  */
	  reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);

	  /* Do the right side flag.  */
	  last = get_last_insn ();
	  /* Flag right cleanups as needed.  */
	  emit_move_insn (flag, const0_rtx);
	  /* ??? deprecated, use sequences instead.  */
	  reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);

	  /* All cleanups must be on the function_obstack.  */
	  push_obstacks_nochange ();
	  resume_temporary_allocation ();

	  /* convert flag, which is an rtx, into a tree.  */
	  cond = make_node (RTL_EXPR);
	  TREE_TYPE (cond) = integer_type_node;
	  RTL_EXPR_RTL (cond) = flag;
	  RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
	  cond = save_expr (cond);

	  if (! left_cleanups)
	    left_cleanups = integer_zero_node;
	  if (! right_cleanups)
	    right_cleanups = integer_zero_node;
	  new_cleanups = build (COND_EXPR, void_type_node,
				truthvalue_conversion (cond),
				left_cleanups, right_cleanups);
	  new_cleanups = fold (new_cleanups);

	  pop_obstacks ();

	  /* Now add in the conditionalized cleanups.  */
	  cleanups_this_call
	    = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
	  expand_eh_region_start ();
	}
      return want_value ? target : NULL_RTX;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
			  GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
	dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  */
      if (! want_value)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp
	      = convert
		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
					  TREE_TYPE (exp)),
		 exp);

	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
					SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
	 the access now so it gets done only once.  Likewise if
	 it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
	  && (MEM_VOLATILE_P (temp)
	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
			      TYPE_MODE (TREE_TYPE (exp)), temp,
			      SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));
      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && temp != target
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */

  if (temp != target && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      convert (sizetype,
				       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
				 GET_MODE (size), 0, 0);
		  label = gen_label_rtx ();
		  emit_jump_insn (gen_blt (label));
		}

	      if (size != const0_rtx)
		{
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
				     addr, Pmode,
				     const0_rtx, TYPE_MODE (integer_type_node),
				     convert_to_mode (TYPE_MODE (sizetype),
						      size,
						      TREE_UNSIGNED (sizetype)),
				     TYPE_MODE (sizetype));
#else
		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
				     addr, Pmode,
				     convert_to_mode (TYPE_MODE (integer_type_node),
						      size,
						      TREE_UNSIGNED (integer_type_node)),
				     TYPE_MODE (integer_type_node));
#endif
		}

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp);
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
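
/* Worked example (hedged): for `char buf[10] = "abc";' the STRING_CST
   has TREE_STRING_LENGTH 4 (including the terminating NUL), so the
   string-constant case above copies 4 bytes with emit_block_move and
   then clears the remaining 6 bytes through memset/bzero, starting at
   buf + 4.  */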
\f
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;
    }

  return 0;
}

/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
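
/* For example: a constructor with 8 elements of which 6 are (mostly)
   zero yields 4 * 6 >= 3 * 8, i.e. 24 >= 24, so it counts as mostly
   zero; with only 5 zero elements, 20 < 24 and it does not.  */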
\f
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, cleared)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
	target = change_address (target, VOIDmode,
				 plus_constant (XEXP (target, 0),
						bitpos / BITS_PER_UNIT));
      store_constructor (exp, target, cleared);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp,
		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
		 int_size_in_bytes (type));
}

/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.
   CLEARED is true if TARGET is known to have been zeroed.  */

static void
store_constructor (exp, target, cleared)
     tree exp;
     rtx target;
     int cleared;
{
  tree type = TREE_TYPE (exp);

  /* We know our target cannot conflict, since safe_from_p has been called.  */
#if 0
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, 0);
      emit_move_insn (target, temp);
      return;
    }
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE
	  || TREE_CODE (type) == QUAL_UNION_TYPE)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  if (! cleared)
	    emit_move_insn (target, const0_rtx);

	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  */
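      /* For example, a C initializer like
	   struct { int a, b, c, d; } x = { 1 };
	 supplies one of four fields, so X is cleared wholesale and only
	 the nonzero field is stored afterward.  (Illustration.)  */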
      else if ((list_length (CONSTRUCTOR_ELTS (exp))
		!= list_length (TYPE_FIELDS (type)))
	       || mostly_zeros_p (exp))
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);

	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos = 0;
	  int unsignedp;
	  tree pos, constant = 0, offset = 0;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
	    continue;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  pos = DECL_FIELD_BITPOS (field);
	  if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos;
	  else if (TREE_CODE (pos) == PLUS_EXPR
		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
	  else
	    offset = pos;

	  if (constant)
	    bitpos = TREE_INT_CST_LOW (constant);

	  if (offset)
	    {
	      rtx offset_rtx;

	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, exp);

	      offset = size_binop (FLOOR_DIV_EXPR, offset,
				   size_int (BITS_PER_UNIT));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

	      to_rtx
		= change_address (to_rtx, VOIDmode,
				  gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
					   force_reg (ptr_mode, offset_rtx)));
	    }
	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
					 XEXP (to_rtx, 0));
	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

	  store_constructor_field (to_rtx, bitsize, bitpos,
				   mode, TREE_VALUE (elt), type, cleared);
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = 0;
	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	       elt != NULL_TREE;
	       elt = TREE_CHAIN (elt), i++)
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;
	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);
		  if (TREE_CODE (lo_index) != INTEGER_CST
		      || TREE_CODE (hi_index) != INTEGER_CST)
		    {
		      need_to_clear = 1;
		      break;
		    }
		  this_node_count = TREE_INT_CST_LOW (hi_index)
		    - TREE_INT_CST_LOW (lo_index) + 1;
		}
	      else
		this_node_count = 1;
	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }
	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (count < maxelt - minelt + 1
	      || 4 * zero_count >= 3 * count)
	    need_to_clear = 1;
	}
      if (need_to_clear)
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && is_zeros_p (value))
	    continue;

	  mode = TYPE_MODE (elttype);
	  bitsize = GET_MODE_BITSIZE (mode);
	  unsignedp = TREE_UNSIGNED (elttype);

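	  /* A RANGE_EXPR index comes from a range in a designated
	     initializer, e.g. the GNU C extension
	       int a[10] = { [2 ... 5] = 7 };
	     where one constructor element covers indices 2 through 5.
	     (Illustrative note.)  */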
	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
	      struct nesting *loop;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;

	      /* If the range is constant and "small", unroll the loop.  */
	      if (TREE_CODE (lo_index) == INTEGER_CST
		  && TREE_CODE (hi_index) == INTEGER_CST
		  && (lo = TREE_INT_CST_LOW (lo_index),
		      hi = TREE_INT_CST_LOW (hi_index),
		      count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
			   && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
			   <= 40 * 8 /* i.e. at most 40 bytes in all */))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
		      store_constructor_field (target, bitsize, bitpos,
					       mode, value, type, cleared);
		    }
		}
	      else
		{
		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  loop_top = gen_label_rtx ();
		  loop_end = gen_label_rtx ();

		  unsignedp = TREE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  DECL_RTL (index) = index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));

		  if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
		  loop = expand_start_loop (0);

		  /* Assign value to element index.  */
		  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
					 size_int (BITS_PER_UNIT));
		  position = size_binop (MULT_EXPR,
					 size_binop (MINUS_EXPR, index,
						     TYPE_MIN_VALUE (domain)),
					 position);
		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
		  xtarget = change_address (target, mode, addr);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);

		  /* Needed by stupid register allocation, to extend the
		     lifetime of pseudo-regs used by target past the end
		     of the loop.  */
		  emit_insn (gen_rtx (USE, GET_MODE (target), target));
		}
	    }
	  else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
		   || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
	    {
	      rtx pos_rtx, addr;
	      tree position;

	      if (index == 0)
		index = size_int (i);

	      if (minelt)
		index = size_binop (MINUS_EXPR, index,
				    TYPE_MIN_VALUE (domain));
	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
				     size_int (BITS_PER_UNIT));
	      position = size_binop (MULT_EXPR, index, position);
	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
	      addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
	      xtarget = change_address (target, mode, addr);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      else
		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      store_constructor_field (target, bitsize, bitpos,
				       mode, value, type, cleared);
	    }
	}
    }
  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      rtx xtarget = XEXP (target, 0);
      int set_word_size = TYPE_ALIGN (type);
      int nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the set (using bzero/memset), and then
	 set the bits we want.  */
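
      /* Illustrative note: for a set constructor consisting of one large
	 range, say [2 .. 200] out of a 256-bit set, clearing the whole
	 set and then setting the single range (via memset/__setbits
	 below) can beat assembling the constant words bit by bit.  */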

      /* Check for all zeros.  */
      if (elt == NULL_TREE)
	{
	  if (!cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_binop (MINUS_EXPR, domain_max, domain_min),
			      size_one_node);

      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
	abort ();
      nbits = TREE_INT_CST_LOW (bitlength);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  int bit_pos = 0;
	  int ibit = 0;
	  int offset = 0;	/* In bytes from beginning of set.  */
	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}
	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;
		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (GET_CODE (target) == MEM)
			{
			  to_rtx = plus_constant (XEXP (target, 0), offset);
			  to_rtx = change_address (target, mode, to_rtx);
			}
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }
		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (!cleared)
	{
	  /* Don't bother clearing storage if the set is all ones.  */
	  if (TREE_CHAIN (elt) != NULL_TREE
	      || (TREE_PURPOSE (elt) == NULL_TREE
		  ? nbits != 1
		  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
		     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
		     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
			 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
			 != nbits))))
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	}

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* Start of range of element, or NULL.  */
	  tree startbit = TREE_PURPOSE (elt);
	  /* End of range of element, or element value.  */
	  tree endbit = TREE_VALUE (elt);
	  HOST_WIDE_INT startb, endb;
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* Handle non-range tuple element like [ expr ].  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }
	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx = assign_stack_temp (GET_MODE (target),
					   GET_MODE_SIZE (GET_MODE (target)),
					   0);
	      emit_move_insn (targetx, target);
	    }
	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

#ifdef TARGET_MEM_FUNCTIONS
	  /* Optimization:  If startbit and endbit are
	     constants divisible by BITS_PER_UNIT,
	     call memset instead.  */
	  if (TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, 0,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
#endif
	    {
	      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
				 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
				 bitlength_rtx, TYPE_MODE (sizetype),
				 startbit_rtx, TYPE_MODE (sizetype),
				 endbit_rtx, TYPE_MODE (sizetype));
	    }
	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}

/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  HOST_WIDE_INT width_mask = 0;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */
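
  /* In outline (editorial summary of the code below): copy TARGET to a
     stack temporary OBJECT of the same mode, recurse to store EXP into
     a BLKmode view of OBJECT, then copy OBJECT back into TARGET.  */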

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
				      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      MEM_IN_STRUCT_P (object) = 1;
      MEM_IN_STRUCT_P (blk_object) = 1;
      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
		   align, total_size);

      /* Even though we aren't returning target, we need to
	 give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode])
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (SLOW_UNALIGNED_ACCESS
	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = change_address (target, VOIDmode,
				   plus_constant (XEXP (target, 0),
						  bitpos / BITS_PER_UNIT));

	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   1);

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.  */
	  /* If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      if (unsignedp)
		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }
	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = change_address (target, mode,
			       plus_constant (addr, (bitpos / BITS_PER_UNIT)));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
\f
/* Return true if any object containing the innermost array is an unaligned
   packed structure field.  */

static int
get_inner_unaligned_p (exp)
     tree exp;
{
  int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
	      < needed_alignment)
	    return 1;
	}
      else if (TREE_CODE (exp) != ARRAY_REF
	       && TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      exp = TREE_OPERAND (exp, 0);
    }

  return 0;
}

/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
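
/* Illustrative note: for a reference such as s.f[i].b, with B a
   bit-field, this returns the innermost object S, accumulates the
   constant part of the position into *PBITPOS, and leaves whatever
   depends on I as a tree in *POFFSET.  */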

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));
	  tree constant = integer_zero_node, var = pos;

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  /* Assume here that the offset is a multiple of a unit.
	     If not, there should be an explicitly added constant.  */
	  if (TREE_CODE (pos) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos, var = integer_zero_node;

	  *pbitpos += TREE_INT_CST_LOW (constant);
	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (EXACT_DIV_EXPR, var,
					   size_int (BITS_PER_UNIT)));
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);

	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index, low_bound));

	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	    {
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);
	      index_type = TREE_TYPE (index);
	    }

	  index = fold (build (MULT_EXPR, index_type, index,
			       TYPE_SIZE (TREE_TYPE (exp))));

	  if (TREE_CODE (index) == INTEGER_CST
	      && TREE_INT_CST_HIGH (index) == 0)
	    *pbitpos += TREE_INT_CST_LOW (index);
	  else
	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (FLOOR_DIV_EXPR, index,
					     size_int (BITS_PER_UNIT)));
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       != UNION_TYPE))
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;
      exp = TREE_OPERAND (exp, 0);
    }

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  return exp;
}
\f
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
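
/* Illustrative note: given, say, (plus:SI (reg:SI 100) (const_int 4)),
   force_operand emits an add insn and returns a pseudo register holding
   the sum, suitable wherever a general operand is required.  */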

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
\f
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_temp (part_type, 0, 1, 1);
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}

/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  */
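
/* Illustrative note: when expanding an assignment such as X = A + B,
   the caller can test safe_from_p (DECL_RTL of X, the tree for A + B);
   a result of 0 means A + B might reference X's storage, so the value
   must be computed into a temporary first.  */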

static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode.  */
      || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp)))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp))));
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
	      && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0)));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }

	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0));

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  break;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1));

	case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i)))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
\f
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural
   mode.  TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend
   insns.  This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
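
#if 0
/* Editorial sketch, not compiled: a typical call evaluates EXP into
   whatever register or memory expand_expr picks, in EXP's natural mode;
   the final 0 is the ordinary modifier (EXPAND_NORMAL).  */
rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#endif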

rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  /* Maybe defer this until sure not doing bytecode?  */
  int ignore = (target == const0_rtx
		|| ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		     || code == CONVERT_EXPR || code == REFERENCE_EXPR
		     || code == COND_EXPR)
		    && TREE_CODE (type) == VOID_TYPE));
  tree context;

  if (output_bytecode && modifier != EXPAND_INITIALIZER)
    {
      bc_expand_expr (exp);
      return NULL;
    }

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1')
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, modifier);
      else if (TREE_CODE_CLASS (code) == '2'
	       || TREE_CODE_CLASS (code) == '<')
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, modifier);

      target = 0;
    }

  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    /* Allocate in the memory associated with the function
	       that the label is in.  */
	    push_obstacks (p->function_obstack,
			   p->function_maybepermanent_obstack);

	    p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
					label_rtx (exp), p->forced_labels);
	    pop_obstacks ();
	  }
	else if (modifier == EXPAND_INITIALIZER)
	  forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
				   label_rtx (exp), forced_labels);
	temp = gen_rtx (MEM, FUNCTION_MODE,
			gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
	if (function != current_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  layout_decl (exp, 0);
	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
	  pop_obstacks ();
	}

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr = gen_rtx (MEM, Pmode,
			    fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
	  temp = change_address (DECL_RTL (exp), mode, addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
			       XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = change_address (DECL_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0),
			      DECL_ALIGN (exp) / BITS_PER_UNIT);

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();

	  temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp),
				 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (type, 0, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
				      save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);

    case UNSAVE_EXPR:
      {
	rtx temp;
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }

b50d17a1
RK
5068 case PLACEHOLDER_EXPR:
5069 /* If there is an object on the head of the placeholder list,
5070 see if some object in its references is of type TYPE. For
5071 further information, see tree.def. */
5072 if (placeholder_list)
5073 {
5074 tree object;
f59d43a9 5075 tree old_list = placeholder_list;
b50d17a1
RK
5076
5077 for (object = TREE_PURPOSE (placeholder_list);
330446eb
RK
5078 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5079 != TYPE_MAIN_VARIANT (type))
b50d17a1 5080 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4805bfa0
RK
5081 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
5082 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
5083 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
b50d17a1
RK
5084 object = TREE_OPERAND (object, 0))
5085 ;
5086
330446eb
RK
5087 if (object != 0
5088 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5089 == TYPE_MAIN_VARIANT (type)))
f59d43a9
RK
5090 {
5091 /* Expand this object skipping the list entries before
5092 it was found in case it is also a PLACEHOLDER_EXPR.
5093 In that case, we want to translate it using subsequent
5094 entries. */
5095 placeholder_list = TREE_CHAIN (placeholder_list);
5096 temp = expand_expr (object, original_target, tmode, modifier);
5097 placeholder_list = old_list;
5098 return temp;
5099 }
b50d17a1
RK
5100 }
5101
5102 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5103 abort ();
5104
5105 case WITH_RECORD_EXPR:
5106 /* Put the object on the placeholder list, expand our first operand,
5107 and pop the list. */
5108 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5109 placeholder_list);
5110 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5111 tmode, modifier);
5112 placeholder_list = TREE_CHAIN (placeholder_list);
5113 return target;
5114
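/* A standalone sketch of the discipline the two cases above implement:
   WITH_RECORD_EXPR pushes its record on a global chain, expands its
   body, and pops; a nested PLACEHOLDER_EXPR resolves against the
   innermost entry.  The names below are illustrative stand-ins for
   the real tree machinery, not expr.c code.  */

#include <stdio.h>
#include <stddef.h>

struct node { const char *purpose; struct node *chain; };

static struct node *placeholder_list = NULL;

static void
expand_body (void)
{
  /* Where a PLACEHOLDER_EXPR would look up the object to use.  */
  printf ("resolves against %s\n",
          placeholder_list ? placeholder_list->purpose : "(none)");
}

static void
with_record (const char *record)
{
  struct node n;
  n.purpose = record;
  n.chain = placeholder_list;    /* tree_cons (record, ..., list) */
  placeholder_list = &n;         /* push */
  expand_body ();
  placeholder_list = n.chain;    /* pop */
}

int
main (void)
{
  with_record ("outer record");  /* prints: resolves against outer record */
  return 0;
}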
bbf6f052 5115 case EXIT_EXPR:
e44842fe
RK
5116 expand_exit_loop_if_false (NULL_PTR,
5117 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
5118 return const0_rtx;
5119
5120 case LOOP_EXPR:
0088fcb1 5121 push_temp_slots ();
bbf6f052
RK
5122 expand_start_loop (1);
5123 expand_expr_stmt (TREE_OPERAND (exp, 0));
5124 expand_end_loop ();
0088fcb1 5125 pop_temp_slots ();
bbf6f052
RK
5126
5127 return const0_rtx;
5128
5129 case BIND_EXPR:
5130 {
5131 tree vars = TREE_OPERAND (exp, 0);
5132 int vars_need_expansion = 0;
5133
5134 /* Need to open a binding contour here because
5135 if there are any cleanups they must be contained here. */
5136 expand_start_bindings (0);
5137
2df53c0b
RS
5138 /* Mark the corresponding BLOCK for output in its proper place. */
5139 if (TREE_OPERAND (exp, 2) != 0
5140 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5141 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
5142
5143 /* If VARS have not yet been expanded, expand them now. */
5144 while (vars)
5145 {
5146 if (DECL_RTL (vars) == 0)
5147 {
5148 vars_need_expansion = 1;
5149 expand_decl (vars);
5150 }
5151 expand_decl_init (vars);
5152 vars = TREE_CHAIN (vars);
5153 }
5154
5155 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5156
5157 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5158
5159 return temp;
5160 }
5161
5162 case RTL_EXPR:
5163 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5164 abort ();
5165 emit_insns (RTL_EXPR_SEQUENCE (exp));
5166 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
99310285 5167 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
ca814259 5168 free_temps_for_rtl_expr (exp);
bbf6f052
RK
5169 return RTL_EXPR_RTL (exp);
5170
5171 case CONSTRUCTOR:
dd27116b
RK
5172 /* If we don't need the result, just ensure we evaluate any
5173 subexpressions. */
5174 if (ignore)
5175 {
5176 tree elt;
5177 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5178 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5179 return const0_rtx;
5180 }
3207b172 5181
4af3895e
JVA
5182 /* All elts simple constants => refer to a constant in memory. But
5183 if this is a non-BLKmode mode, let it store a field at a time
5184 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 5185 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
5186 store directly into the target unless the type is large enough
5187 that memcpy will be used. If we are making an initializer and
3207b172 5188 all operands are constant, put it in memory as well. */
dd27116b 5189 else if ((TREE_STATIC (exp)
3207b172
RK
5190 && ((mode == BLKmode
5191 && ! (target != 0 && safe_from_p (target, exp)))
d720b9d1
RK
5192 || TREE_ADDRESSABLE (exp)
5193 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5194 && (move_by_pieces_ninsns
67225c15
RK
5195 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5196 TYPE_ALIGN (type) / BITS_PER_UNIT)
9de08200
RK
5197 > MOVE_RATIO)
5198 && ! mostly_zeros_p (exp))))
dd27116b 5199 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
5200 {
5201 rtx constructor = output_constant_def (exp);
b552441b
RS
5202 if (modifier != EXPAND_CONST_ADDRESS
5203 && modifier != EXPAND_INITIALIZER
5204 && modifier != EXPAND_SUM
d6a5ac33
RK
5205 && (! memory_address_p (GET_MODE (constructor),
5206 XEXP (constructor, 0))
5207 || (flag_force_addr
5208 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
5209 constructor = change_address (constructor, VOIDmode,
5210 XEXP (constructor, 0));
5211 return constructor;
5212 }
5213
bbf6f052
RK
5214 else
5215 {
5216 if (target == 0 || ! safe_from_p (target, exp))
06089a8b
RK
5217 {
5218 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5219 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5220 else
5221 target = assign_temp (type, 0, 1, 1);
5222 }
07604beb
RK
5223
5224 if (TREE_READONLY (exp))
5225 {
9151b3bf
RK
5226 if (GET_CODE (target) == MEM)
5227 target = change_address (target, GET_MODE (target),
5228 XEXP (target, 0));
07604beb
RK
5229 RTX_UNCHANGING_P (target) = 1;
5230 }
5231
e1a43f73 5232 store_constructor (exp, target, 0);
bbf6f052
RK
5233 return target;
5234 }
5235
5236 case INDIRECT_REF:
5237 {
5238 tree exp1 = TREE_OPERAND (exp, 0);
5239 tree exp2;
5240
405f0da6
JW
5241 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5242 op0 = memory_address (mode, op0);
8c8a8e34
JW
5243
5244 temp = gen_rtx (MEM, mode, op0);
5245 /* If address was computed by addition,
5246 mark this as an element of an aggregate. */
5247 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5248 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5249 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
05e3bdb9 5250 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
8c8a8e34
JW
5251 || (TREE_CODE (exp1) == ADDR_EXPR
5252 && (exp2 = TREE_OPERAND (exp1, 0))
05e3bdb9 5253 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
8c8a8e34 5254 MEM_IN_STRUCT_P (temp) = 1;
2c4c436a 5255 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
1125706f
RK
5256
5257 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5258 here, because, in C and C++, the fact that a location is accessed
5259 through a pointer to const does not mean that the value there can
5260 never change. Languages where it can never change should
5261 also set TREE_STATIC. */
5cb7a25a 5262 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
8c8a8e34
JW
5263 return temp;
5264 }
bbf6f052
RK
5265
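/* A standalone sketch (not part of expr.c) of why the guard above is
   needed: reading through a pointer-to-const does not make the object
   itself immutable, so a load through such a pointer must not be
   treated as invariant unless the language also sets TREE_STATIC.  */

#include <stdio.h>

int counter = 0;

int
read_through_const (const int *p)
{
  return *p;                     /* const access to a mutable object */
}

int
main (void)
{
  const int *view = &counter;
  int before, after;

  before = read_through_const (view);
  counter = 42;                  /* legal: the object is not const */
  after = read_through_const (view);
  printf ("%d %d\n", before, after);   /* prints "0 42" */
  return 0;
}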
5266 case ARRAY_REF:
742920c7
RK
5267 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5268 abort ();
bbf6f052 5269
bbf6f052 5270 {
742920c7
RK
5271 tree array = TREE_OPERAND (exp, 0);
5272 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5273 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5274 tree index = TREE_OPERAND (exp, 1);
5275 tree index_type = TREE_TYPE (index);
bbf6f052 5276 int i;
bbf6f052 5277
b50d17a1
RK
5278 if (TREE_CODE (low_bound) != INTEGER_CST
5279 && contains_placeholder_p (low_bound))
5280 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5281
d4c89139
PB
5282 /* Optimize the special-case of a zero lower bound.
5283
5284 We convert the low_bound to sizetype to avoid some problems
5285 with constant folding. (E.g. suppose the lower bound is 1,
5286 and its mode is QI. Without the conversion, (ARRAY
5287 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5288 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5289
5290 But sizetype isn't quite right either (especially if
5291 the low bound is negative). FIXME */
5292
742920c7 5293 if (! integer_zerop (low_bound))
d4c89139
PB
5294 index = fold (build (MINUS_EXPR, index_type, index,
5295 convert (sizetype, low_bound)));
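/* A standalone sketch of the wraparound the comment above warns about:
   subtracting the low bound in a narrow unsigned type wraps, while the
   same subtraction done in a wide type gives the intended (negative)
   result.  */

#include <stdio.h>

int
main (void)
{
  unsigned char low_bound = 1;
  unsigned char index = 0;       /* one below the low bound */
  unsigned char narrow;
  long wide;

  narrow = (unsigned char) (index - low_bound);   /* wraps to 255 */
  wide = (long) index - (long) low_bound;         /* -1, as intended */
  printf ("%u %ld\n", (unsigned) narrow, wide);   /* prints "255 -1" */
  return 0;
}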
742920c7 5296
6be58303
JW
5297 if ((TREE_CODE (index) != INTEGER_CST
5298 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
c7a7ac46 5299 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
742920c7 5300 {
6be58303
JW
5301 /* Nonconstant array index or nonconstant element size, and
5302 not an array in an unaligned (packed) structure field.
742920c7
RK
5303 Generate the tree for *(&array+index) and expand that,
5304 except do it in a language-independent way
5305 and don't complain about non-lvalue arrays.
5306 `mark_addressable' should already have been called
5307 for any array for which this case will be reached. */
5308
5309 /* Don't forget the const or volatile flag from the array
0f41302f 5310 element. */
742920c7
RK
5311 tree variant_type = build_type_variant (type,
5312 TREE_READONLY (exp),
5313 TREE_THIS_VOLATILE (exp));
5314 tree array_adr = build1 (ADDR_EXPR,
5315 build_pointer_type (variant_type), array);
5316 tree elt;
b50d17a1 5317 tree size = size_in_bytes (type);
742920c7 5318
4c08eef0
RK
5319 /* Convert the integer argument to a type the same size as sizetype
5320 so the multiply won't overflow spuriously. */
5321 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5322 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5323 index);
742920c7 5324
b50d17a1
RK
5325 if (TREE_CODE (size) != INTEGER_CST
5326 && contains_placeholder_p (size))
5327 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5328
742920c7
RK
5329 /* Don't think the address has side effects
5330 just because the array does.
5331 (In some cases the address might have side effects,
5332 and we fail to record that fact here. However, it should not
5333 matter, since expand_expr should not care.) */
5334 TREE_SIDE_EFFECTS (array_adr) = 0;
5335
2ae342f7
RK
5336 elt
5337 = build1
5338 (INDIRECT_REF, type,
5339 fold (build (PLUS_EXPR,
5340 TYPE_POINTER_TO (variant_type),
5341 array_adr,
5342 fold
5343 (build1
5344 (NOP_EXPR,
5345 TYPE_POINTER_TO (variant_type),
5346 fold (build (MULT_EXPR, TREE_TYPE (index),
5347 index,
5348 convert (TREE_TYPE (index),
5349 size))))))));
742920c7
RK
5350
5351 /* Volatility, etc., of new expression is same as old
5352 expression. */
5353 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5354 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5355 TREE_READONLY (elt) = TREE_READONLY (exp);
5356
5357 return expand_expr (elt, target, tmode, modifier);
5358 }
5359
5360 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
5361 This is not done in fold so it won't happen inside &.
5362 Don't fold if this is for wide characters since it's too
5363 difficult to do correctly and this is a very rare case. */
742920c7
RK
5364
5365 if (TREE_CODE (array) == STRING_CST
5366 && TREE_CODE (index) == INTEGER_CST
5367 && !TREE_INT_CST_HIGH (index)
307b821c 5368 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
ad2e7dd0
RK
5369 && GET_MODE_CLASS (mode) == MODE_INT
5370 && GET_MODE_SIZE (mode) == 1)
307b821c 5371 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 5372
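/* The folding above in source terms: a constant index into a string
   literal becomes a character constant, with no memory reference.  */

#include <stdio.h>

int
main (void)
{
  int c = "foo"[2];              /* folds to 'o' at compile time */
  printf ("%c\n", c);            /* prints "o" */
  return 0;
}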
742920c7
RK
5373 /* If this is a constant index into a constant array,
5374 just get the value from the array. Handle both the cases when
5375 we have an explicit constructor and when our operand is a variable
5376 that was declared const. */
4af3895e 5377
742920c7
RK
5378 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5379 {
5380 if (TREE_CODE (index) == INTEGER_CST
5381 && TREE_INT_CST_HIGH (index) == 0)
5382 {
5383 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5384
5385 i = TREE_INT_CST_LOW (index);
5386 while (elem && i--)
5387 elem = TREE_CHAIN (elem);
5388 if (elem)
5389 return expand_expr (fold (TREE_VALUE (elem)), target,
5390 tmode, modifier);
5391 }
5392 }
4af3895e 5393
742920c7
RK
5394 else if (optimize >= 1
5395 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5396 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5397 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5398 {
5399 if (TREE_CODE (index) == INTEGER_CST
5400 && TREE_INT_CST_HIGH (index) == 0)
5401 {
5402 tree init = DECL_INITIAL (array);
5403
5404 i = TREE_INT_CST_LOW (index);
5405 if (TREE_CODE (init) == CONSTRUCTOR)
5406 {
5407 tree elem = CONSTRUCTOR_ELTS (init);
5408
03dc44a6
RS
5409 while (elem
5410 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
5411 elem = TREE_CHAIN (elem);
5412 if (elem)
5413 return expand_expr (fold (TREE_VALUE (elem)), target,
5414 tmode, modifier);
5415 }
5416 else if (TREE_CODE (init) == STRING_CST
5417 && i < TREE_STRING_LENGTH (init))
307b821c 5418 return GEN_INT (TREE_STRING_POINTER (init)[i]);
742920c7
RK
5419 }
5420 }
5421 }
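/* A toy version of the walk above: CONSTRUCTOR_ELTS is a TREE_CHAIN'd
   list, and a constant index simply steps down it.  The names here are
   illustrative, not the real tree accessors.  */

#include <stdio.h>
#include <stddef.h>

struct elt { int value; struct elt *chain; };

static struct elt *
nth_elt (struct elt *elem, unsigned int i)
{
  while (elem && i--)            /* same loop shape as the code above */
    elem = elem->chain;
  return elem;
}

int
main (void)
{
  struct elt c = { 30, NULL };
  struct elt b = { 20, &c };
  struct elt a = { 10, &b };
  struct elt *found = nth_elt (&a, 1);

  printf ("%d\n", found ? found->value : -1);   /* prints "20" */
  return 0;
}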
8c8a8e34 5422
bbf6f052
RK
5423 /* Treat array-ref with constant index as a component-ref. */
5424
5425 case COMPONENT_REF:
5426 case BIT_FIELD_REF:
4af3895e 5427 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
5428 appropriate field if it is present. Don't do this if we have
5429 already written the data since we want to refer to that copy
5430 and varasm.c assumes that's what we'll do. */
4af3895e 5431 if (code != ARRAY_REF
7a0b7b9a
RK
5432 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5433 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
5434 {
5435 tree elt;
5436
5437 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5438 elt = TREE_CHAIN (elt))
5439 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5440 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5441 }
5442
bbf6f052
RK
5443 {
5444 enum machine_mode mode1;
5445 int bitsize;
5446 int bitpos;
7bb0943f 5447 tree offset;
bbf6f052 5448 int volatilep = 0;
7bb0943f 5449 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
bbf6f052 5450 &mode1, &unsignedp, &volatilep);
034f9101 5451 int alignment;
bbf6f052 5452
e7f3c83f
RK
5453 /* If we got back the original object, something is wrong. Perhaps
5454 we are evaluating an expression too early. In any event, don't
5455 infinitely recurse. */
5456 if (tem == exp)
5457 abort ();
5458
3d27140a 5459 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
5460 computation, since it will need a temporary and TARGET is known
5461 to suffice. This occurs in unchecked conversion in Ada. */
5462
5463 op0 = expand_expr (tem,
5464 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5465 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5466 != INTEGER_CST)
5467 ? target : NULL_RTX),
4ed67205
RK
5468 VOIDmode,
5469 modifier == EXPAND_INITIALIZER ? modifier : 0);
bbf6f052 5470
8c8a8e34 5471 /* If this is a constant, put it into a register if it is a
8008b228 5472 legitimate constant and memory if it isn't. */
8c8a8e34
JW
5473 if (CONSTANT_P (op0))
5474 {
5475 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 5476 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
5477 op0 = force_reg (mode, op0);
5478 else
5479 op0 = validize_mem (force_const_mem (mode, op0));
5480 }
5481
034f9101 5482 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
7bb0943f
RS
5483 if (offset != 0)
5484 {
906c4e36 5485 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
5486
5487 if (GET_CODE (op0) != MEM)
5488 abort ();
5489 op0 = change_address (op0, VOIDmode,
88f63c77
RK
5490 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5491 force_reg (ptr_mode, offset_rtx)));
034f9101
RS
5492 /* If we have a variable offset, the known alignment
5493 is only that of the innermost structure containing the field.
5494 (Actually, we could sometimes do better by using the
5495 size of an element of the innermost array, but no need.) */
5496 if (TREE_CODE (exp) == COMPONENT_REF
5497 || TREE_CODE (exp) == BIT_FIELD_REF)
5498 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5499 / BITS_PER_UNIT);
7bb0943f
RS
5500 }
5501
bbf6f052
RK
5502 /* Don't forget about volatility even if this is a bitfield. */
5503 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5504 {
5505 op0 = copy_rtx (op0);
5506 MEM_VOLATILE_P (op0) = 1;
5507 }
5508
ccc98036
RS
5509 /* In cases where an aligned union has an unaligned object
5510 as a field, we might be extracting a BLKmode value from
5511 an integer-mode (e.g., SImode) object. Handle this case
5512 by doing the extract into an object as wide as the field
5513 (which we know to be the width of a basic mode), then
f2420d0b
JW
5514 storing into memory, and changing the mode to BLKmode.
5515 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5516 EXPAND_INITIALIZER), then we must not copy to a temporary. */
bbf6f052 5517 if (mode1 == VOIDmode
ccc98036 5518 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 5519 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a
JW
5520 && modifier != EXPAND_INITIALIZER
5521 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
5522 /* If the field isn't aligned enough to fetch as a memref,
5523 fetch it as a bit field. */
5524 || (SLOW_UNALIGNED_ACCESS
5525 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5526 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
bbf6f052 5527 {
bbf6f052
RK
5528 enum machine_mode ext_mode = mode;
5529
5530 if (ext_mode == BLKmode)
5531 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5532
5533 if (ext_mode == BLKmode)
a281e72d
RK
5534 {
5535 /* In this case, BITPOS must start at a byte boundary and
5536 TARGET, if specified, must be a MEM. */
5537 if (GET_CODE (op0) != MEM
5538 || (target != 0 && GET_CODE (target) != MEM)
5539 || bitpos % BITS_PER_UNIT != 0)
5540 abort ();
5541
5542 op0 = change_address (op0, VOIDmode,
5543 plus_constant (XEXP (op0, 0),
5544 bitpos / BITS_PER_UNIT));
5545 if (target == 0)
5546 target = assign_temp (type, 0, 1, 1);
5547
5548 emit_block_move (target, op0,
5549 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5550 / BITS_PER_UNIT),
5551 1);
5552
5553 return target;
5554 }
bbf6f052 5555
dc6d66b3
RK
5556 op0 = validize_mem (op0);
5557
5558 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5559 mark_reg_pointer (XEXP (op0, 0), alignment);
5560
5561 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 5562 unsignedp, target, ext_mode, ext_mode,
034f9101 5563 alignment,
bbf6f052 5564 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
5565
5566 /* If the result is a record type and BITSIZE is narrower than
5567 the mode of OP0, an integral mode, and this is a big endian
5568 machine, we must put the field into the high-order bits. */
5569 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5570 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5571 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5572 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5573 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5574 - bitsize),
5575 op0, 1);
5576
bbf6f052
RK
5577 if (mode == BLKmode)
5578 {
5579 rtx new = assign_stack_temp (ext_mode,
5580 bitsize / BITS_PER_UNIT, 0);
5581
5582 emit_move_insn (new, op0);
5583 op0 = copy_rtx (new);
5584 PUT_MODE (op0, BLKmode);
092dded9 5585 MEM_IN_STRUCT_P (op0) = 1;
bbf6f052
RK
5586 }
5587
5588 return op0;
5589 }
5590
05019f83
RK
5591 /* If the result is BLKmode, use that to access the object
5592 now as well. */
5593 if (mode == BLKmode)
5594 mode1 = BLKmode;
5595
bbf6f052
RK
5596 /* Get a reference to just this component. */
5597 if (modifier == EXPAND_CONST_ADDRESS
5598 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5599 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5600 (bitpos / BITS_PER_UNIT)));
5601 else
5602 op0 = change_address (op0, mode1,
5603 plus_constant (XEXP (op0, 0),
5604 (bitpos / BITS_PER_UNIT)));
dc6d66b3
RK
5605 if (GET_CODE (XEXP (op0, 0)) == REG)
5606 mark_reg_pointer (XEXP (op0, 0), alignment);
5607
bbf6f052
RK
5608 MEM_IN_STRUCT_P (op0) = 1;
5609 MEM_VOLATILE_P (op0) |= volatilep;
5610 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5611 return op0;
5612 if (target == 0)
5613 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5614 convert_move (target, op0, unsignedp);
5615 return target;
5616 }
5617
bbf6f052
RK
5618 /* Intended for a reference to a buffer of a file-object in Pascal.
5619 But it's not certain that a special tree code will really be
5620 necessary for these. INDIRECT_REF might work for them. */
5621 case BUFFER_REF:
5622 abort ();
5623
7308a047 5624 case IN_EXPR:
7308a047 5625 {
d6a5ac33
RK
5626 /* Pascal set IN expression.
5627
5628 Algorithm:
5629 rlo = set_low - (set_low%bits_per_word);
5630 the_word = set [ (index - rlo)/bits_per_word ];
5631 bit_index = index % bits_per_word;
5632 bitmask = 1 << bit_index;
5633 return !!(the_word & bitmask); */
5634
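/* A standalone sketch of that membership test.  The comment speaks of
   words, but the expansion below actually works in units of
   BITS_PER_UNIT, so this sketch uses 8-bit bytes; it also assumes a
   nonnegative domain, and the set layout is hand-built for
   illustration.  */

#include <stdio.h>

static int
set_contains (const unsigned char *set, long set_low, long index)
{
  long rlo = set_low - (set_low % 8);    /* element number of bit 0 */
  unsigned char the_word = set[(index - rlo) / 8];
  int bit_index = index % 8;
  return (the_word >> bit_index) & 1;    /* !!(the_word & bitmask) */
}

int
main (void)
{
  /* A set over the domain [0, 15] containing 1, 9, and 12.  */
  unsigned char set[2] = { 0x02, 0x12 };

  printf ("%d %d\n", set_contains (set, 0, 9), set_contains (set, 0, 2));
  /* prints "1 0" */
  return 0;
}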
7308a047
RS
5635 tree set = TREE_OPERAND (exp, 0);
5636 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 5637 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 5638 tree set_type = TREE_TYPE (set);
7308a047
RS
5639 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5640 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
5641 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5642 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5643 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5644 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5645 rtx setaddr = XEXP (setval, 0);
5646 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
5647 rtx rlow;
5648 rtx diff, quo, rem, addr, bit, result;
7308a047 5649
d6a5ac33
RK
5650 preexpand_calls (exp);
5651
5652 /* If domain is empty, answer is no. Likewise if index is constant
5653 and out of bounds. */
5654 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5655 && TREE_CODE (set_low_bound) == INTEGER_CST
5656 && tree_int_cst_lt (set_high_bound, set_low_bound)
5657 || (TREE_CODE (index) == INTEGER_CST
5658 && TREE_CODE (set_low_bound) == INTEGER_CST
5659 && tree_int_cst_lt (index, set_low_bound))
5660 || (TREE_CODE (set_high_bound) == INTEGER_CST
5661 && TREE_CODE (index) == INTEGER_CST
5662 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
5663 return const0_rtx;
5664
d6a5ac33
RK
5665 if (target == 0)
5666 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
5667
5668 /* If we get here, we have to generate the code for both cases
5669 (in range and out of range). */
5670
5671 op0 = gen_label_rtx ();
5672 op1 = gen_label_rtx ();
5673
5674 if (! (GET_CODE (index_val) == CONST_INT
5675 && GET_CODE (lo_r) == CONST_INT))
5676 {
17938e57 5677 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
d6a5ac33 5678 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
5679 emit_jump_insn (gen_blt (op1));
5680 }
5681
5682 if (! (GET_CODE (index_val) == CONST_INT
5683 && GET_CODE (hi_r) == CONST_INT))
5684 {
17938e57 5685 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
d6a5ac33 5686 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
5687 emit_jump_insn (gen_bgt (op1));
5688 }
5689
5690 /* Calculate the element number of bit zero in the first word
5691 of the set. */
5692 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
5693 rlow = GEN_INT (INTVAL (lo_r)
5694 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 5695 else
17938e57
RK
5696 rlow = expand_binop (index_mode, and_optab, lo_r,
5697 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 5698 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 5699
d6a5ac33
RK
5700 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5701 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
5702
5703 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 5704 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 5705 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
5706 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5707
7308a047 5708 addr = memory_address (byte_mode,
d6a5ac33
RK
5709 expand_binop (index_mode, add_optab, diff,
5710 setaddr, NULL_RTX, iunsignedp,
17938e57 5711 OPTAB_LIB_WIDEN));
d6a5ac33 5712
7308a047
RS
5713 /* Extract the bit we want to examine */
5714 bit = expand_shift (RSHIFT_EXPR, byte_mode,
17938e57
RK
5715 gen_rtx (MEM, byte_mode, addr),
5716 make_tree (TREE_TYPE (index), rem),
5717 NULL_RTX, 1);
5718 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5719 GET_MODE (target) == byte_mode ? target : 0,
7308a047 5720 1, OPTAB_LIB_WIDEN);
17938e57
RK
5721
5722 if (result != target)
5723 convert_move (target, result, 1);
7308a047
RS
5724
5725 /* Output the code to handle the out-of-range case. */
5726 emit_jump (op0);
5727 emit_label (op1);
5728 emit_move_insn (target, const0_rtx);
5729 emit_label (op0);
5730 return target;
5731 }
5732
bbf6f052
RK
5733 case WITH_CLEANUP_EXPR:
5734 if (RTL_EXPR_RTL (exp) == 0)
5735 {
5736 RTL_EXPR_RTL (exp)
6fcc9690 5737 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
906c4e36
RK
5738 cleanups_this_call
5739 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
bbf6f052
RK
5740 /* That's it for this cleanup. */
5741 TREE_OPERAND (exp, 2) = 0;
3d195391 5742 expand_eh_region_start ();
bbf6f052
RK
5743 }
5744 return RTL_EXPR_RTL (exp);
5745
5dab5552
MS
5746 case CLEANUP_POINT_EXPR:
5747 {
d93d4205 5748 extern int temp_slot_level;
5dab5552 5749 tree old_cleanups = cleanups_this_call;
d93d4205
MS
5750 int old_temp_level = target_temp_slot_level;
5751 push_temp_slots ();
5752 target_temp_slot_level = temp_slot_level;
f283f66b
JM
5753 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5754 /* If we're going to use this value, load it up now. */
5755 if (! ignore)
5756 op0 = force_not_mem (op0);
5dab5552 5757 expand_cleanups_to (old_cleanups);
d93d4205
MS
5758 preserve_temp_slots (op0);
5759 free_temp_slots ();
5760 pop_temp_slots ();
5761 target_temp_slot_level = old_temp_level;
5dab5552
MS
5762 }
5763 return op0;
5764
bbf6f052
RK
5765 case CALL_EXPR:
5766 /* Check for a built-in function. */
5767 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
5768 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5769 == FUNCTION_DECL)
bbf6f052
RK
5770 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5771 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 5772
bbf6f052
RK
5773 /* If this call was expanded already by preexpand_calls,
5774 just return the result we got. */
5775 if (CALL_EXPR_RTL (exp) != 0)
5776 return CALL_EXPR_RTL (exp);
d6a5ac33 5777
8129842c 5778 return expand_call (exp, target, ignore);
bbf6f052
RK
5779
5780 case NON_LVALUE_EXPR:
5781 case NOP_EXPR:
5782 case CONVERT_EXPR:
5783 case REFERENCE_EXPR:
bbf6f052
RK
5784 if (TREE_CODE (type) == UNION_TYPE)
5785 {
5786 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5787 if (target == 0)
06089a8b
RK
5788 {
5789 if (mode != BLKmode)
5790 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5791 else
5792 target = assign_temp (type, 0, 1, 1);
5793 }
d6a5ac33 5794
bbf6f052
RK
5795 if (GET_CODE (target) == MEM)
5796 /* Store data into beginning of memory target. */
5797 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
5798 change_address (target, TYPE_MODE (valtype), 0), 0);
5799
bbf6f052
RK
5800 else if (GET_CODE (target) == REG)
5801 /* Store this field into a union of the proper type. */
5802 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5803 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5804 VOIDmode, 0, 1,
5805 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5806 else
5807 abort ();
5808
5809 /* Return the entire union. */
5810 return target;
5811 }
d6a5ac33 5812
7f62854a
RK
5813 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5814 {
5815 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5816 modifier);
5817
5818 /* If the signedness of the conversion differs and OP0 is
5819 a promoted SUBREG, clear that indication since we now
5820 have to do the proper extension. */
5821 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5822 && GET_CODE (op0) == SUBREG)
5823 SUBREG_PROMOTED_VAR_P (op0) = 0;
5824
5825 return op0;
5826 }
5827
1499e0a8 5828 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
5829 if (GET_MODE (op0) == mode)
5830 return op0;
12342f90 5831
d6a5ac33
RK
5832 /* If OP0 is a constant, just convert it into the proper mode. */
5833 if (CONSTANT_P (op0))
5834 return
5835 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5836 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 5837
26fcb35a
RS
5838 if (modifier == EXPAND_INITIALIZER)
5839 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 5840
bbf6f052 5841 if (target == 0)
d6a5ac33
RK
5842 return
5843 convert_to_mode (mode, op0,
5844 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 5845 else
d6a5ac33
RK
5846 convert_move (target, op0,
5847 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
5848 return target;
5849
5850 case PLUS_EXPR:
0f41302f
MS
5851 /* We come here from MINUS_EXPR when the second operand is a
5852 constant. */
bbf6f052
RK
5853 plus_expr:
5854 this_optab = add_optab;
5855
5856 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5857 something else, make sure we add the register to the constant and
5858 then to the other thing. This case can occur during strength
5859 reduction and doing it this way will produce better code if the
5860 frame pointer or argument pointer is eliminated.
5861
5862 fold-const.c will ensure that the constant is always in the inner
5863 PLUS_EXPR, so the only case we need to do anything about is if
5864 sp, ap, or fp is our second argument, in which case we must swap
5865 the innermost first argument and our second argument. */
5866
5867 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5868 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5869 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5870 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5871 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5872 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5873 {
5874 tree t = TREE_OPERAND (exp, 1);
5875
5876 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5877 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5878 }
5879
88f63c77 5880 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
5881 something, we might be forming a constant. So try to use
5882 plus_constant. If it produces a sum and we can't accept it,
5883 use force_operand. This allows P = &ARR[const] to generate
5884 efficient code on machines where a SYMBOL_REF is not a valid
5885 address.
5886
5887 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 5888 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
88f63c77 5889 || mode == ptr_mode)
bbf6f052 5890 {
c980ac49
RS
5891 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5892 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5893 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5894 {
5895 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5896 EXPAND_SUM);
5897 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5898 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5899 op1 = force_operand (op1, target);
5900 return op1;
5901 }
bbf6f052 5902
c980ac49
RS
5903 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5904 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5905 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5906 {
5907 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5908 EXPAND_SUM);
5909 if (! CONSTANT_P (op0))
5910 {
5911 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5912 VOIDmode, modifier);
709f5be1
RS
5913 /* Don't go to both_summands if modifier
5914 says it's not right to return a PLUS. */
5915 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5916 goto binop2;
c980ac49
RS
5917 goto both_summands;
5918 }
5919 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5920 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5921 op0 = force_operand (op0, target);
5922 return op0;
5923 }
bbf6f052
RK
5924 }
5925
5926 /* No sense saving up arithmetic to be done
5927 if it's all in the wrong mode to form part of an address.
5928 And force_operand won't know whether to sign-extend or
5929 zero-extend. */
5930 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 5931 || mode != ptr_mode)
c980ac49 5932 goto binop;
bbf6f052
RK
5933
5934 preexpand_calls (exp);
5935 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5936 subtarget = 0;
5937
5938 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
906c4e36 5939 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
bbf6f052 5940
c980ac49 5941 both_summands:
bbf6f052
RK
5942 /* Make sure any term that's a sum with a constant comes last. */
5943 if (GET_CODE (op0) == PLUS
5944 && CONSTANT_P (XEXP (op0, 1)))
5945 {
5946 temp = op0;
5947 op0 = op1;
5948 op1 = temp;
5949 }
5950 /* If adding to a sum including a constant,
5951 associate it to put the constant outside. */
5952 if (GET_CODE (op1) == PLUS
5953 && CONSTANT_P (XEXP (op1, 1)))
5954 {
5955 rtx constant_term = const0_rtx;
5956
5957 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5958 if (temp != 0)
5959 op0 = temp;
6f90e075
JW
5960 /* Ensure that MULT comes first if there is one. */
5961 else if (GET_CODE (op0) == MULT)
5962 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
bbf6f052
RK
5963 else
5964 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5965
5966 /* Let's also eliminate constants from op0 if possible. */
5967 op0 = eliminate_constant_term (op0, &constant_term);
5968
5969 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5970 their sum should be a constant. Form it into OP1, since the
5971 result we want will then be OP0 + OP1. */
5972
5973 temp = simplify_binary_operation (PLUS, mode, constant_term,
5974 XEXP (op1, 1));
5975 if (temp != 0)
5976 op1 = temp;
5977 else
5978 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5979 }
5980
5981 /* Put a constant term last and put a multiplication first. */
5982 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5983 temp = op1, op1 = op0, op0 = temp;
5984
5985 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5986 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5987
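/* What the canonicalization above buys, in source terms: constant
   terms are pulled outermost, so a sum feeding an address folds into
   a single displacement.  Symbolically, (base + 8) + (index + 4)
   becomes (base + index) + 12.  */

#include <stdio.h>

int
main (void)
{
  char base[32];
  long index = 2;
  char *p1 = (base + 8) + (index + 4);   /* constants buried in both terms */
  char *p2 = (base + index) + 12;        /* one folded displacement */

  printf ("%d\n", p1 == p2);             /* prints "1" */
  return 0;
}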
5988 case MINUS_EXPR:
ea87523e
RK
5989 /* For initializers, we are allowed to return a MINUS of two
5990 symbolic constants. Here we handle all cases when both operands
5991 are constant. */
bbf6f052
RK
5992 /* Handle difference of two symbolic constants,
5993 for the sake of an initializer. */
5994 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5995 && really_constant_p (TREE_OPERAND (exp, 0))
5996 && really_constant_p (TREE_OPERAND (exp, 1)))
5997 {
906c4e36
RK
5998 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5999 VOIDmode, modifier);
6000 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6001 VOIDmode, modifier);
ea87523e 6002
ea87523e
RK
6003 /* If the last operand is a CONST_INT, use plus_constant of
6004 the negated constant. Else make the MINUS. */
6005 if (GET_CODE (op1) == CONST_INT)
6006 return plus_constant (op0, - INTVAL (op1));
6007 else
6008 return gen_rtx (MINUS, mode, op0, op1);
bbf6f052
RK
6009 }
6010 /* Convert A - const to A + (-const). */
6011 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6012 {
ae431183
RK
6013 tree negated = fold (build1 (NEGATE_EXPR, type,
6014 TREE_OPERAND (exp, 1)));
6015
6016 /* Deal with the case where we can't negate the constant
6017 in TYPE. */
6018 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6019 {
6020 tree newtype = signed_type (type);
6021 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6022 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6023 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6024
6025 if (! TREE_OVERFLOW (newneg))
6026 return expand_expr (convert (type,
6027 build (PLUS_EXPR, newtype,
6028 newop0, newneg)),
6029 target, tmode, modifier);
6030 }
6031 else
6032 {
6033 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6034 goto plus_expr;
6035 }
bbf6f052
RK
6036 }
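/* The rewrite above in source terms: A - c becomes A + (-c).  For an
   unsigned type the negation wraps and the sum still comes out right,
   but when TYPE cannot represent -c at all the negation must be done
   in the corresponding signed type, which is the TREE_OVERFLOW case
   handled above.  A standalone check of the unsigned half: */

#include <stdio.h>

int
main (void)
{
  unsigned int a = 10;
  unsigned int c = 3;

  printf ("%u %u\n", a - c, a + (0u - c));   /* prints "7 7" */
  return 0;
}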
6037 this_optab = sub_optab;
6038 goto binop;
6039
6040 case MULT_EXPR:
6041 preexpand_calls (exp);
6042 /* If first operand is constant, swap them.
6043 Thus the following special case checks need only
6044 check the second operand. */
6045 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6046 {
6047 register tree t1 = TREE_OPERAND (exp, 0);
6048 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6049 TREE_OPERAND (exp, 1) = t1;
6050 }
6051
6052 /* Attempt to return something suitable for generating an
6053 indexed address, for machines that support that. */
6054
88f63c77 6055 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 6056 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 6057 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
6058 {
6059 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6060
6061 /* Apply distributive law if OP0 is x+c. */
6062 if (GET_CODE (op0) == PLUS
6063 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6064 return gen_rtx (PLUS, mode,
6065 gen_rtx (MULT, mode, XEXP (op0, 0),
906c4e36
RK
6066 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6067 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6068 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
6069
6070 if (GET_CODE (op0) != REG)
906c4e36 6071 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
6072 if (GET_CODE (op0) != REG)
6073 op0 = copy_to_mode_reg (mode, op0);
6074
6075 return gen_rtx (MULT, mode, op0,
906c4e36 6076 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
6077 }
6078
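/* The distributive rewrite above, in source terms: (x + c) * d is
   turned into x*d + c*d, so the constant product can later be folded
   into an address displacement.  A quick check: */

#include <stdio.h>

int
main (void)
{
  long x = 5, c = 3, d = 4;

  printf ("%ld %ld\n", (x + c) * d, x * d + c * d);   /* prints "32 32" */
  return 0;
}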
6079 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6080 subtarget = 0;
6081
6082 /* Check for multiplying things that have been extended
6083 from a narrower type. If this machine supports multiplying
6084 in that narrower type with a result in the desired type,
6085 do it that way, and avoid the explicit type-conversion. */
6086 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6087 && TREE_CODE (type) == INTEGER_TYPE
6088 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6089 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6090 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6091 && int_fits_type_p (TREE_OPERAND (exp, 1),
6092 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6093 /* Don't use a widening multiply if a shift will do. */
6094 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 6095 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
6096 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6097 ||
6098 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6099 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6100 ==
6101 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6102 /* If both operands are extended, they must either both
6103 be zero-extended or both be sign-extended. */
6104 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6105 ==
6106 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6107 {
6108 enum machine_mode innermode
6109 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
6110 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6111 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
6112 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6113 ? umul_widen_optab : smul_widen_optab);
b10af0c8 6114 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 6115 {
b10af0c8
TG
6116 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6117 {
6118 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6119 NULL_RTX, VOIDmode, 0);
6120 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6121 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6122 VOIDmode, 0);
6123 else
6124 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6125 NULL_RTX, VOIDmode, 0);
6126 goto binop2;
6127 }
6128 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6129 && innermode == word_mode)
6130 {
6131 rtx htem;
6132 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6133 NULL_RTX, VOIDmode, 0);
6134 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6135 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6136 VOIDmode, 0);
6137 else
6138 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6139 NULL_RTX, VOIDmode, 0);
6140 temp = expand_binop (mode, other_optab, op0, op1, target,
6141 unsignedp, OPTAB_LIB_WIDEN);
6142 htem = expand_mult_highpart_adjust (innermode,
6143 gen_highpart (innermode, temp),
6144 op0, op1,
6145 gen_highpart (innermode, temp),
6146 unsignedp);
6147 emit_move_insn (gen_highpart (innermode, temp), htem);
6148 return temp;
6149 }
bbf6f052
RK
6150 }
6151 }
6152 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6153 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6154 return expand_mult (mode, op0, op1, target, unsignedp);
6155
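/* What the test above recognizes, in source terms: when both factors
   are widened from a narrower type, a single widening multiply
   computes the full product.  A sketch using 32-to-64-bit factors,
   assuming the usual int/long long widths: */

#include <stdio.h>

long long
wide_product (int a, int b)
{
  /* 32x32->64: the pattern a widening-multiply instruction covers.  */
  return (long long) a * (long long) b;
}

int
main (void)
{
  printf ("%lld\n", wide_product (100000, 100000));
  /* prints "10000000000", which does not fit in 32 bits */
  return 0;
}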
6156 case TRUNC_DIV_EXPR:
6157 case FLOOR_DIV_EXPR:
6158 case CEIL_DIV_EXPR:
6159 case ROUND_DIV_EXPR:
6160 case EXACT_DIV_EXPR:
6161 preexpand_calls (exp);
6162 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6163 subtarget = 0;
6164 /* Possible optimization: compute the dividend with EXPAND_SUM
6165 then, if the divisor is constant, we can optimize the case
6166 where some terms of the dividend have coefficients divisible by it. */
6167 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6168 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6169 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6170
6171 case RDIV_EXPR:
6172 this_optab = flodiv_optab;
6173 goto binop;
6174
6175 case TRUNC_MOD_EXPR:
6176 case FLOOR_MOD_EXPR:
6177 case CEIL_MOD_EXPR:
6178 case ROUND_MOD_EXPR:
6179 preexpand_calls (exp);
6180 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6181 subtarget = 0;
6182 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6183 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6184 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6185
6186 case FIX_ROUND_EXPR:
6187 case FIX_FLOOR_EXPR:
6188 case FIX_CEIL_EXPR:
6189 abort (); /* Not used for C. */
6190
6191 case FIX_TRUNC_EXPR:
906c4e36 6192 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6193 if (target == 0)
6194 target = gen_reg_rtx (mode);
6195 expand_fix (target, op0, unsignedp);
6196 return target;
6197
6198 case FLOAT_EXPR:
906c4e36 6199 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6200 if (target == 0)
6201 target = gen_reg_rtx (mode);
6202 /* expand_float can't figure out what to do if FROM has VOIDmode.
6203 So give it the correct mode. With -O, cse will optimize this. */
6204 if (GET_MODE (op0) == VOIDmode)
6205 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6206 op0);
6207 expand_float (target, op0,
6208 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6209 return target;
6210
6211 case NEGATE_EXPR:
5b22bee8 6212 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
bbf6f052
RK
6213 temp = expand_unop (mode, neg_optab, op0, target, 0);
6214 if (temp == 0)
6215 abort ();
6216 return temp;
6217
6218 case ABS_EXPR:
6219 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6220
2d7050fd 6221 /* Handle complex values specially. */
d6a5ac33
RK
6222 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6223 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6224 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 6225
bbf6f052
RK
6226 /* Unsigned abs is simply the operand. Testing here means we don't
6227 risk generating incorrect code below. */
6228 if (TREE_UNSIGNED (type))
6229 return op0;
6230
2e5ec6cf
RK
6231 return expand_abs (mode, op0, target, unsignedp,
6232 safe_from_p (target, TREE_OPERAND (exp, 0)));
bbf6f052
RK
6233
6234 case MAX_EXPR:
6235 case MIN_EXPR:
6236 target = original_target;
6237 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
fc155707 6238 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 6239 || GET_MODE (target) != mode
bbf6f052
RK
6240 || (GET_CODE (target) == REG
6241 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6242 target = gen_reg_rtx (mode);
906c4e36 6243 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6244 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6245
6246 /* First try to do it with a special MIN or MAX instruction.
6247 If that does not win, use a conditional jump to select the proper
6248 value. */
6249 this_optab = (TREE_UNSIGNED (type)
6250 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6251 : (code == MIN_EXPR ? smin_optab : smax_optab));
6252
6253 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6254 OPTAB_WIDEN);
6255 if (temp != 0)
6256 return temp;
6257
fa2981d8
JW
6258 /* At this point, a MEM target is no longer useful; we will get better
6259 code without it. */
6260
6261 if (GET_CODE (target) == MEM)
6262 target = gen_reg_rtx (mode);
6263
ee456b1c
RK
6264 if (target != op0)
6265 emit_move_insn (target, op0);
d6a5ac33 6266
bbf6f052 6267 op0 = gen_label_rtx ();
d6a5ac33 6268
f81497d9
RS
6269 /* If this mode is an integer too wide to compare properly,
6270 compare word by word. Rely on cse to optimize constant cases. */
d6a5ac33 6271 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
bbf6f052 6272 {
f81497d9 6273 if (code == MAX_EXPR)
d6a5ac33
RK
6274 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6275 target, op1, NULL_RTX, op0);
bbf6f052 6276 else
d6a5ac33
RK
6277 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6278 op1, target, NULL_RTX, op0);
ee456b1c 6279 emit_move_insn (target, op1);
bbf6f052 6280 }
f81497d9
RS
6281 else
6282 {
6283 if (code == MAX_EXPR)
6284 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
6285 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6286 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
f81497d9
RS
6287 else
6288 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
6289 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6290 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
f81497d9 6291 if (temp == const0_rtx)
ee456b1c 6292 emit_move_insn (target, op1);
f81497d9
RS
6293 else if (temp != const_true_rtx)
6294 {
6295 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6296 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6297 else
6298 abort ();
ee456b1c 6299 emit_move_insn (target, op1);
f81497d9
RS
6300 }
6301 }
bbf6f052
RK
6302 emit_label (op0);
6303 return target;
6304
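/* The conditional-jump fallback above, in source terms: copy one
   operand into the target, compare, and conditionally overwrite with
   the other.  A standalone sketch of the MAX_EXPR shape: */

#include <stdio.h>

static int
max_via_branch (int a, int b)
{
  int target = a;                /* emit_move_insn (target, op0) */
  if (! (target >= b))           /* jump skips the fixup when already max */
    target = b;                  /* emit_move_insn (target, op1) */
  return target;
}

int
main (void)
{
  printf ("%d %d\n", max_via_branch (3, 7), max_via_branch (7, 3));
  /* prints "7 7" */
  return 0;
}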
bbf6f052
RK
6305 case BIT_NOT_EXPR:
6306 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6307 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6308 if (temp == 0)
6309 abort ();
6310 return temp;
6311
6312 case FFS_EXPR:
6313 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6314 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6315 if (temp == 0)
6316 abort ();
6317 return temp;
6318
d6a5ac33
RK
6319 /* ??? Can optimize bitwise operations with one arg constant.
6320 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6321 and (a bitwise1 b) bitwise2 b (etc)
6322 but that is probably not worthwhile. */
6323
6324 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6325 boolean values when we want in all cases to compute both of them. In
6326 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6327 as actual zero-or-1 values and then bitwise anding. In cases where
6328 there cannot be any side effects, better code would be made by
6329 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6330 how to recognize those cases. */
6331
bbf6f052
RK
6332 case TRUTH_AND_EXPR:
6333 case BIT_AND_EXPR:
6334 this_optab = and_optab;
6335 goto binop;
6336
bbf6f052
RK
6337 case TRUTH_OR_EXPR:
6338 case BIT_IOR_EXPR:
6339 this_optab = ior_optab;
6340 goto binop;
6341
874726a8 6342 case TRUTH_XOR_EXPR:
bbf6f052
RK
6343 case BIT_XOR_EXPR:
6344 this_optab = xor_optab;
6345 goto binop;
6346
6347 case LSHIFT_EXPR:
6348 case RSHIFT_EXPR:
6349 case LROTATE_EXPR:
6350 case RROTATE_EXPR:
6351 preexpand_calls (exp);
6352 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6353 subtarget = 0;
6354 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6355 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6356 unsignedp);
6357
d6a5ac33
RK
6358 /* Could determine the answer when only additive constants differ. Also,
6359 the addition of one can be handled by changing the condition. */
bbf6f052
RK
6360 case LT_EXPR:
6361 case LE_EXPR:
6362 case GT_EXPR:
6363 case GE_EXPR:
6364 case EQ_EXPR:
6365 case NE_EXPR:
6366 preexpand_calls (exp);
6367 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6368 if (temp != 0)
6369 return temp;
d6a5ac33 6370
0f41302f 6371 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
6372 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6373 && original_target
6374 && GET_CODE (original_target) == REG
6375 && (GET_MODE (original_target)
6376 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6377 {
d6a5ac33
RK
6378 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6379 VOIDmode, 0);
6380
bbf6f052
RK
6381 if (temp != original_target)
6382 temp = copy_to_reg (temp);
d6a5ac33 6383
bbf6f052 6384 op1 = gen_label_rtx ();
906c4e36 6385 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
bbf6f052
RK
6386 GET_MODE (temp), unsignedp, 0);
6387 emit_jump_insn (gen_beq (op1));
6388 emit_move_insn (temp, const1_rtx);
6389 emit_label (op1);
6390 return temp;
6391 }
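/* The normalization above, in source terms: load foo, and if it is
   not already zero overwrite it with 1, yielding exactly 0 or 1.  */

#include <stdio.h>

int
main (void)
{
  int foo = 42;
  int temp = foo;                /* load foo */

  if (temp != 0)                 /* beq branches past the store */
    temp = 1;                    /* emit_move_insn (temp, const1_rtx) */
  printf ("%d\n", temp);         /* prints "1" */
  return 0;
}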
d6a5ac33 6392
bbf6f052
RK
6393 /* If no set-flag instruction, must generate a conditional
6394 store into a temporary variable. Drop through
6395 and handle this like && and ||. */
6396
6397 case TRUTH_ANDIF_EXPR:
6398 case TRUTH_ORIF_EXPR:
e44842fe
RK
6399 if (! ignore
6400 && (target == 0 || ! safe_from_p (target, exp)
6401 /* Make sure we don't have a hard reg (such as function's return
6402 value) live across basic blocks, if not optimizing. */
6403 || (!optimize && GET_CODE (target) == REG
6404 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 6405 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
6406
6407 if (target)
6408 emit_clr_insn (target);
6409
bbf6f052
RK
6410 op1 = gen_label_rtx ();
6411 jumpifnot (exp, op1);
e44842fe
RK
6412
6413 if (target)
6414 emit_0_to_1_insn (target);
6415
bbf6f052 6416 emit_label (op1);
e44842fe 6417 return ignore ? const0_rtx : target;
bbf6f052
RK
6418
6419 case TRUTH_NOT_EXPR:
6420 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6421 /* The parser is careful to generate TRUTH_NOT_EXPR
6422 only with operands that are always zero or one. */
906c4e36 6423 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
6424 target, 1, OPTAB_LIB_WIDEN);
6425 if (temp == 0)
6426 abort ();
6427 return temp;
6428
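/* The XOR trick above: for operands already known to be 0 or 1,
   logical negation is just an exclusive-or with 1.  */

#include <stdio.h>

int
main (void)
{
  int x;

  for (x = 0; x <= 1; x++)
    printf ("!%d == %d, %d ^ 1 == %d\n", x, !x, x, x ^ 1);
  return 0;
}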
6429 case COMPOUND_EXPR:
6430 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6431 emit_queue ();
6432 return expand_expr (TREE_OPERAND (exp, 1),
6433 (ignore ? const0_rtx : target),
6434 VOIDmode, 0);
6435
6436 case COND_EXPR:
6437 {
5dab5552
MS
6438 rtx flag = NULL_RTX;
6439 tree left_cleanups = NULL_TREE;
6440 tree right_cleanups = NULL_TREE;
6441
6442 /* Used to save a pointer to the place to put the setting of
6443 the flag that indicates if this side of the conditional was
6444 taken. We backpatch the code if we find out later that we
0f41302f 6445 have any conditional cleanups that need to be performed. */
5dab5552
MS
6446 rtx dest_right_flag = NULL_RTX;
6447 rtx dest_left_flag = NULL_RTX;
6448
bbf6f052
RK
6449 /* Note that COND_EXPRs whose type is a structure or union
6450 are required to be constructed to contain assignments of
6451 a temporary variable, so that we can evaluate them here
6452 for side effect only. If type is void, we must do likewise. */
6453
6454 /* If an arm of the branch requires a cleanup,
6455 only that cleanup is performed. */
6456
6457 tree singleton = 0;
6458 tree binary_op = 0, unary_op = 0;
6459 tree old_cleanups = cleanups_this_call;
bbf6f052
RK
6460
6461 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6462 convert it to our mode, if necessary. */
6463 if (integer_onep (TREE_OPERAND (exp, 1))
6464 && integer_zerop (TREE_OPERAND (exp, 2))
6465 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6466 {
dd27116b
RK
6467 if (ignore)
6468 {
6469 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6470 modifier);
6471 return const0_rtx;
6472 }
6473
bbf6f052
RK
6474 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6475 if (GET_MODE (op0) == mode)
6476 return op0;
d6a5ac33 6477
bbf6f052
RK
6478 if (target == 0)
6479 target = gen_reg_rtx (mode);
6480 convert_move (target, op0, unsignedp);
6481 return target;
6482 }
6483
bbf6f052
RK
6484 /* Check for X ? A + B : A. If we have this, we can copy
6485 A to the output and conditionally add B. Similarly for unary
6486 operations. Don't do this if X has side-effects because
6487 those side effects might affect A or B and the "?" operation is
6488 a sequence point in ANSI. (We test for side effects later.) */
6489
6490 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6491 && operand_equal_p (TREE_OPERAND (exp, 2),
6492 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6493 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6494 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6495 && operand_equal_p (TREE_OPERAND (exp, 1),
6496 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6497 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6498 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6499 && operand_equal_p (TREE_OPERAND (exp, 2),
6500 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6501 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6502 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6503 && operand_equal_p (TREE_OPERAND (exp, 1),
6504 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6505 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6506
01c8a7c8
RK
6507 /* If we are not to produce a result, we have no target. Otherwise,
6508 if a target was specified use it; it will not be used as an
6509 intermediate target unless it is safe. If no target, use a
6510 temporary. */
6511
6512 if (ignore)
6513 temp = 0;
6514 else if (original_target
6515 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6516 || (singleton && GET_CODE (original_target) == REG
6517 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6518 && original_target == var_rtx (singleton)))
6519 && GET_MODE (original_target) == mode
6520 && ! (GET_CODE (original_target) == MEM
6521 && MEM_VOLATILE_P (original_target)))
6522 temp = original_target;
6523 else if (TREE_ADDRESSABLE (type))
6524 abort ();
6525 else
6526 temp = assign_temp (type, 0, 0, 1);
6527
bbf6f052
RK
6528 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6529 operation, do this as A + (X != 0). Similarly for other simple
6530 binary operators. */
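	 /* Hypothetical example: `t = (x < y) ? a + 1 : a' can become
	    `t = a + (x < y)' when the comparison is computable as a
	    store-flag straight into a register.  */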
dd27116b 6531 if (temp && singleton && binary_op
bbf6f052
RK
6532 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6533 && (TREE_CODE (binary_op) == PLUS_EXPR
6534 || TREE_CODE (binary_op) == MINUS_EXPR
6535 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 6536 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
bbf6f052
RK
6537 && integer_onep (TREE_OPERAND (binary_op, 1))
6538 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6539 {
6540 rtx result;
6541 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6542 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6543 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
2d444001 6544 : xor_optab);
bbf6f052
RK
6545
6546 /* If we had X ? A : A + 1, do this as A + (X == 0).
6547
6548 We have to invert the truth value here and then put it
6549 back later if do_store_flag fails. We cannot simply copy
6550 TREE_OPERAND (exp, 0) to another variable and modify that
6551 because invert_truthvalue can modify the tree pointed to
6552 by its argument. */
6553 if (singleton == TREE_OPERAND (exp, 1))
6554 TREE_OPERAND (exp, 0)
6555 = invert_truthvalue (TREE_OPERAND (exp, 0));
6556
6557 result = do_store_flag (TREE_OPERAND (exp, 0),
906c4e36
RK
6558 (safe_from_p (temp, singleton)
6559 ? temp : NULL_RTX),
bbf6f052
RK
6560 mode, BRANCH_COST <= 1);
6561
6562 if (result)
6563 {
906c4e36 6564 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6565 return expand_binop (mode, boptab, op1, result, temp,
6566 unsignedp, OPTAB_LIB_WIDEN);
6567 }
6568 else if (singleton == TREE_OPERAND (exp, 1))
6569 TREE_OPERAND (exp, 0)
6570 = invert_truthvalue (TREE_OPERAND (exp, 0));
6571 }
6572
dabf8373 6573 do_pending_stack_adjust ();
bbf6f052
RK
6574 NO_DEFER_POP;
6575 op0 = gen_label_rtx ();
6576
5dab5552 6577 flag = gen_reg_rtx (word_mode);
bbf6f052
RK
6578 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6579 {
6580 if (temp != 0)
6581 {
6582 /* If the target conflicts with the other operand of the
6583 binary op, we can't use it. Also, we can't use the target
6584 if it is a hard register, because evaluating the condition
6585 might clobber it. */
6586 if ((binary_op
6587 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6588 || (GET_CODE (temp) == REG
6589 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6590 temp = gen_reg_rtx (mode);
6591 store_expr (singleton, temp, 0);
6592 }
6593 else
906c4e36 6594 expand_expr (singleton,
2937cf87 6595 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5dab5552 6596 dest_left_flag = get_last_insn ();
bbf6f052
RK
6597 if (singleton == TREE_OPERAND (exp, 1))
6598 jumpif (TREE_OPERAND (exp, 0), op0);
6599 else
6600 jumpifnot (TREE_OPERAND (exp, 0), op0);
6601
0f41302f 6602 /* Allows cleanups up to here. */
5dab5552 6603 old_cleanups = cleanups_this_call;
bbf6f052
RK
6604 if (binary_op && temp == 0)
6605 /* Just touch the other operand. */
6606 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 6607 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6608 else if (binary_op)
6609 store_expr (build (TREE_CODE (binary_op), type,
6610 make_tree (type, temp),
6611 TREE_OPERAND (binary_op, 1)),
6612 temp, 0);
6613 else
6614 store_expr (build1 (TREE_CODE (unary_op), type,
6615 make_tree (type, temp)),
6616 temp, 0);
6617 op1 = op0;
5dab5552 6618 dest_right_flag = get_last_insn ();
bbf6f052
RK
6619 }
6620#if 0
6621 /* This is now done in jump.c and is better done there because it
6622 produces shorter register lifetimes. */
6623
6624 /* Check for both possibilities either constants or variables
6625 in registers (but not the same as the target!). If so, can
6626 save branches by assigning one, branching, and assigning the
6627 other. */
6628 else if (temp && GET_MODE (temp) != BLKmode
6629 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6630 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6631 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6632 && DECL_RTL (TREE_OPERAND (exp, 1))
6633 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6634 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6635 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6636 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6637 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6638 && DECL_RTL (TREE_OPERAND (exp, 2))
6639 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6640 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6641 {
6642 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6643 temp = gen_reg_rtx (mode);
6644 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5dab5552 6645 dest_left_flag = get_last_insn ();
bbf6f052 6646 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6647
0f41302f 6648 /* Allows cleanups up to here. */
5dab5552 6649 old_cleanups = cleanups_this_call;
bbf6f052
RK
6650 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6651 op1 = op0;
5dab5552 6652 dest_right_flag = get_last_insn ();
bbf6f052
RK
6653 }
6654#endif
6655 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6656 comparison operator. If we have one of these cases, set the
6657 output to A, branch on A (cse will merge these two references),
6658 then set the output to FOO. */
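	 /* Hypothetical example: for `t = (x != 0) ? x : foo ()' we
	    store X into T, branch on X itself, and call FOO only on
	    the path where the test failed.  */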
6659 else if (temp
6660 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6661 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6662 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6663 TREE_OPERAND (exp, 1), 0)
6664 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6665 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6666 {
6667 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6668 temp = gen_reg_rtx (mode);
6669 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5dab5552 6670 dest_left_flag = get_last_insn ();
bbf6f052 6671 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 6672
0f41302f 6673 /* Allows cleanups up to here. */
5dab5552 6674 old_cleanups = cleanups_this_call;
bbf6f052
RK
6675 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6676 op1 = op0;
5dab5552 6677 dest_right_flag = get_last_insn ();
bbf6f052
RK
6678 }
6679 else if (temp
6680 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6681 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6682 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6683 TREE_OPERAND (exp, 2), 0)
6684 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6685 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6686 {
6687 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6688 temp = gen_reg_rtx (mode);
6689 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5dab5552 6690 dest_left_flag = get_last_insn ();
bbf6f052 6691 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6692
0f41302f 6693 /* Allows cleanups up to here. */
5dab5552 6694 old_cleanups = cleanups_this_call;
bbf6f052
RK
6695 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6696 op1 = op0;
5dab5552 6697 dest_right_flag = get_last_insn ();
bbf6f052
RK
6698 }
6699 else
6700 {
6701 op1 = gen_label_rtx ();
6702 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6703
0f41302f 6704 /* Allows cleanups up to here. */
5dab5552 6705 old_cleanups = cleanups_this_call;
bbf6f052
RK
6706 if (temp != 0)
6707 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6708 else
906c4e36
RK
6709 expand_expr (TREE_OPERAND (exp, 1),
6710 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5dab5552
MS
6711 dest_left_flag = get_last_insn ();
6712
0f41302f 6713 /* Handle conditional cleanups, if any. */
5dab5552 6714 left_cleanups = defer_cleanups_to (old_cleanups);
bbf6f052
RK
6715
6716 emit_queue ();
6717 emit_jump_insn (gen_jump (op1));
6718 emit_barrier ();
6719 emit_label (op0);
6720 if (temp != 0)
6721 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6722 else
906c4e36
RK
6723 expand_expr (TREE_OPERAND (exp, 2),
6724 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5dab5552 6725 dest_right_flag = get_last_insn ();
bbf6f052
RK
6726 }
6727
0f41302f 6728 /* Handle conditional cleanups, if any. */
5dab5552 6729 right_cleanups = defer_cleanups_to (old_cleanups);
bbf6f052
RK
6730
6731 emit_queue ();
6732 emit_label (op1);
6733 OK_DEFER_POP;
5dab5552 6734
0f41302f 6735	 /* Add back in any conditional cleanups.  */
5dab5552
MS
6736 if (left_cleanups || right_cleanups)
6737 {
6738 tree new_cleanups;
6739 tree cond;
6740 rtx last;
6741
6742 /* Now that we know that a flag is needed, go back and add in the
0f41302f 6743 setting of the flag. */
5dab5552 6744
0f41302f 6745 /* Do the left side flag. */
5dab5552 6746 last = get_last_insn ();
0f41302f 6747 /* Flag left cleanups as needed. */
5dab5552
MS
6748 emit_move_insn (flag, const1_rtx);
6749 /* ??? deprecated, use sequences instead. */
6750 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6751
0f41302f 6752 /* Do the right side flag. */
5dab5552 6753 last = get_last_insn ();
0f41302f 6754	     /* Flag right cleanups as needed.  */
5dab5552
MS
6755 emit_move_insn (flag, const0_rtx);
6756 /* ??? deprecated, use sequences instead. */
6757 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6758
9ba73d38
MS
6759 /* All cleanups must be on the function_obstack. */
6760 push_obstacks_nochange ();
6761 resume_temporary_allocation ();
6762
0f41302f 6763	     /* Convert FLAG, which is an rtx, into a tree.  */
5dab5552
MS
6764 cond = make_node (RTL_EXPR);
6765 TREE_TYPE (cond) = integer_type_node;
6766 RTL_EXPR_RTL (cond) = flag;
6767 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
01842234 6768 cond = save_expr (cond);
5dab5552
MS
6769
6770 if (! left_cleanups)
6771 left_cleanups = integer_zero_node;
6772 if (! right_cleanups)
6773 right_cleanups = integer_zero_node;
fd67d2b6
JM
6774 new_cleanups = build (COND_EXPR, void_type_node,
6775 truthvalue_conversion (cond),
5dab5552
MS
6776 left_cleanups, right_cleanups);
6777 new_cleanups = fold (new_cleanups);
6778
9ba73d38
MS
6779 pop_obstacks ();
6780
3d195391 6781 /* Now add in the conditionalized cleanups. */
5dab5552
MS
6782 cleanups_this_call
6783 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3d195391 6784 expand_eh_region_start ();
5dab5552 6785 }
bbf6f052
RK
6786 return temp;
6787 }
6788
6789 case TARGET_EXPR:
6790 {
6791 /* Something needs to be initialized, but we didn't know
6792 where that thing was when building the tree. For example,
6793 it could be the return value of a function, or a parameter
6794	 to a function which is laid down on the stack, or a temporary
6795 variable which must be passed by reference.
6796
6797 We guarantee that the expression will either be constructed
6798 or copied into our original target. */
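	 /* Hypothetical sketch: for `struct S s = f ();' the TARGET_EXPR
	    lets the call construct its value directly in S's slot, while
	    a temporary built only to be passed by reference lives in a
	    stack temp with a cleanup queued below.  */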
6799
6800 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 6801 tree cleanups = NULL_TREE;
5c062816 6802 tree exp1;
61d6b1cc 6803 rtx temp;
bbf6f052
RK
6804
6805 if (TREE_CODE (slot) != VAR_DECL)
6806 abort ();
6807
9c51f375
RK
6808 if (! ignore)
6809 target = original_target;
6810
bbf6f052
RK
6811 if (target == 0)
6812 {
6813 if (DECL_RTL (slot) != 0)
ac993f4f
MS
6814 {
6815 target = DECL_RTL (slot);
5c062816 6816	    /* If we have already expanded the slot, don't do
ac993f4f 6817 it again. (mrs) */
5c062816
MS
6818 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6819 return target;
ac993f4f 6820 }
bbf6f052
RK
6821 else
6822 {
06089a8b 6823 target = assign_temp (type, 2, 1, 1);
bbf6f052
RK
6824 /* All temp slots at this level must not conflict. */
6825 preserve_temp_slots (target);
6826 DECL_RTL (slot) = target;
bbf6f052 6827
e287fd6e
RK
6828 /* Since SLOT is not known to the called function
6829 to belong to its stack frame, we must build an explicit
6830 cleanup. This case occurs when we must build up a reference
6831	       to pass as an argument.  In this case,
6832 it is very likely that such a reference need not be
6833 built here. */
6834
6835 if (TREE_OPERAND (exp, 2) == 0)
6836 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 6837 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 6838 }
bbf6f052
RK
6839 }
6840 else
6841 {
6842	    /* This case does occur when expanding a parameter which
6843 needs to be constructed on the stack. The target
6844 is the actual stack address that we want to initialize.
6845 The function we call will perform the cleanup in this case. */
6846
8c042b47
RS
6847 /* If we have already assigned it space, use that space,
6848	       not the target we were passed in, as our target
6849 parameter is only a hint. */
6850 if (DECL_RTL (slot) != 0)
6851 {
6852 target = DECL_RTL (slot);
6853		/* If we have already expanded the slot, don't do
6854 it again. (mrs) */
6855 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6856 return target;
6857 }
6858
bbf6f052
RK
6859 DECL_RTL (slot) = target;
6860 }
6861
4847c938 6862 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
6863 /* Mark it as expanded. */
6864 TREE_OPERAND (exp, 1) = NULL_TREE;
6865
41531e5b 6866 store_expr (exp1, target, 0);
61d6b1cc 6867
2a888d4c
MS
6868 if (cleanups)
6869 {
6870 cleanups_this_call = tree_cons (NULL_TREE,
6871 cleanups,
6872 cleanups_this_call);
3d195391 6873 expand_eh_region_start ();
2a888d4c 6874 }
61d6b1cc 6875
41531e5b 6876 return target;
bbf6f052
RK
6877 }
6878
6879 case INIT_EXPR:
6880 {
6881 tree lhs = TREE_OPERAND (exp, 0);
6882 tree rhs = TREE_OPERAND (exp, 1);
6883 tree noncopied_parts = 0;
6884 tree lhs_type = TREE_TYPE (lhs);
6885
6886 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6887 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6888 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6889 TYPE_NONCOPIED_PARTS (lhs_type));
6890 while (noncopied_parts != 0)
6891 {
6892 expand_assignment (TREE_VALUE (noncopied_parts),
6893 TREE_PURPOSE (noncopied_parts), 0, 0);
6894 noncopied_parts = TREE_CHAIN (noncopied_parts);
6895 }
6896 return temp;
6897 }
6898
6899 case MODIFY_EXPR:
6900 {
6901 /* If lhs is complex, expand calls in rhs before computing it.
6902 That's so we don't compute a pointer and save it over a call.
6903 If lhs is simple, compute it first so we can give it as a
6904 target if the rhs is just a call. This avoids an extra temp and copy
6905 and that prevents a partial-subsumption which makes bad code.
6906 Actually we could treat component_ref's of vars like vars. */
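	 /* Hypothetical example: for `a[i] = f (x)' the call to F is
	    expanded first, so no pointer to a[i] must survive across
	    the call; for plain `v = f (x)', V itself can be handed to
	    the call as the place to put its result.  */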
6907
6908 tree lhs = TREE_OPERAND (exp, 0);
6909 tree rhs = TREE_OPERAND (exp, 1);
6910 tree noncopied_parts = 0;
6911 tree lhs_type = TREE_TYPE (lhs);
6912
6913 temp = 0;
6914
6915 if (TREE_CODE (lhs) != VAR_DECL
6916 && TREE_CODE (lhs) != RESULT_DECL
6917 && TREE_CODE (lhs) != PARM_DECL)
6918 preexpand_calls (exp);
6919
6920 /* Check for |= or &= of a bitfield of size one into another bitfield
6921 of size 1. In this case, (unless we need the result of the
6922 assignment) we can do this more efficiently with a
6923 test followed by an assignment, if necessary.
6924
6925 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6926 things change so we do, this code should be enhanced to
6927 support it. */
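	 /* Hypothetical example: `s.a |= s.b' with two 1-bit fields and
	    the result unused becomes `if (s.b) s.a = 1;' -- a test and
	    a conditional store instead of a read-modify-write.  */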
6928 if (ignore
6929 && TREE_CODE (lhs) == COMPONENT_REF
6930 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6931 || TREE_CODE (rhs) == BIT_AND_EXPR)
6932 && TREE_OPERAND (rhs, 0) == lhs
6933 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6934 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6935 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6936 {
6937 rtx label = gen_label_rtx ();
6938
6939 do_jump (TREE_OPERAND (rhs, 1),
6940 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6941 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6942 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6943 (TREE_CODE (rhs) == BIT_IOR_EXPR
6944 ? integer_one_node
6945 : integer_zero_node)),
6946 0, 0);
e7c33f54 6947 do_pending_stack_adjust ();
bbf6f052
RK
6948 emit_label (label);
6949 return const0_rtx;
6950 }
6951
6952 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6953 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6954 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6955 TYPE_NONCOPIED_PARTS (lhs_type));
6956
6957 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6958 while (noncopied_parts != 0)
6959 {
6960 expand_assignment (TREE_PURPOSE (noncopied_parts),
6961 TREE_VALUE (noncopied_parts), 0, 0);
6962 noncopied_parts = TREE_CHAIN (noncopied_parts);
6963 }
6964 return temp;
6965 }
6966
6967 case PREINCREMENT_EXPR:
6968 case PREDECREMENT_EXPR:
7b8b9722 6969 return expand_increment (exp, 0, ignore);
bbf6f052
RK
6970
6971 case POSTINCREMENT_EXPR:
6972 case POSTDECREMENT_EXPR:
6973 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 6974 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
6975
6976 case ADDR_EXPR:
987c71d9 6977 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 6978 be a MEM corresponding to a stack slot. */
987c71d9
RK
6979 temp = 0;
6980
bbf6f052
RK
6981 /* Are we taking the address of a nested function? */
6982 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9
JM
6983 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
6984 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
bbf6f052
RK
6985 {
6986 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6987 op0 = force_operand (op0, target);
6988 }
682ba3a6
RK
6989 /* If we are taking the address of something erroneous, just
6990 return a zero. */
6991 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6992 return const0_rtx;
bbf6f052
RK
6993 else
6994 {
e287fd6e
RK
6995 /* We make sure to pass const0_rtx down if we came in with
6996	     ignore set, to avoid doing the cleanups twice.  */
6997 op0 = expand_expr (TREE_OPERAND (exp, 0),
6998 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
6999 (modifier == EXPAND_INITIALIZER
7000 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 7001
119af78a
RK
7002 /* If we are going to ignore the result, OP0 will have been set
7003 to const0_rtx, so just return it. Don't get confused and
7004 think we are taking the address of the constant. */
7005 if (ignore)
7006 return op0;
7007
3539e816
MS
7008 op0 = protect_from_queue (op0, 0);
7009
896102d0
RK
7010 /* We would like the object in memory. If it is a constant,
7011 we can have it be statically allocated into memory. For
682ba3a6 7012 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
896102d0
RK
7013 memory and store the value into it. */
7014
7015 if (CONSTANT_P (op0))
7016 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7017 op0);
987c71d9 7018 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
7019 {
7020 mark_temp_addr_taken (op0);
7021 temp = XEXP (op0, 0);
7022 }
896102d0 7023
682ba3a6
RK
7024 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7025 || GET_CODE (op0) == CONCAT)
896102d0
RK
7026 {
7027	      /* If this object is in a register, it must not
0f41302f 7028 be BLKmode. */
896102d0 7029 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 7030 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 7031
7a0b7b9a 7032 mark_temp_addr_taken (memloc);
896102d0
RK
7033 emit_move_insn (memloc, op0);
7034 op0 = memloc;
7035 }
7036
bbf6f052
RK
7037 if (GET_CODE (op0) != MEM)
7038 abort ();
7039
7040 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
7041 {
7042 temp = XEXP (op0, 0);
7043#ifdef POINTERS_EXTEND_UNSIGNED
7044 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7045 && mode == ptr_mode)
9fcfcce7 7046 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
7047#endif
7048 return temp;
7049 }
987c71d9 7050
bbf6f052
RK
7051 op0 = force_operand (XEXP (op0, 0), target);
7052 }
987c71d9 7053
bbf6f052 7054 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
7055 op0 = force_reg (Pmode, op0);
7056
dc6d66b3
RK
7057 if (GET_CODE (op0) == REG
7058 && ! REG_USERVAR_P (op0))
7059 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
7060
7061 /* If we might have had a temp slot, add an equivalent address
7062 for it. */
7063 if (temp != 0)
7064 update_temp_slot_address (temp, op0);
7065
88f63c77
RK
7066#ifdef POINTERS_EXTEND_UNSIGNED
7067 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7068 && mode == ptr_mode)
9fcfcce7 7069 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
7070#endif
7071
bbf6f052
RK
7072 return op0;
7073
7074 case ENTRY_VALUE_EXPR:
7075 abort ();
7076
7308a047
RS
7077 /* COMPLEX type for Extended Pascal & Fortran */
7078 case COMPLEX_EXPR:
7079 {
7080 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 7081 rtx insns;
7308a047
RS
7082
7083 /* Get the rtx code of the operands. */
7084 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7085 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7086
7087 if (! target)
7088 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7089
6551fa4d 7090 start_sequence ();
7308a047
RS
7091
7092 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
7093 emit_move_insn (gen_realpart (mode, target), op0);
7094 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 7095
6551fa4d
JW
7096 insns = get_insns ();
7097 end_sequence ();
7098
7308a047 7099 /* Complex construction should appear as a single unit. */
6551fa4d
JW
7100 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7101 each with a separate pseudo as destination.
7102 It's not correct for flow to treat them as a unit. */
6d6e61ce 7103 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7104 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7105 else
7106 emit_insns (insns);
7308a047
RS
7107
7108 return target;
7109 }
7110
7111 case REALPART_EXPR:
2d7050fd
RS
7112 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7113 return gen_realpart (mode, op0);
7308a047
RS
7114
7115 case IMAGPART_EXPR:
2d7050fd
RS
7116 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7117 return gen_imagpart (mode, op0);
7308a047
RS
7118
7119 case CONJ_EXPR:
7120 {
62acb978 7121 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 7122 rtx imag_t;
6551fa4d 7123 rtx insns;
7308a047
RS
7124
7125 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7126
7127 if (! target)
d6a5ac33 7128 target = gen_reg_rtx (mode);
7308a047 7129
6551fa4d 7130 start_sequence ();
7308a047
RS
7131
7132 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
7133 emit_move_insn (gen_realpart (partmode, target),
7134 gen_realpart (partmode, op0));
7308a047 7135
62acb978
RK
7136 imag_t = gen_imagpart (partmode, target);
7137 temp = expand_unop (partmode, neg_optab,
7138 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
7139 if (temp != imag_t)
7140 emit_move_insn (imag_t, temp);
7141
6551fa4d
JW
7142 insns = get_insns ();
7143 end_sequence ();
7144
d6a5ac33
RK
7145      /* Conjugate should appear as a single unit.
7146 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
7147 each with a separate pseudo as destination.
7148 It's not correct for flow to treat them as a unit. */
6d6e61ce 7149 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7150 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7151 else
7152 emit_insns (insns);
7308a047
RS
7153
7154 return target;
7155 }
7156
bbf6f052 7157 case ERROR_MARK:
66538193
RS
7158 op0 = CONST0_RTX (tmode);
7159 if (op0 != 0)
7160 return op0;
bbf6f052
RK
7161 return const0_rtx;
7162
7163 default:
90764a87 7164 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
7165 }
7166
7167 /* Here to do an ordinary binary operator, generating an instruction
7168 from the optab already placed in `this_optab'. */
7169 binop:
7170 preexpand_calls (exp);
7171 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7172 subtarget = 0;
7173 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7174 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7175 binop2:
7176 temp = expand_binop (mode, this_optab, op0, op1, target,
7177 unsignedp, OPTAB_LIB_WIDEN);
7178 if (temp == 0)
7179 abort ();
7180 return temp;
7181}
bbf6f052 7182
bbf6f052 7183
0f41302f
MS
7184/* Emit bytecode to evaluate the given expression EXP to the stack. */
7185
ca695ac9
JB
7186void
7187bc_expand_expr (exp)
7188 tree exp;
bbf6f052 7189{
ca695ac9
JB
7190 enum tree_code code;
7191 tree type, arg0;
7192 rtx r;
7193 struct binary_operator *binoptab;
7194 struct unary_operator *unoptab;
7195 struct increment_operator *incroptab;
7196 struct bc_label *lab, *lab1;
7197 enum bytecode_opcode opcode;
7198
7199
7200 code = TREE_CODE (exp);
7201
7202 switch (code)
bbf6f052 7203 {
ca695ac9
JB
7204 case PARM_DECL:
7205
7206 if (DECL_RTL (exp) == 0)
bbf6f052 7207 {
ca695ac9
JB
7208 error_with_decl (exp, "prior parameter's size depends on `%s'");
7209 return;
bbf6f052 7210 }
ca695ac9
JB
7211
7212 bc_load_parmaddr (DECL_RTL (exp));
7213 bc_load_memory (TREE_TYPE (exp), exp);
7214
7215 return;
7216
7217 case VAR_DECL:
7218
7219 if (DECL_RTL (exp) == 0)
7220 abort ();
7221
7222#if 0
e7a42772 7223 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
7224 bc_load_externaddr (DECL_RTL (exp));
7225 else
7226 bc_load_localaddr (DECL_RTL (exp));
7227#endif
7228 if (TREE_PUBLIC (exp))
e7a42772
JB
7229 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7230 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
ca695ac9
JB
7231 else
7232 bc_load_localaddr (DECL_RTL (exp));
7233
7234 bc_load_memory (TREE_TYPE (exp), exp);
7235 return;
7236
7237 case INTEGER_CST:
7238
7239#ifdef DEBUG_PRINT_CODE
7240 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7241#endif
6bd6178d 7242 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
ca695ac9 7243 ? SImode
6bd6178d 7244 : TYPE_MODE (TREE_TYPE (exp)))],
ca695ac9
JB
7245 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7246 return;
7247
7248 case REAL_CST:
7249
c02bd5d9 7250#if 0
ca695ac9
JB
7251#ifdef DEBUG_PRINT_CODE
7252 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7253#endif
c02bd5d9 7254 /* FIX THIS: find a better way to pass real_cst's. -bson */
ca695ac9
JB
7255 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7256 (double) TREE_REAL_CST (exp));
c02bd5d9
JB
7257#else
7258 abort ();
7259#endif
7260
ca695ac9
JB
7261 return;
7262
7263 case CALL_EXPR:
7264
7265 /* We build a call description vector describing the type of
7266 the return value and of the arguments; this call vector,
7267 together with a pointer to a location for the return value
7268 and the base of the argument list, is passed to the low
7269 level machine dependent call subroutine, which is responsible
7270 for putting the arguments wherever real functions expect
7271 them, as well as getting the return value back. */
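      /* A rough sketch of the vector's layout, as built below:
	 { nargs, return type code, return size,
	   arg1 type code, arg1 size, arg2 type code, ... }.  */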
7272 {
7273 tree calldesc = 0, arg;
7274 int nargs = 0, i;
7275 rtx retval;
7276
7277 /* Push the evaluated args on the evaluation stack in reverse
7278 order. Also make an entry for each arg in the calldesc
7279 vector while we're at it. */
7280
7281 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7282
7283 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7284 {
7285 ++nargs;
7286 bc_expand_expr (TREE_VALUE (arg));
7287
7288 calldesc = tree_cons ((tree) 0,
7289 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7290 calldesc);
7291 calldesc = tree_cons ((tree) 0,
7292 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7293 calldesc);
7294 }
7295
7296 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7297
7298 /* Allocate a location for the return value and push its
7299 address on the evaluation stack. Also make an entry
0f41302f 7300 at the front of the calldesc for the return value type. */
ca695ac9
JB
7301
7302 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7303 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7304 bc_load_localaddr (retval);
7305
7306 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7307 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7308
7309 /* Prepend the argument count. */
7310 calldesc = tree_cons ((tree) 0,
7311 build_int_2 (nargs, 0),
7312 calldesc);
7313
7314 /* Push the address of the call description vector on the stack. */
7315 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7316 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7317 build_index_type (build_int_2 (nargs * 2, 0)));
7318 r = output_constant_def (calldesc);
7319 bc_load_externaddr (r);
7320
0f41302f 7321 /* Push the address of the function to be called. */
ca695ac9
JB
7322 bc_expand_expr (TREE_OPERAND (exp, 0));
7323
7324 /* Call the function, popping its address and the calldesc vector
7325 address off the evaluation stack in the process. */
7326 bc_emit_instruction (call);
7327
7328 /* Pop the arguments off the stack. */
7329 bc_adjust_stack (nargs);
7330
7331 /* Load the return value onto the stack. */
7332 bc_load_localaddr (retval);
7333 bc_load_memory (type, TREE_OPERAND (exp, 0));
7334 }
7335 return;
7336
7337 case SAVE_EXPR:
7338
7339 if (!SAVE_EXPR_RTL (exp))
bbf6f052 7340 {
ca695ac9
JB
7341 /* First time around: copy to local variable */
7342 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7343 TYPE_ALIGN (TREE_TYPE(exp)));
7344 bc_expand_expr (TREE_OPERAND (exp, 0));
6d6e61ce 7345 bc_emit_instruction (duplicate);
ca695ac9
JB
7346
7347 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7348 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 7349 }
ca695ac9 7350 else
bbf6f052 7351 {
ca695ac9
JB
7352 /* Consecutive reference: use saved copy */
7353 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7354 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 7355 }
ca695ac9
JB
7356 return;
7357
7358#if 0
7359 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7360 how are they handled instead? */
7361 case LET_STMT:
7362
7363 TREE_USED (exp) = 1;
7364 bc_expand_expr (STMT_BODY (exp));
7365 return;
7366#endif
7367
7368 case NOP_EXPR:
7369 case CONVERT_EXPR:
7370
7371 bc_expand_expr (TREE_OPERAND (exp, 0));
7372 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7373 return;
7374
7375 case MODIFY_EXPR:
7376
c02bd5d9 7377 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
ca695ac9
JB
7378 return;
7379
7380 case ADDR_EXPR:
7381
7382 bc_expand_address (TREE_OPERAND (exp, 0));
7383 return;
7384
7385 case INDIRECT_REF:
7386
7387 bc_expand_expr (TREE_OPERAND (exp, 0));
7388 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7389 return;
7390
7391 case ARRAY_REF:
7392
7393 bc_expand_expr (bc_canonicalize_array_ref (exp));
7394 return;
7395
7396 case COMPONENT_REF:
7397
7398 bc_expand_component_address (exp);
7399
7400 /* If we have a bitfield, generate a proper load */
7401 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7402 return;
7403
7404 case COMPOUND_EXPR:
7405
7406 bc_expand_expr (TREE_OPERAND (exp, 0));
7407 bc_emit_instruction (drop);
7408 bc_expand_expr (TREE_OPERAND (exp, 1));
7409 return;
7410
7411 case COND_EXPR:
7412
7413 bc_expand_expr (TREE_OPERAND (exp, 0));
7414 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7415 lab = bc_get_bytecode_label ();
c02bd5d9 7416 bc_emit_bytecode (xjumpifnot);
ca695ac9
JB
7417 bc_emit_bytecode_labelref (lab);
7418
7419#ifdef DEBUG_PRINT_CODE
7420 fputc ('\n', stderr);
7421#endif
7422 bc_expand_expr (TREE_OPERAND (exp, 1));
7423 lab1 = bc_get_bytecode_label ();
7424 bc_emit_bytecode (jump);
7425 bc_emit_bytecode_labelref (lab1);
7426
7427#ifdef DEBUG_PRINT_CODE
7428 fputc ('\n', stderr);
7429#endif
7430
7431 bc_emit_bytecode_labeldef (lab);
7432 bc_expand_expr (TREE_OPERAND (exp, 2));
7433 bc_emit_bytecode_labeldef (lab1);
7434 return;
7435
7436 case TRUTH_ANDIF_EXPR:
7437
c02bd5d9 7438 opcode = xjumpifnot;
ca695ac9
JB
7439 goto andorif;
7440
7441 case TRUTH_ORIF_EXPR:
7442
c02bd5d9 7443 opcode = xjumpif;
ca695ac9
JB
7444 goto andorif;
7445
7446 case PLUS_EXPR:
7447
7448 binoptab = optab_plus_expr;
7449 goto binop;
7450
7451 case MINUS_EXPR:
7452
7453 binoptab = optab_minus_expr;
7454 goto binop;
7455
7456 case MULT_EXPR:
7457
7458 binoptab = optab_mult_expr;
7459 goto binop;
7460
7461 case TRUNC_DIV_EXPR:
7462 case FLOOR_DIV_EXPR:
7463 case CEIL_DIV_EXPR:
7464 case ROUND_DIV_EXPR:
7465 case EXACT_DIV_EXPR:
7466
7467 binoptab = optab_trunc_div_expr;
7468 goto binop;
7469
7470 case TRUNC_MOD_EXPR:
7471 case FLOOR_MOD_EXPR:
7472 case CEIL_MOD_EXPR:
7473 case ROUND_MOD_EXPR:
7474
7475 binoptab = optab_trunc_mod_expr;
7476 goto binop;
7477
7478 case FIX_ROUND_EXPR:
7479 case FIX_FLOOR_EXPR:
7480 case FIX_CEIL_EXPR:
7481 abort (); /* Not used for C. */
7482
7483 case FIX_TRUNC_EXPR:
7484 case FLOAT_EXPR:
7485 case MAX_EXPR:
7486 case MIN_EXPR:
7487 case FFS_EXPR:
7488 case LROTATE_EXPR:
7489 case RROTATE_EXPR:
7490 abort (); /* FIXME */
7491
7492 case RDIV_EXPR:
7493
7494 binoptab = optab_rdiv_expr;
7495 goto binop;
7496
7497 case BIT_AND_EXPR:
7498
7499 binoptab = optab_bit_and_expr;
7500 goto binop;
7501
7502 case BIT_IOR_EXPR:
7503
7504 binoptab = optab_bit_ior_expr;
7505 goto binop;
7506
7507 case BIT_XOR_EXPR:
7508
7509 binoptab = optab_bit_xor_expr;
7510 goto binop;
7511
7512 case LSHIFT_EXPR:
7513
7514 binoptab = optab_lshift_expr;
7515 goto binop;
7516
7517 case RSHIFT_EXPR:
7518
7519 binoptab = optab_rshift_expr;
7520 goto binop;
7521
7522 case TRUTH_AND_EXPR:
7523
7524 binoptab = optab_truth_and_expr;
7525 goto binop;
7526
7527 case TRUTH_OR_EXPR:
7528
7529 binoptab = optab_truth_or_expr;
7530 goto binop;
7531
7532 case LT_EXPR:
7533
7534 binoptab = optab_lt_expr;
7535 goto binop;
7536
7537 case LE_EXPR:
7538
7539 binoptab = optab_le_expr;
7540 goto binop;
7541
7542 case GE_EXPR:
7543
7544 binoptab = optab_ge_expr;
7545 goto binop;
7546
7547 case GT_EXPR:
7548
7549 binoptab = optab_gt_expr;
7550 goto binop;
7551
7552 case EQ_EXPR:
7553
7554 binoptab = optab_eq_expr;
7555 goto binop;
7556
7557 case NE_EXPR:
7558
7559 binoptab = optab_ne_expr;
7560 goto binop;
7561
7562 case NEGATE_EXPR:
7563
7564 unoptab = optab_negate_expr;
7565 goto unop;
7566
7567 case BIT_NOT_EXPR:
7568
7569 unoptab = optab_bit_not_expr;
7570 goto unop;
7571
7572 case TRUTH_NOT_EXPR:
7573
7574 unoptab = optab_truth_not_expr;
7575 goto unop;
7576
7577 case PREDECREMENT_EXPR:
7578
7579 incroptab = optab_predecrement_expr;
7580 goto increment;
7581
7582 case PREINCREMENT_EXPR:
7583
7584 incroptab = optab_preincrement_expr;
7585 goto increment;
7586
7587 case POSTDECREMENT_EXPR:
7588
7589 incroptab = optab_postdecrement_expr;
7590 goto increment;
7591
7592 case POSTINCREMENT_EXPR:
7593
7594 incroptab = optab_postincrement_expr;
7595 goto increment;
7596
7597 case CONSTRUCTOR:
7598
7599 bc_expand_constructor (exp);
7600 return;
7601
7602 case ERROR_MARK:
7603 case RTL_EXPR:
7604
7605 return;
7606
7607 case BIND_EXPR:
7608 {
7609 tree vars = TREE_OPERAND (exp, 0);
7610 int vars_need_expansion = 0;
7611
7612 /* Need to open a binding contour here because
7613	   if there are any cleanups they must be contained here.  */
7614 expand_start_bindings (0);
7615
7616 /* Mark the corresponding BLOCK for output. */
7617 if (TREE_OPERAND (exp, 2) != 0)
7618 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7619
7620 /* If VARS have not yet been expanded, expand them now. */
7621 while (vars)
7622 {
7623 if (DECL_RTL (vars) == 0)
7624 {
7625 vars_need_expansion = 1;
9bac07c3 7626 expand_decl (vars);
ca695ac9 7627 }
9bac07c3 7628 expand_decl_init (vars);
ca695ac9
JB
7629 vars = TREE_CHAIN (vars);
7630 }
7631
7632 bc_expand_expr (TREE_OPERAND (exp, 1));
7633
7634 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7635
7636 return;
7637 }
7638 }
7639
7640 abort ();
7641
7642 binop:
7643
7644 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7645 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7646 return;
7647
7648
7649 unop:
7650
7651 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7652 return;
7653
7654
7655 andorif:
7656
7657 bc_expand_expr (TREE_OPERAND (exp, 0));
7658 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7659 lab = bc_get_bytecode_label ();
7660
6d6e61ce 7661 bc_emit_instruction (duplicate);
ca695ac9
JB
7662 bc_emit_bytecode (opcode);
7663 bc_emit_bytecode_labelref (lab);
7664
7665#ifdef DEBUG_PRINT_CODE
7666 fputc ('\n', stderr);
7667#endif
7668
7669 bc_emit_instruction (drop);
7670
7671 bc_expand_expr (TREE_OPERAND (exp, 1));
7672 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7673 bc_emit_bytecode_labeldef (lab);
7674 return;
7675
7676
7677 increment:
7678
7679 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7680
7681 /* Push the quantum. */
7682 bc_expand_expr (TREE_OPERAND (exp, 1));
7683
7684 /* Convert it to the lvalue's type. */
7685 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7686
7687 /* Push the address of the lvalue */
c02bd5d9 7688 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
ca695ac9
JB
7689
7690 /* Perform actual increment */
c02bd5d9 7691 bc_expand_increment (incroptab, type);
ca695ac9
JB
7692 return;
7693}
7694\f
7695/* Return the alignment in bits of EXP, a pointer valued expression.
7696 But don't return more than MAX_ALIGN no matter what.
7697 The alignment returned is, by default, the alignment of the thing that
7698 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7699
7700 Otherwise, look at the expression to see if we can do better, i.e., if the
7701 expression is actually pointing at an object whose alignment is tighter. */
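/* Hypothetical example: for EXP == `(char *) &i' where I is an int
   variable, the pointer type alone promises only byte alignment, but
   the ADDR_EXPR case below recovers DECL_ALIGN (i), typically 32.  */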
7702
7703static int
7704get_pointer_alignment (exp, max_align)
7705 tree exp;
7706 unsigned max_align;
7707{
7708 unsigned align, inner;
7709
7710 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7711 return 0;
7712
7713 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7714 align = MIN (align, max_align);
7715
7716 while (1)
7717 {
7718 switch (TREE_CODE (exp))
7719 {
7720 case NOP_EXPR:
7721 case CONVERT_EXPR:
7722 case NON_LVALUE_EXPR:
7723 exp = TREE_OPERAND (exp, 0);
7724 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7725 return align;
7726 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8dc2fbcf 7727 align = MIN (inner, max_align);
ca695ac9
JB
7728 break;
7729
7730 case PLUS_EXPR:
7731 /* If sum of pointer + int, restrict our maximum alignment to that
7732 imposed by the integer. If not, we can't do any better than
7733 ALIGN. */
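	  /* Hypothetical example: a constant byte offset of 2 cuts a
	     MAX_ALIGN of 64 down to 16, the largest power of two that
	     divides the 16-bit offset.  */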
7734 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7735 return align;
7736
7737 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7738 & (max_align - 1))
7739 != 0)
7740 max_align >>= 1;
7741
7742 exp = TREE_OPERAND (exp, 0);
7743 break;
7744
7745 case ADDR_EXPR:
7746 /* See what we are pointing at and look at its alignment. */
7747 exp = TREE_OPERAND (exp, 0);
7748 if (TREE_CODE (exp) == FUNCTION_DECL)
8dc2fbcf 7749 align = FUNCTION_BOUNDARY;
ca695ac9 7750 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8dc2fbcf 7751 align = DECL_ALIGN (exp);
ca695ac9
JB
7752#ifdef CONSTANT_ALIGNMENT
7753 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7754 align = CONSTANT_ALIGNMENT (exp, align);
7755#endif
7756 return MIN (align, max_align);
7757
7758 default:
7759 return align;
7760 }
7761 }
7762}
7763\f
7764/* Return the tree node and offset if a given argument corresponds to
7765 a string constant. */
7766
7767static tree
7768string_constant (arg, ptr_offset)
7769 tree arg;
7770 tree *ptr_offset;
7771{
7772 STRIP_NOPS (arg);
7773
7774 if (TREE_CODE (arg) == ADDR_EXPR
7775 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7776 {
7777 *ptr_offset = integer_zero_node;
7778 return TREE_OPERAND (arg, 0);
7779 }
7780 else if (TREE_CODE (arg) == PLUS_EXPR)
7781 {
7782 tree arg0 = TREE_OPERAND (arg, 0);
7783 tree arg1 = TREE_OPERAND (arg, 1);
7784
7785 STRIP_NOPS (arg0);
7786 STRIP_NOPS (arg1);
7787
7788 if (TREE_CODE (arg0) == ADDR_EXPR
7789 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7790 {
7791 *ptr_offset = arg1;
7792 return TREE_OPERAND (arg0, 0);
7793 }
7794 else if (TREE_CODE (arg1) == ADDR_EXPR
7795 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7796 {
7797 *ptr_offset = arg0;
7798 return TREE_OPERAND (arg1, 0);
7799 }
7800 }
7801
7802 return 0;
7803}
7804
7805/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7806   way, because the string could contain a zero byte in the middle.
7807 TREE_STRING_LENGTH is the size of the character array, not the string.
7808
7809 Unfortunately, string_constant can't access the values of const char
7810 arrays with initializers, so neither can we do so here. */
7811
7812static tree
7813c_strlen (src)
7814 tree src;
7815{
7816 tree offset_node;
7817 int offset, max;
7818 char *ptr;
7819
7820 src = string_constant (src, &offset_node);
7821 if (src == 0)
7822 return 0;
7823 max = TREE_STRING_LENGTH (src);
7824 ptr = TREE_STRING_POINTER (src);
7825 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7826 {
7827 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7828 compute the offset to the following null if we don't know where to
7829 start searching for it. */
7830 int i;
7831 for (i = 0; i < max; i++)
7832 if (ptr[i] == 0)
7833 return 0;
7834 /* We don't know the starting offset, but we do know that the string
7835 has no internal zero bytes. We can assume that the offset falls
7836 within the bounds of the string; otherwise, the programmer deserves
7837 what he gets. Subtract the offset from the length of the string,
7838 and return that. */
7839 /* This would perhaps not be valid if we were dealing with named
7840 arrays in addition to literal string constants. */
7841 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7842 }
7843
7844 /* We have a known offset into the string. Start searching there for
7845 a null character. */
7846 if (offset_node == 0)
7847 offset = 0;
7848 else
7849 {
7850 /* Did we get a long long offset? If so, punt. */
7851 if (TREE_INT_CST_HIGH (offset_node) != 0)
7852 return 0;
7853 offset = TREE_INT_CST_LOW (offset_node);
7854 }
7855 /* If the offset is known to be out of bounds, warn, and call strlen at
7856 runtime. */
7857 if (offset < 0 || offset > max)
7858 {
7859 warning ("offset outside bounds of constant string");
7860 return 0;
7861 }
7862 /* Use strlen to search for the first zero byte. Since any strings
7863 constructed with build_string will have nulls appended, we win even
7864 if we get handed something like (char[4])"abcd".
7865
7866 Since OFFSET is our starting index into the string, no further
7867 calculation is needed. */
7868 return size_int (strlen (ptr + offset));
7869}
2bbf216f
RK
7870
7871rtx
7872expand_builtin_return_addr (fndecl_code, count, tem)
7873 enum built_in_function fndecl_code;
2bbf216f 7874 int count;
75bb87ea 7875 rtx tem;
2bbf216f
RK
7876{
7877 int i;
7878
7879 /* Some machines need special handling before we can access
7880 arbitrary frames. For example, on the sparc, we must first flush
7881 all register windows to the stack. */
7882#ifdef SETUP_FRAME_ADDRESSES
7883 SETUP_FRAME_ADDRESSES ();
7884#endif
7885
7886 /* On the sparc, the return address is not in the frame, it is in a
7887 register. There is no way to access it off of the current frame
7888 pointer, but it can be accessed off the previous frame pointer by
7889 reading the value from the register window save area. */
7890#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7891 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7892 count--;
7893#endif
7894
7895 /* Scan back COUNT frames to the specified frame. */
7896 for (i = 0; i < count; i++)
7897 {
7898 /* Assume the dynamic chain pointer is in the word that the
7899 frame address points to, unless otherwise specified. */
7900#ifdef DYNAMIC_CHAIN_ADDRESS
7901 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7902#endif
7903 tem = memory_address (Pmode, tem);
7904 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7905 }
7906
7907 /* For __builtin_frame_address, return what we've got. */
7908 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7909 return tem;
7910
7911  /* For __builtin_return_address, get the return address from that
7912 frame. */
7913#ifdef RETURN_ADDR_RTX
7914 tem = RETURN_ADDR_RTX (count, tem);
7915#else
7916 tem = memory_address (Pmode,
7917 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7918 tem = gen_rtx (MEM, Pmode, tem);
7919#endif
0ebba7fc 7920 return tem;
2bbf216f 7921}
ca695ac9
JB
7922\f
7923/* Expand an expression EXP that calls a built-in function,
7924 with result going to TARGET if that's convenient
7925 (and in mode MODE if that's convenient).
7926 SUBTARGET may be used as the target for computing one of EXP's operands.
7927 IGNORE is nonzero if the value is to be ignored. */
7928
98aad286
RK
7929#define CALLED_AS_BUILT_IN(NODE) \
7930 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7931
ca695ac9
JB
7932static rtx
7933expand_builtin (exp, target, subtarget, mode, ignore)
7934 tree exp;
7935 rtx target;
7936 rtx subtarget;
7937 enum machine_mode mode;
7938 int ignore;
7939{
7940 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7941 tree arglist = TREE_OPERAND (exp, 1);
7942 rtx op0;
7943 rtx lab1, insns;
7944 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7945 optab builtin_optab;
7946
7947 switch (DECL_FUNCTION_CODE (fndecl))
7948 {
7949 case BUILT_IN_ABS:
7950 case BUILT_IN_LABS:
7951 case BUILT_IN_FABS:
7952 /* build_function_call changes these into ABS_EXPR. */
7953 abort ();
7954
7955 case BUILT_IN_SIN:
7956 case BUILT_IN_COS:
0f41302f 7957 /* Treat these like sqrt, but only if the user asks for them. */
ba558a85
RK
7958 if (! flag_fast_math)
7959 break;
ca695ac9
JB
7960 case BUILT_IN_FSQRT:
7961 /* If not optimizing, call the library function. */
7962 if (! optimize)
7963 break;
7964
7965 if (arglist == 0
7966 /* Arg could be wrong type if user redeclared this fcn wrong. */
7967 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7b073ca6 7968 break;
ca695ac9
JB
7969
7970 /* Stabilize and compute the argument. */
7971 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7972 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7973 {
7974 exp = copy_node (exp);
7975 arglist = copy_node (arglist);
7976 TREE_OPERAND (exp, 1) = arglist;
7977 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7978 }
7979 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7980
7981 /* Make a suitable register to place result in. */
7982 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7983
7984 emit_queue ();
7985 start_sequence ();
7986
7987 switch (DECL_FUNCTION_CODE (fndecl))
7988 {
7989 case BUILT_IN_SIN:
7990 builtin_optab = sin_optab; break;
7991 case BUILT_IN_COS:
7992 builtin_optab = cos_optab; break;
7993 case BUILT_IN_FSQRT:
7994 builtin_optab = sqrt_optab; break;
7995 default:
7996 abort ();
7997 }
7998
7999 /* Compute into TARGET.
8000 Set TARGET to wherever the result comes back. */
8001 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8002 builtin_optab, op0, target, 0);
8003
8004 /* If we were unable to expand via the builtin, stop the
8005 sequence (without outputting the insns) and break, causing
8006	 a call to the library function.  */
8007 if (target == 0)
8008 {
8009 end_sequence ();
8010 break;
8011 }
8012
8013 /* Check the results by default. But if flag_fast_math is turned on,
8014 then assume sqrt will always be called with valid arguments. */
8015
8016 if (! flag_fast_math)
8017 {
8018 /* Don't define the builtin FP instructions
8019 if your machine is not IEEE. */
8020 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8021 abort ();
8022
8023 lab1 = gen_label_rtx ();
8024
8025 /* Test the result; if it is NaN, set errno=EDOM because
8026 the argument was not in the domain. */
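	  /* (Only a NaN compares unequal to itself, so the branch below
	     skips the errno setup whenever the result is a number.)  */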
8027 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8028 emit_jump_insn (gen_beq (lab1));
8029
4ac09687 8030#ifdef TARGET_EDOM
ca695ac9
JB
8031 {
8032#ifdef GEN_ERRNO_RTX
8033 rtx errno_rtx = GEN_ERRNO_RTX;
8034#else
8035 rtx errno_rtx
e74a2201 8036 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
ca695ac9
JB
8037#endif
8038
8039 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8040 }
8041#else
8042 /* We can't set errno=EDOM directly; let the library call do it.
0f41302f 8043 Pop the arguments right away in case the call gets deleted. */
ca695ac9
JB
8044 NO_DEFER_POP;
8045 expand_call (exp, target, 0);
8046 OK_DEFER_POP;
8047#endif
8048
8049 emit_label (lab1);
8050 }
8051
0f41302f 8052 /* Output the entire sequence. */
ca695ac9
JB
8053 insns = get_insns ();
8054 end_sequence ();
8055 emit_insns (insns);
8056
8057 return target;
8058
8059 /* __builtin_apply_args returns block of memory allocated on
8060 the stack into which is stored the arg pointer, structure
8061 value address, static chain, and all the registers that might
8062 possibly be used in performing a function call. The code is
8063 moved to the start of the function so the incoming values are
8064 saved. */
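    /* (The returned block is normally handed straight to
       __builtin_apply; see the hypothetical sketch under
       BUILT_IN_APPLY below.)  */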
8065 case BUILT_IN_APPLY_ARGS:
8066 /* Don't do __builtin_apply_args more than once in a function.
8067 Save the result of the first call and reuse it. */
8068 if (apply_args_value != 0)
8069 return apply_args_value;
8070 {
8071 /* When this function is called, it means that registers must be
8072 saved on entry to this function. So we migrate the
8073 call to the first insn of this function. */
8074 rtx temp;
8075 rtx seq;
8076
8077 start_sequence ();
8078 temp = expand_builtin_apply_args ();
8079 seq = get_insns ();
8080 end_sequence ();
8081
8082 apply_args_value = temp;
8083
8084 /* Put the sequence after the NOTE that starts the function.
8085 If this is inside a SEQUENCE, make the outer-level insn
8086 chain current, so the code is placed at the start of the
8087 function. */
8088 push_topmost_sequence ();
8089 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8090 pop_topmost_sequence ();
8091 return temp;
8092 }
8093
8094 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8095 FUNCTION with a copy of the parameters described by
8096 ARGUMENTS, and ARGSIZE. It returns a block of memory
8097 allocated on the stack into which is stored all the registers
8098 that might possibly be used for returning the result of a
8099 function. ARGUMENTS is the value returned by
8100 __builtin_apply_args. ARGSIZE is the number of bytes of
8101 arguments that must be copied. ??? How should this value be
8102 computed? We'll also need a safe worst case value for varargs
8103 functions. */
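      /* Hypothetical sketch of the pair in use, with 64 as a guessed
	 worst-case argument block size:
	   __builtin_return (__builtin_apply (fn,
					      __builtin_apply_args (), 64));  */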
8104 case BUILT_IN_APPLY:
8105 if (arglist == 0
8106 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8107 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8108 || TREE_CHAIN (arglist) == 0
8109 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8110 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8111 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8112 return const0_rtx;
8113 else
8114 {
8115 int i;
8116 tree t;
8117 rtx ops[3];
8118
8119 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8120 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8121
8122 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8123 }
8124
8125 /* __builtin_return (RESULT) causes the function to return the
8126 value described by RESULT. RESULT is address of the block of
8127 memory returned by __builtin_apply. */
8128 case BUILT_IN_RETURN:
8129 if (arglist
8130 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8131 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8132 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8133 NULL_RTX, VOIDmode, 0));
8134 return const0_rtx;
8135
8136 case BUILT_IN_SAVEREGS:
8137 /* Don't do __builtin_saveregs more than once in a function.
8138 Save the result of the first call and reuse it. */
8139 if (saveregs_value != 0)
8140 return saveregs_value;
8141 {
8142 /* When this function is called, it means that registers must be
8143 saved on entry to this function. So we migrate the
8144 call to the first insn of this function. */
8145 rtx temp;
8146 rtx seq;
ca695ac9
JB
8147
8148 /* Now really call the function. `expand_call' does not call
8149 expand_builtin, so there is no danger of infinite recursion here. */
8150 start_sequence ();
8151
8152#ifdef EXPAND_BUILTIN_SAVEREGS
8153 /* Do whatever the machine needs done in this case. */
8154 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8155#else
8156 /* The register where the function returns its value
8157 is likely to have something else in it, such as an argument.
8158 So preserve that register around the call. */
d0c76654 8159
ca695ac9
JB
8160 if (value_mode != VOIDmode)
8161 {
d0c76654
RK
8162 rtx valreg = hard_libcall_value (value_mode);
8163 rtx saved_valreg = gen_reg_rtx (value_mode);
8164
ca695ac9 8165 emit_move_insn (saved_valreg, valreg);
d0c76654
RK
8166 temp = expand_call (exp, target, ignore);
8167 emit_move_insn (valreg, saved_valreg);
ca695ac9 8168 }
d0c76654
RK
8169 else
8170 /* Generate the call, putting the value in a pseudo. */
8171 temp = expand_call (exp, target, ignore);
ca695ac9
JB
8172#endif
8173
8174 seq = get_insns ();
8175 end_sequence ();
8176
8177 saveregs_value = temp;
8178
8179 /* Put the sequence after the NOTE that starts the function.
8180 If this is inside a SEQUENCE, make the outer-level insn
8181 chain current, so the code is placed at the start of the
8182 function. */
8183 push_topmost_sequence ();
8184 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8185 pop_topmost_sequence ();
8186 return temp;
8187 }
8188
8189 /* __builtin_args_info (N) returns word N of the arg space info
8190 for the current function. The number and meanings of words
8191	 are controlled by the definition of CUMULATIVE_ARGS.  */
8192 case BUILT_IN_ARGS_INFO:
8193 {
8194 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8195 int i;
8196 int *word_ptr = (int *) &current_function_args_info;
8197 tree type, elts, result;
8198
8199 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8200 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8201 __FILE__, __LINE__);
8202
8203 if (arglist != 0)
8204 {
8205 tree arg = TREE_VALUE (arglist);
8206 if (TREE_CODE (arg) != INTEGER_CST)
8207 error ("argument of `__builtin_args_info' must be constant");
8208 else
8209 {
8210 int wordnum = TREE_INT_CST_LOW (arg);
8211
8212 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8213 error ("argument of `__builtin_args_info' out of range");
8214 else
8215 return GEN_INT (word_ptr[wordnum]);
8216 }
8217 }
8218 else
8219 error ("missing argument in `__builtin_args_info'");
8220
8221 return const0_rtx;
8222
8223#if 0
8224 for (i = 0; i < nwords; i++)
8225 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8226
8227 type = build_array_type (integer_type_node,
8228 build_index_type (build_int_2 (nwords, 0)));
8229 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8230 TREE_CONSTANT (result) = 1;
8231 TREE_STATIC (result) = 1;
8232 result = build (INDIRECT_REF, build_pointer_type (type), result);
8233 TREE_CONSTANT (result) = 1;
8234 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8235#endif
8236 }
8237
17bbab26 8238 /* Return the address of the first anonymous stack arg. */
ca695ac9
JB
8239 case BUILT_IN_NEXT_ARG:
8240 {
8241 tree fntype = TREE_TYPE (current_function_decl);
c4dfe0fc 8242
33162beb
DE
8243 if ((TYPE_ARG_TYPES (fntype) == 0
8244 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8245 == void_type_node))
8246 && ! current_function_varargs)
ca695ac9
JB
8247 {
8248 error ("`va_start' used in function with fixed args");
8249 return const0_rtx;
8250 }
c4dfe0fc 8251
e4493c04
RK
8252 if (arglist)
8253 {
8254 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8255 tree arg = TREE_VALUE (arglist);
8256
8257 /* Strip off all nops for the sake of the comparison. This
6692a31f
RK
8258 is not quite the same as STRIP_NOPS. It does more.
8259	       We must also strip off INDIRECT_REF for C++ reference
8260 parameters. */
e4493c04
RK
8261 while (TREE_CODE (arg) == NOP_EXPR
8262 || TREE_CODE (arg) == CONVERT_EXPR
6692a31f
RK
8263 || TREE_CODE (arg) == NON_LVALUE_EXPR
8264 || TREE_CODE (arg) == INDIRECT_REF)
e4493c04
RK
8265 arg = TREE_OPERAND (arg, 0);
8266 if (arg != last_parm)
8267 warning ("second parameter of `va_start' not last named argument");
8268 }
5b4ff0de 8269 else if (! current_function_varargs)
e4493c04
RK
8270 /* Evidently an out of date version of <stdarg.h>; can't validate
8271 va_start's second argument, but can still work as intended. */
8272 warning ("`__builtin_next_arg' called without an argument");
ca695ac9
JB
8273 }
8274
8275 return expand_binop (Pmode, add_optab,
8276 current_function_internal_arg_pointer,
8277 current_function_arg_offset_rtx,
8278 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8279
8280 case BUILT_IN_CLASSIFY_TYPE:
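 /* For example, __builtin_classify_type (1) yields
 integer_type_class, and __builtin_classify_type (1.0) yields
 real_type_class.  */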
8281 if (arglist != 0)
8282 {
8283 tree type = TREE_TYPE (TREE_VALUE (arglist));
8284 enum tree_code code = TREE_CODE (type);
8285 if (code == VOID_TYPE)
8286 return GEN_INT (void_type_class);
8287 if (code == INTEGER_TYPE)
8288 return GEN_INT (integer_type_class);
8289 if (code == CHAR_TYPE)
8290 return GEN_INT (char_type_class);
8291 if (code == ENUMERAL_TYPE)
8292 return GEN_INT (enumeral_type_class);
8293 if (code == BOOLEAN_TYPE)
8294 return GEN_INT (boolean_type_class);
8295 if (code == POINTER_TYPE)
8296 return GEN_INT (pointer_type_class);
8297 if (code == REFERENCE_TYPE)
8298 return GEN_INT (reference_type_class);
8299 if (code == OFFSET_TYPE)
8300 return GEN_INT (offset_type_class);
8301 if (code == REAL_TYPE)
8302 return GEN_INT (real_type_class);
8303 if (code == COMPLEX_TYPE)
8304 return GEN_INT (complex_type_class);
8305 if (code == FUNCTION_TYPE)
8306 return GEN_INT (function_type_class);
8307 if (code == METHOD_TYPE)
8308 return GEN_INT (method_type_class);
8309 if (code == RECORD_TYPE)
8310 return GEN_INT (record_type_class);
8311 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8312 return GEN_INT (union_type_class);
8313 if (code == ARRAY_TYPE)
4042d440
PB
8314 {
8315 if (TYPE_STRING_FLAG (type))
8316 return GEN_INT (string_type_class);
8317 else
8318 return GEN_INT (array_type_class);
8319 }
ca695ac9
JB
8320 if (code == SET_TYPE)
8321 return GEN_INT (set_type_class);
8322 if (code == FILE_TYPE)
8323 return GEN_INT (file_type_class);
8324 if (code == LANG_TYPE)
8325 return GEN_INT (lang_type_class);
8326 }
8327 return GEN_INT (no_type_class);
8328
8329 case BUILT_IN_CONSTANT_P:
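 /* E.g. __builtin_constant_p (42) and __builtin_constant_p ("x")
 expand to 1 here, while __builtin_constant_p (some_variable)
 expands to 0.  */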
8330 if (arglist == 0)
8331 return const0_rtx;
8332 else
33cf5823
RK
8333 {
8334 tree arg = TREE_VALUE (arglist);
8335
8336 STRIP_NOPS (arg);
8337 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8338 || (TREE_CODE (arg) == ADDR_EXPR
8339 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8340 ? const1_rtx : const0_rtx);
8341 }
ca695ac9
JB
8342
8343 case BUILT_IN_FRAME_ADDRESS:
8344 /* The argument must be a nonnegative integer constant.
8345 It counts the number of frames to scan up the stack.
8346 The value is the address of that frame. */
8347 case BUILT_IN_RETURN_ADDRESS:
8348 /* The argument must be a nonnegative integer constant.
8349 It counts the number of frames to scan up the stack.
8350 The value is the return address saved in that frame. */
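 /* Usage sketch: __builtin_return_address (0) is the return address
 of the current frame; __builtin_frame_address (1) is the frame
 address of the caller.  Counts above 0 are only as reliable as
 the target's frame chain.  */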
8351 if (arglist == 0)
8352 /* Warning about missing arg was already issued. */
8353 return const0_rtx;
8354 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8355 {
8356 error ("invalid arg to `__builtin_return_address'");
8357 return const0_rtx;
8358 }
153c149b 8359 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
ca695ac9
JB
8360 {
8361 error ("invalid arg to `__builtin_return_address'");
8362 return const0_rtx;
8363 }
8364 else
8365 {
2bbf216f
RK
8366 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8367 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8368 hard_frame_pointer_rtx);
ca695ac9
JB
8369
8370 /* For __builtin_frame_address, return what we've got. */
8371 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8372 return tem;
8373
2bbf216f
RK
8374 if (GET_CODE (tem) != REG)
8375 tem = copy_to_reg (tem);
8376 return tem;
ca695ac9
JB
8377 }
8378
8379 case BUILT_IN_ALLOCA:
8380 if (arglist == 0
8381 /* Arg could be non-integer if user redeclared this fcn wrong. */
8382 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 8383 break;
1ee86d15 8384
ca695ac9
JB
8385 /* Compute the argument. */
8386 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8387
8388 /* Allocate the desired space. */
1ee86d15 8389 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
ca695ac9
JB
8390
8391 case BUILT_IN_FFS:
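 /* E.g. __builtin_ffs (8) is 4 (one plus the index of the least
 significant 1-bit) and __builtin_ffs (0) is 0.  */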
8392 /* If not optimizing, call the library function. */
98aad286 8393 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8394 break;
8395
8396 if (arglist == 0
8397 /* Arg could be non-integer if user redeclared this fcn wrong. */
8398 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 8399 break;
ca695ac9
JB
8400
8401 /* Compute the argument. */
8402 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8403 /* Compute ffs, into TARGET if possible.
8404 Set TARGET to wherever the result comes back. */
8405 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8406 ffs_optab, op0, target, 1);
8407 if (target == 0)
8408 abort ();
8409 return target;
8410
8411 case BUILT_IN_STRLEN:
8412 /* If not optimizing, call the library function. */
98aad286 8413 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8414 break;
8415
8416 if (arglist == 0
8417 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8418 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7b073ca6 8419 break;
ca695ac9
JB
8420 else
8421 {
8422 tree src = TREE_VALUE (arglist);
8423 tree len = c_strlen (src);
8424
8425 int align
8426 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8427
8428 rtx result, src_rtx, char_rtx;
8429 enum machine_mode insn_mode = value_mode, char_mode;
8430 enum insn_code icode;
8431
0f41302f 8432 /* If the length is known, just return it. */
ca695ac9
JB
8433 if (len != 0)
8434 return expand_expr (len, target, mode, 0);
8435
0f41302f 8436 /* If SRC is not a pointer type, don't do this operation inline. */
ca695ac9
JB
8437 if (align == 0)
8438 break;
8439
0f41302f 8440 /* Call a function if we can't compute strlen in the right mode. */
ca695ac9
JB
8441
8442 while (insn_mode != VOIDmode)
8443 {
8444 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8445 if (icode != CODE_FOR_nothing)
8446 break;
bbf6f052 8447
ca695ac9
JB
8448 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8449 }
8450 if (insn_mode == VOIDmode)
8451 break;
bbf6f052 8452
ca695ac9
JB
8453 /* Make a place to write the result of the instruction. */
8454 result = target;
8455 if (! (result != 0
8456 && GET_CODE (result) == REG
8457 && GET_MODE (result) == insn_mode
8458 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8459 result = gen_reg_rtx (insn_mode);
bbf6f052 8460
ca695ac9
JB
8461 /* Make sure the operands are acceptable to the predicates. */
8462
8463 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8464 result = gen_reg_rtx (insn_mode);
8465
8466 src_rtx = memory_address (BLKmode,
88f63c77 8467 expand_expr (src, NULL_RTX, ptr_mode,
ca695ac9
JB
8468 EXPAND_NORMAL));
8469 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8470 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8471
8472 char_rtx = const0_rtx;
8473 char_mode = insn_operand_mode[(int)icode][2];
8474 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8475 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8476
8477 emit_insn (GEN_FCN (icode) (result,
8478 gen_rtx (MEM, BLKmode, src_rtx),
8479 char_rtx, GEN_INT (align)));
8480
8481 /* Return the value in the proper mode for this function. */
8482 if (GET_MODE (result) == value_mode)
8483 return result;
8484 else if (target != 0)
8485 {
8486 convert_move (target, result, 0);
8487 return target;
8488 }
8489 else
8490 return convert_to_mode (value_mode, result, 0);
8491 }
8492
8493 case BUILT_IN_STRCPY:
e87b4f3f 8494 /* If not optimizing, call the library function. */
98aad286 8495 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
e87b4f3f
RS
8496 break;
8497
8498 if (arglist == 0
ca695ac9
JB
8499 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8500 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8501 || TREE_CHAIN (arglist) == 0
8502 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 8503 break;
ca695ac9 8504 else
db0e6d01 8505 {
ca695ac9 8506 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
e7c33f54 8507
ca695ac9
JB
8508 if (len == 0)
8509 break;
e7c33f54 8510
ca695ac9 8511 len = size_binop (PLUS_EXPR, len, integer_one_node);
e7c33f54 8512
ca695ac9 8513 chainon (arglist, build_tree_list (NULL_TREE, len));
1bbddf11
JVA
8514 }
8515
ca695ac9
JB
8516 /* Drops in. */
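 /* E.g. strcpy (d, "abc") now carries the arglist (d, "abc", 4)
 and is expanded by the memcpy code that follows.  */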
8517 case BUILT_IN_MEMCPY:
8518 /* If not optimizing, call the library function. */
98aad286 8519 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9 8520 break;
e7c33f54 8521
ca695ac9
JB
8522 if (arglist == 0
8523 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8524 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8525 || TREE_CHAIN (arglist) == 0
8526 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8527 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8528 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 8529 break;
ca695ac9 8530 else
e7c33f54 8531 {
ca695ac9
JB
8532 tree dest = TREE_VALUE (arglist);
8533 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8534 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e9cf6a97 8535 tree type;
e87b4f3f 8536
ca695ac9
JB
8537 int src_align
8538 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8539 int dest_align
8540 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8541 rtx dest_rtx, dest_mem, src_mem;
60bac6ea 8542
ca695ac9
JB
8543 /* If either SRC or DEST is not a pointer type, don't do
8544 this operation in-line. */
8545 if (src_align == 0 || dest_align == 0)
8546 {
8547 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8548 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8549 break;
8550 }
8551
88f63c77 8552 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
ca695ac9
JB
8553 dest_mem = gen_rtx (MEM, BLKmode,
8554 memory_address (BLKmode, dest_rtx));
e9cf6a97 8555 /* There could be a void* cast on top of the object. */
5480a90c
RK
8556 while (TREE_CODE (dest) == NOP_EXPR)
8557 dest = TREE_OPERAND (dest, 0);
8558 type = TREE_TYPE (TREE_TYPE (dest));
e9cf6a97 8559 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
ca695ac9
JB
8560 src_mem = gen_rtx (MEM, BLKmode,
8561 memory_address (BLKmode,
8562 expand_expr (src, NULL_RTX,
88f63c77
RK
8563 ptr_mode,
8564 EXPAND_SUM)));
e9cf6a97 8565 /* There could be a void* cast on top of the object. */
5480a90c
RK
8566 while (TREE_CODE (src) == NOP_EXPR)
8567 src = TREE_OPERAND (src, 0);
8568 type = TREE_TYPE (TREE_TYPE (src));
e9cf6a97 8569 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
ca695ac9
JB
8570
8571 /* Copy word part most expediently. */
8572 emit_block_move (dest_mem, src_mem,
8573 expand_expr (len, NULL_RTX, VOIDmode, 0),
8574 MIN (src_align, dest_align));
85c53d24 8575 return force_operand (dest_rtx, NULL_RTX);
ca695ac9
JB
8576 }
8577
d7f21d63
RK
8578 case BUILT_IN_MEMSET:
8579 /* If not optimizing, call the library function. */
8580 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8581 break;
8582
8583 if (arglist == 0
8584 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8585 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8586 || TREE_CHAIN (arglist) == 0
8587 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8588 != INTEGER_TYPE)
8589 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8590 || (INTEGER_CST
8591 != (TREE_CODE (TREE_TYPE
8592 (TREE_VALUE
8593 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8594 break;
8595 else
8596 {
8597 tree dest = TREE_VALUE (arglist);
8598 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8599 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8600 tree type;
8601
8602 int dest_align
8603 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8604 rtx dest_rtx, dest_mem;
8605
8606 /* If DEST is not a pointer type, don't do this
8607 operation in-line. */
8608 if (dest_align == 0)
8609 break;
8610
8611 /* If VAL is not 0, don't do this operation in-line. */
8612 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8613 break;
8614
8615 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8616 dest_mem = gen_rtx (MEM, BLKmode,
8617 memory_address (BLKmode, dest_rtx));
8618 /* There could be a void* cast on top of the object. */
8619 while (TREE_CODE (dest) == NOP_EXPR)
8620 dest = TREE_OPERAND (dest, 0);
8621 type = TREE_TYPE (TREE_TYPE (dest));
8622 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8623
8624 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8625 dest_align);
8626
8627 return force_operand (dest_rtx, NULL_RTX);
8628 }
8629
ca695ac9
JB
8630/* These comparison functions need an instruction that returns an actual
8631 index. An ordinary compare that just sets the condition codes
8632 is not enough. */
8633#ifdef HAVE_cmpstrsi
8634 case BUILT_IN_STRCMP:
8635 /* If not optimizing, call the library function. */
98aad286 8636 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8637 break;
8638
8639 if (arglist == 0
8640 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8641 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8642 || TREE_CHAIN (arglist) == 0
8643 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 8644 break;
ca695ac9
JB
8645 else if (!HAVE_cmpstrsi)
8646 break;
8647 {
8648 tree arg1 = TREE_VALUE (arglist);
8649 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8650 tree offset;
8651 tree len, len2;
8652
8653 len = c_strlen (arg1);
8654 if (len)
8655 len = size_binop (PLUS_EXPR, integer_one_node, len);
8656 len2 = c_strlen (arg2);
8657 if (len2)
8658 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8659
8660 /* If we don't have a constant length for the first, use the length
8661 of the second, if we know it. We don't require a constant for
8662 this case; some cost analysis could be done if both are available
8663 but neither is constant. For now, assume they're equally cheap.
8664
8665 If both strings have constant lengths, use the smaller. This
8666 could arise if optimization results in strcpy being called with
8667 two fixed strings, or if the code was machine-generated. We should
8668 add some code to the `memcmp' handler below to deal with such
8669 situations, someday. */
8670 if (!len || TREE_CODE (len) != INTEGER_CST)
8671 {
8672 if (len2)
8673 len = len2;
8674 else if (len == 0)
8675 break;
8676 }
8677 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8678 {
8679 if (tree_int_cst_lt (len2, len))
8680 len = len2;
8681 }
8682
8683 chainon (arglist, build_tree_list (NULL_TREE, len));
8684 }
8685
8686 /* Drops in. */
8687 case BUILT_IN_MEMCMP:
8688 /* If not optimizing, call the library function. */
98aad286 8689 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8690 break;
8691
8692 if (arglist == 0
8693 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8694 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8695 || TREE_CHAIN (arglist) == 0
8696 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8697 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8698 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 8699 break;
ca695ac9
JB
8700 else if (!HAVE_cmpstrsi)
8701 break;
8702 {
8703 tree arg1 = TREE_VALUE (arglist);
8704 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8705 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8706 rtx result;
8707
8708 int arg1_align
8709 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8710 int arg2_align
8711 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8712 enum machine_mode insn_mode
8713 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
60bac6ea 8714
ca695ac9
JB
8715 /* If either arg is not a pointer type, call the library function. */
8716 if (arg1_align == 0 || arg2_align == 0)
8717 {
8718 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8719 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8720 break;
8721 }
60bac6ea 8722
ca695ac9
JB
8723 /* Make a place to write the result of the instruction. */
8724 result = target;
8725 if (! (result != 0
8726 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8727 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8728 result = gen_reg_rtx (insn_mode);
60bac6ea 8729
ca695ac9
JB
8730 emit_insn (gen_cmpstrsi (result,
8731 gen_rtx (MEM, BLKmode,
88f63c77
RK
8732 expand_expr (arg1, NULL_RTX,
8733 ptr_mode,
ca695ac9
JB
8734 EXPAND_NORMAL)),
8735 gen_rtx (MEM, BLKmode,
88f63c77
RK
8736 expand_expr (arg2, NULL_RTX,
8737 ptr_mode,
ca695ac9
JB
8738 EXPAND_NORMAL)),
8739 expand_expr (len, NULL_RTX, VOIDmode, 0),
8740 GEN_INT (MIN (arg1_align, arg2_align))));
60bac6ea 8741
ca695ac9
JB
8742 /* Return the value in the proper mode for this function. */
8743 mode = TYPE_MODE (TREE_TYPE (exp));
8744 if (GET_MODE (result) == mode)
8745 return result;
8746 else if (target != 0)
8747 {
8748 convert_move (target, result, 0);
8749 return target;
60bac6ea 8750 }
ca695ac9
JB
8751 else
8752 return convert_to_mode (mode, result, 0);
8753 }
60bac6ea 8754#else
ca695ac9
JB
8755 case BUILT_IN_STRCMP:
8756 case BUILT_IN_MEMCMP:
8757 break;
60bac6ea
RS
8758#endif
8759
4ed67205
RK
8760 /* __builtin_setjmp is passed a pointer to an array of five words
8761 (not all will be used on all machines). It operates similarly to
8762 the C library function of the same name, but is more efficient.
8763 Much of the code below (and for longjmp) is copied from the handling
8764 of non-local gotos.
8765
8766 NOTE: This is intended for use by GNAT and will only work when
8767 used in the way GNAT uses it.  This code will likely NOT survive to
8768 the GCC 2.8.0 release. */
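 /* As a usage sketch (front-end generated code only):

 void *jmpbuf[5];

 if (__builtin_setjmp (jmpbuf) == 0)
 ... straight-line path; may call __builtin_longjmp (jmpbuf, 1) ...
 else
 ... control resumes here, and the call above yields 1 ...
 */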
8769 case BUILT_IN_SETJMP:
8770 if (arglist == 0
8771 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8772 break;
8773
8774 {
85ab4aaa
RK
8775 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8776 VOIDmode, 0);
4ed67205
RK
8777 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8778 enum machine_mode sa_mode = Pmode;
8779 rtx stack_save;
7565a035
RK
8780 int old_inhibit_defer_pop = inhibit_defer_pop;
8781 int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8782 get_identifier ("__dummy"), 0);
8783 rtx next_arg_reg;
8784 CUMULATIVE_ARGS args_so_far;
a8a8cbb7 8785 int i;
4ed67205 8786
85ab4aaa
RK
8787#ifdef POINTERS_EXTEND_UNSIGNED
8788 buf_addr = convert_memory_address (Pmode, buf_addr);
8789#endif
8790
8791 buf_addr = force_reg (Pmode, buf_addr);
8792
4ed67205
RK
8793 if (target == 0 || GET_CODE (target) != REG
8794 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8795 target = gen_reg_rtx (value_mode);
8796
8797 emit_queue ();
8798
0dddb42d 8799 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
4ed67205
RK
8800 current_function_calls_setjmp = 1;
8801
8802 /* We store the frame pointer and the address of lab1 in the buffer
8803 and use the rest of it for the stack save area, which is
8804 machine-dependent. */
8805 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8806 virtual_stack_vars_rtx);
8807 emit_move_insn
8808 (validize_mem (gen_rtx (MEM, Pmode,
8809 plus_constant (buf_addr,
8810 GET_MODE_SIZE (Pmode)))),
8811 gen_rtx (LABEL_REF, Pmode, lab1));
8812
8813#ifdef HAVE_save_stack_nonlocal
8814 if (HAVE_save_stack_nonlocal)
8815 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8816#endif
8817
8818 stack_save = gen_rtx (MEM, sa_mode,
8819 plus_constant (buf_addr,
8820 2 * GET_MODE_SIZE (Pmode)));
8821 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8822
7565a035
RK
8823#ifdef HAVE_setjmp
8824 if (HAVE_setjmp)
8825 emit_insn (gen_setjmp ());
8826#endif
8827
4ed67205
RK
8828 /* Set TARGET to zero and branch around the other case. */
8829 emit_move_insn (target, const0_rtx);
8830 emit_jump_insn (gen_jump (lab2));
8831 emit_barrier ();
8832 emit_label (lab1);
8833
a8a8cbb7 8834 /* Note that setjmp clobbers FP when we get here, so we have to
0dddb42d 8835 make sure it's marked as used by this function. */
a8a8cbb7
RK
8836 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8837
477efd50
RK
8838 /* Mark the static chain as clobbered here so life information
8839 doesn't get messed up for it. */
8840 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8841
4ed67205
RK
8842 /* Now put in the code to restore the frame pointer, and argument
8843 pointer, if needed. The code below is from expand_end_bindings
8844 in stmt.c; see detailed documentation there. */
8845#ifdef HAVE_nonlocal_goto
8846 if (! HAVE_nonlocal_goto)
8847#endif
8848 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8849
a8a8cbb7
RK
8850 current_function_has_nonlocal_goto = 1;
8851
4ed67205
RK
8852#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8853 if (fixed_regs[ARG_POINTER_REGNUM])
8854 {
8855#ifdef ELIMINABLE_REGS
8856 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
4ed67205
RK
8857
8858 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8859 if (elim_regs[i].from == ARG_POINTER_REGNUM
8860 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8861 break;
8862
8863 if (i == sizeof elim_regs / sizeof elim_regs[0])
8864#endif
8865 {
8866 /* Now restore our arg pointer from the address at which it
8867 was saved in our stack frame.
8868 If there hasn't been space allocated for it yet, make
8869 some now. */
8870 if (arg_pointer_save_area == 0)
8871 arg_pointer_save_area
8872 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8873 emit_move_insn (virtual_incoming_args_rtx,
8874 copy_to_reg (arg_pointer_save_area));
8875 }
8876 }
8877#endif
8878
32ccb0ac
RK
8879#ifdef HAVE_nonlocal_goto_receiver
8880 if (HAVE_nonlocal_goto_receiver)
8881 emit_insn (gen_nonlocal_goto_receiver ());
8882#endif
7565a035
RK
8883 /* The static chain pointer contains the address of the dummy function.
8884 We need to call it here to handle some PIC cases of restoring
8885 a global pointer. Then return 1. */
8886 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8887
8888 /* We can't actually call emit_library_call here, so do everything
8889 it does, which isn't much for a libfunc with no args. */
8890 op0 = memory_address (FUNCTION_MODE, op0);
8891
8892 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
2c7ee1a6 8893 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
7565a035
RK
8894 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8895
8896#ifndef ACCUMULATE_OUTGOING_ARGS
8897#ifdef HAVE_call_pop
8898 if (HAVE_call_pop)
8899 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8900 const0_rtx, next_arg_reg,
8901 GEN_INT (return_pops)));
8902 else
8903#endif
8904#endif
8905
8906#ifdef HAVE_call
8907 if (HAVE_call)
8908 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8909 const0_rtx, next_arg_reg, const0_rtx));
4ed67205 8910 else
7565a035
RK
8911#endif
8912 abort ();
4ed67205 8913
7565a035 8914 emit_move_insn (target, const1_rtx);
4ed67205
RK
8915 emit_label (lab2);
8916 return target;
8917 }
8918
8919 /* __builtin_longjmp is passed a pointer to an array of five words
7565a035 8920 and a value, which is a dummy. It's similar to the C library longjmp
4ed67205
RK
8921 function but works with __builtin_setjmp above. */
8922 case BUILT_IN_LONGJMP:
8923 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8924 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8925 break;
8926
8927 {
b089937a
RK
8928 tree dummy_id = get_identifier ("__dummy");
8929 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
8930 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
85ab4aaa 8931#ifdef POINTERS_EXTEND_UNSIGNED
4ed67205 8932 rtx buf_addr
0fedef28 8933 = force_reg (Pmode,
85ab4aaa
RK
8934 convert_memory_address
8935 (Pmode,
8936 expand_expr (TREE_VALUE (arglist),
8937 NULL_RTX, VOIDmode, 0)));
8938#else
8939 rtx buf_addr
8940 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
8941 NULL_RTX,
8942 VOIDmode, 0));
8943#endif
4ed67205
RK
8944 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8945 rtx lab = gen_rtx (MEM, Pmode,
8946 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8947 enum machine_mode sa_mode
8948#ifdef HAVE_save_stack_nonlocal
8949 = (HAVE_save_stack_nonlocal
8950 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8951 : Pmode);
8952#else
8953 = Pmode;
8954#endif
8955 rtx stack = gen_rtx (MEM, sa_mode,
8956 plus_constant (buf_addr,
8957 2 * GET_MODE_SIZE (Pmode)));
b089937a
RK
8958
8959 DECL_EXTERNAL (dummy_decl) = 1;
8960 TREE_PUBLIC (dummy_decl) = 1;
8961 make_decl_rtl (dummy_decl, NULL_PTR, 1);
7565a035
RK
8962
8963 /* Expand the second expression just for side-effects. */
8964 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8965 const0_rtx, VOIDmode, 0);
8966
b089937a 8967 assemble_external (dummy_decl);
4ed67205
RK
8968
8969 /* Pick up FP, label, and SP from the block and jump. This code is
8970 from expand_goto in stmt.c; see there for detailed comments. */
8971#if HAVE_nonlocal_goto
8972 if (HAVE_nonlocal_goto)
b089937a
RK
8973 emit_insn (gen_nonlocal_goto (fp, lab, stack,
8974 XEXP (DECL_RTL (dummy_decl), 0)));
4ed67205
RK
8975 else
8976#endif
8977 {
7565a035 8978 lab = copy_to_reg (lab);
4ed67205
RK
8979 emit_move_insn (hard_frame_pointer_rtx, fp);
8980 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8981
7565a035
RK
8982 /* Put in the static chain register the address of the dummy
8983 function. */
b089937a 8984 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
4ed67205
RK
8985 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8986 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
8987 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
7565a035 8988 emit_indirect_jump (lab);
4ed67205
RK
8989 }
8990
8991 return const0_rtx;
8992 }
8993
ca695ac9
JB
8994 default: /* just do library call, if unknown builtin */
8995 error ("built-in function `%s' not currently supported",
8996 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8997 }
e87b4f3f 8998
ca695ac9
JB
8999 /* The switch statement above can drop through to cause the function
9000 to be called normally. */
e7c33f54 9001
ca695ac9
JB
9002 return expand_call (exp, target, ignore);
9003}
9004\f
9005/* Built-in functions to perform an untyped call and return. */
0006469d 9006
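/* Usage sketch: together these support generic forwarding functions of
 the form

 void *args = __builtin_apply_args ();
 void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
 __builtin_return (result);

 where TARGET_FN and the argument-block size 64 are caller-supplied
 placeholders.  */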
ca695ac9
JB
9007/* For each register that may be used for calling a function, this
9008 gives a mode used to copy the register's value. VOIDmode indicates
9009 the register is not used for calling a function. If the machine
9010 has register windows, this gives only the outbound registers.
9011 INCOMING_REGNO gives the corresponding inbound register. */
9012static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 9013
ca695ac9
JB
9014/* For each register that may be used for returning values, this gives
9015 a mode used to copy the register's value. VOIDmode indicates the
9016 register is not used for returning values. If the machine has
9017 register windows, this gives only the outbound registers.
9018 INCOMING_REGNO gives the corresponding inbound register. */
9019static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 9020
ca695ac9
JB
9021/* For each register that may be used for calling a function, this
9022 gives the offset of that register into the block returned by
9faa82d8 9023 __builtin_apply_args. 0 indicates that the register is not
0f41302f 9024 used for calling a function. */
ca695ac9 9025static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
0006469d 9026
ca695ac9
JB
9027/* Return the offset of register REGNO into the block returned by
9028 __builtin_apply_args. This is not declared static, since it is
0f41302f 9029 needed in objc-act.c. */
0006469d 9030
ca695ac9
JB
9031int
9032apply_args_register_offset (regno)
9033 int regno;
9034{
9035 apply_args_size ();
0006469d 9036
ca695ac9 9037 /* Arguments are always put in outgoing registers (in the argument
0f41302f 9038 block) when that makes sense. */
ca695ac9
JB
9039#ifdef OUTGOING_REGNO
9040 regno = OUTGOING_REGNO(regno);
9041#endif
9042 return apply_args_reg_offset[regno];
9043}
0006469d 9044
ca695ac9
JB
9045/* Return the size required for the block returned by __builtin_apply_args,
9046 and initialize apply_args_mode. */
0006469d 9047
ca695ac9
JB
9048static int
9049apply_args_size ()
9050{
9051 static int size = -1;
9052 int align, regno;
9053 enum machine_mode mode;
bbf6f052 9054
ca695ac9
JB
9055 /* The values computed by this function never change. */
9056 if (size < 0)
9057 {
9058 /* The first value is the incoming arg-pointer. */
9059 size = GET_MODE_SIZE (Pmode);
bbf6f052 9060
ca695ac9
JB
9061 /* The second value is the structure value address unless this is
9062 passed as an "invisible" first argument. */
9063 if (struct_value_rtx)
9064 size += GET_MODE_SIZE (Pmode);
9065
9066 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9067 if (FUNCTION_ARG_REGNO_P (regno))
bbf6f052 9068 {
ca695ac9
JB
9069 /* Search for the proper mode for copying this register's
9070 value. I'm not sure this is right, but it works so far. */
9071 enum machine_mode best_mode = VOIDmode;
9072
9073 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9074 mode != VOIDmode;
9075 mode = GET_MODE_WIDER_MODE (mode))
9076 if (HARD_REGNO_MODE_OK (regno, mode)
9077 && HARD_REGNO_NREGS (regno, mode) == 1)
9078 best_mode = mode;
9079
9080 if (best_mode == VOIDmode)
9081 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9082 mode != VOIDmode;
9083 mode = GET_MODE_WIDER_MODE (mode))
9084 if (HARD_REGNO_MODE_OK (regno, mode)
9085 && (mov_optab->handlers[(int) mode].insn_code
9086 != CODE_FOR_nothing))
9087 best_mode = mode;
9088
9089 mode = best_mode;
9090 if (mode == VOIDmode)
9091 abort ();
9092
9093 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9094 if (size % align != 0)
9095 size = CEIL (size, align) * align;
9096 apply_args_reg_offset[regno] = size;
9097 size += GET_MODE_SIZE (mode);
9098 apply_args_mode[regno] = mode;
9099 }
9100 else
9101 {
9102 apply_args_mode[regno] = VOIDmode;
9103 apply_args_reg_offset[regno] = 0;
bbf6f052 9104 }
ca695ac9
JB
9105 }
9106 return size;
9107}
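/* For instance, if SIZE is 6 when a register with ALIGN 4 is reached,
 the rounding above yields CEIL (6, 4) * 4 = 8, so each saved value
 starts on a boundary natural for its mode.  */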
bbf6f052 9108
ca695ac9
JB
9109/* Return the size required for the block returned by __builtin_apply,
9110 and initialize apply_result_mode. */
bbf6f052 9111
ca695ac9
JB
9112static int
9113apply_result_size ()
9114{
9115 static int size = -1;
9116 int align, regno;
9117 enum machine_mode mode;
bbf6f052 9118
ca695ac9
JB
9119 /* The values computed by this function never change. */
9120 if (size < 0)
9121 {
9122 size = 0;
bbf6f052 9123
ca695ac9
JB
9124 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9125 if (FUNCTION_VALUE_REGNO_P (regno))
9126 {
9127 /* Search for the proper mode for copying this register's
9128 value. I'm not sure this is right, but it works so far. */
9129 enum machine_mode best_mode = VOIDmode;
bbf6f052 9130
ca695ac9
JB
9131 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9132 mode != TImode;
9133 mode = GET_MODE_WIDER_MODE (mode))
9134 if (HARD_REGNO_MODE_OK (regno, mode))
9135 best_mode = mode;
bbf6f052 9136
ca695ac9
JB
9137 if (best_mode == VOIDmode)
9138 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9139 mode != VOIDmode;
9140 mode = GET_MODE_WIDER_MODE (mode))
9141 if (HARD_REGNO_MODE_OK (regno, mode)
9142 && (mov_optab->handlers[(int) mode].insn_code
9143 != CODE_FOR_nothing))
9144 best_mode = mode;
bbf6f052 9145
ca695ac9
JB
9146 mode = best_mode;
9147 if (mode == VOIDmode)
9148 abort ();
bbf6f052 9149
ca695ac9
JB
9150 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9151 if (size % align != 0)
9152 size = CEIL (size, align) * align;
9153 size += GET_MODE_SIZE (mode);
9154 apply_result_mode[regno] = mode;
bbf6f052
RK
9155 }
9156 else
ca695ac9 9157 apply_result_mode[regno] = VOIDmode;
bbf6f052 9158
ca695ac9
JB
9159 /* Allow targets that use untyped_call and untyped_return to override
9160 the size so that machine-specific information can be stored here. */
9161#ifdef APPLY_RESULT_SIZE
9162 size = APPLY_RESULT_SIZE;
9163#endif
9164 }
9165 return size;
9166}
bbf6f052 9167
ca695ac9
JB
9168#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9169/* Create a vector describing the result block RESULT. If SAVEP is true,
9170 the result block is used to save the values; otherwise it is used to
9171 restore the values. */
bbf6f052 9172
ca695ac9
JB
9173static rtx
9174result_vector (savep, result)
9175 int savep;
9176 rtx result;
9177{
9178 int regno, size, align, nelts;
9179 enum machine_mode mode;
9180 rtx reg, mem;
9181 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9182
9183 size = nelts = 0;
9184 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9185 if ((mode = apply_result_mode[regno]) != VOIDmode)
9186 {
9187 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9188 if (size % align != 0)
9189 size = CEIL (size, align) * align;
18992995 9190 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
ca695ac9
JB
9191 mem = change_address (result, mode,
9192 plus_constant (XEXP (result, 0), size));
9193 savevec[nelts++] = (savep
9194 ? gen_rtx (SET, VOIDmode, mem, reg)
9195 : gen_rtx (SET, VOIDmode, reg, mem));
9196 size += GET_MODE_SIZE (mode);
bbf6f052 9197 }
ca695ac9
JB
9198 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9199}
9200#endif /* HAVE_untyped_call or HAVE_untyped_return */
bbf6f052 9201
ca695ac9
JB
9202/* Save the state required to perform an untyped call with the same
9203 arguments as were passed to the current function. */
9204
9205static rtx
9206expand_builtin_apply_args ()
9207{
9208 rtx registers;
9209 int size, align, regno;
9210 enum machine_mode mode;
9211
9212 /* Create a block where the arg-pointer, structure value address,
9213 and argument registers can be saved. */
9214 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9215
9216 /* Walk past the arg-pointer and structure value address. */
9217 size = GET_MODE_SIZE (Pmode);
9218 if (struct_value_rtx)
9219 size += GET_MODE_SIZE (Pmode);
9220
c816db88
RK
9221 /* Save each register used in calling a function to the block. */
9222 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
ca695ac9 9223 if ((mode = apply_args_mode[regno]) != VOIDmode)
bbf6f052 9224 {
ee33823f
RK
9225 rtx tem;
9226
ca695ac9
JB
9227 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9228 if (size % align != 0)
9229 size = CEIL (size, align) * align;
ee33823f
RK
9230
9231 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9232
9233#ifdef STACK_REGS
9234 /* For reg-stack.c's stack register household.
9235 Compare with a similar piece of code in function.c. */
9236
9237 emit_insn (gen_rtx (USE, mode, tem));
9238#endif
9239
ca695ac9
JB
9240 emit_move_insn (change_address (registers, mode,
9241 plus_constant (XEXP (registers, 0),
9242 size)),
ee33823f 9243 tem);
ca695ac9 9244 size += GET_MODE_SIZE (mode);
bbf6f052
RK
9245 }
9246
ca695ac9
JB
9247 /* Save the arg pointer to the block. */
9248 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9249 copy_to_reg (virtual_incoming_args_rtx));
9250 size = GET_MODE_SIZE (Pmode);
bbf6f052 9251
ca695ac9
JB
9252 /* Save the structure value address unless this is passed as an
9253 "invisible" first argument. */
9254 if (struct_value_incoming_rtx)
9255 {
9256 emit_move_insn (change_address (registers, Pmode,
9257 plus_constant (XEXP (registers, 0),
9258 size)),
9259 copy_to_reg (struct_value_incoming_rtx));
9260 size += GET_MODE_SIZE (Pmode);
9261 }
9262
9263 /* Return the address of the block. */
9264 return copy_addr_to_reg (XEXP (registers, 0));
9265}
9266
9267/* Perform an untyped call and save the state required to perform an
9268 untyped return of whatever value was returned by the given function. */
9269
9270static rtx
9271expand_builtin_apply (function, arguments, argsize)
9272 rtx function, arguments, argsize;
9273{
9274 int size, align, regno;
9275 enum machine_mode mode;
9276 rtx incoming_args, result, reg, dest, call_insn;
9277 rtx old_stack_level = 0;
b3f8cf4a 9278 rtx call_fusage = 0;
bbf6f052 9279
ca695ac9
JB
9280 /* Create a block where the return registers can be saved. */
9281 result = assign_stack_local (BLKmode, apply_result_size (), -1);
bbf6f052 9282
ca695ac9 9283 /* ??? The argsize value should be adjusted here. */
bbf6f052 9284
ca695ac9
JB
9285 /* Fetch the arg pointer from the ARGUMENTS block. */
9286 incoming_args = gen_reg_rtx (Pmode);
9287 emit_move_insn (incoming_args,
9288 gen_rtx (MEM, Pmode, arguments));
9289#ifndef STACK_GROWS_DOWNWARD
9290 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9291 incoming_args, 0, OPTAB_LIB_WIDEN);
46b68a37
JW
9292#endif
9293
ca695ac9
JB
9294 /* Perform postincrements before actually calling the function. */
9295 emit_queue ();
46b68a37 9296
ca695ac9
JB
9297 /* Push a new argument block and copy the arguments. */
9298 do_pending_stack_adjust ();
9299 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
bbf6f052 9300
ca695ac9
JB
9301 /* Push a block of memory onto the stack to store the memory arguments.
9302 Save the address in a register, and copy the memory arguments. ??? I
9303 haven't figured out how the calling convention macros affect this,
9304 but it's likely that the source and/or destination addresses in
9305 the block copy will need updating in machine-specific ways. */
9306 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9307 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9308 gen_rtx (MEM, BLKmode, incoming_args),
9309 argsize,
9310 PARM_BOUNDARY / BITS_PER_UNIT);
bbf6f052 9311
ca695ac9
JB
9312 /* Refer to the argument block. */
9313 apply_args_size ();
9314 arguments = gen_rtx (MEM, BLKmode, arguments);
9315
9316 /* Walk past the arg-pointer and structure value address. */
9317 size = GET_MODE_SIZE (Pmode);
9318 if (struct_value_rtx)
9319 size += GET_MODE_SIZE (Pmode);
9320
9321 /* Restore each of the registers previously saved. Make USE insns
c816db88
RK
9322 for each of these registers for use in making the call. */
9323 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
ca695ac9
JB
9324 if ((mode = apply_args_mode[regno]) != VOIDmode)
9325 {
9326 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9327 if (size % align != 0)
9328 size = CEIL (size, align) * align;
9329 reg = gen_rtx (REG, mode, regno);
9330 emit_move_insn (reg,
9331 change_address (arguments, mode,
9332 plus_constant (XEXP (arguments, 0),
9333 size)));
9334
b3f8cf4a 9335 use_reg (&call_fusage, reg);
ca695ac9
JB
9336 size += GET_MODE_SIZE (mode);
9337 }
9338
9339 /* Restore the structure value address unless this is passed as an
9340 "invisible" first argument. */
9341 size = GET_MODE_SIZE (Pmode);
9342 if (struct_value_rtx)
9343 {
9344 rtx value = gen_reg_rtx (Pmode);
9345 emit_move_insn (value,
9346 change_address (arguments, Pmode,
9347 plus_constant (XEXP (arguments, 0),
9348 size)));
9349 emit_move_insn (struct_value_rtx, value);
9350 if (GET_CODE (struct_value_rtx) == REG)
b3f8cf4a 9351 use_reg (&call_fusage, struct_value_rtx);
ca695ac9
JB
9352 size += GET_MODE_SIZE (Pmode);
9353 }
bbf6f052 9354
ca695ac9 9355 /* All arguments and registers used for the call are set up by now! */
b3f8cf4a 9356 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
bbf6f052 9357
ca695ac9
JB
9358 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9359 and we don't want to load it into a register as an optimization,
9360 because prepare_call_address already did it if it should be done. */
9361 if (GET_CODE (function) != SYMBOL_REF)
9362 function = memory_address (FUNCTION_MODE, function);
bbf6f052 9363
ca695ac9
JB
9364 /* Generate the actual call instruction and save the return value. */
9365#ifdef HAVE_untyped_call
9366 if (HAVE_untyped_call)
9367 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9368 result, result_vector (1, result)));
9369 else
9370#endif
9371#ifdef HAVE_call_value
9372 if (HAVE_call_value)
9373 {
9374 rtx valreg = 0;
bbf6f052 9375
ca695ac9
JB
9376 /* Locate the unique return register. It is not possible to
9377 express a call that sets more than one return register using
9378 call_value; use untyped_call for that. In fact, untyped_call
9379 only needs to save the return registers in the given block. */
9380 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9381 if ((mode = apply_result_mode[regno]) != VOIDmode)
9382 {
9383 if (valreg)
9384 abort (); /* HAVE_untyped_call required. */
9385 valreg = gen_rtx (REG, mode, regno);
9386 }
bbf6f052 9387
ca695ac9
JB
9388 emit_call_insn (gen_call_value (valreg,
9389 gen_rtx (MEM, FUNCTION_MODE, function),
9390 const0_rtx, NULL_RTX, const0_rtx));
bbf6f052 9391
ca695ac9
JB
9392 emit_move_insn (change_address (result, GET_MODE (valreg),
9393 XEXP (result, 0)),
9394 valreg);
9395 }
9396 else
9397#endif
9398 abort ();
bbf6f052 9399
b3f8cf4a 9400 /* Find the CALL insn we just emitted. */
ca695ac9
JB
9401 for (call_insn = get_last_insn ();
9402 call_insn && GET_CODE (call_insn) != CALL_INSN;
9403 call_insn = PREV_INSN (call_insn))
9404 ;
bbf6f052 9405
ca695ac9
JB
9406 if (! call_insn)
9407 abort ();
bbf6f052 9408
6d100794
RK
9409 /* Put the register usage information on the CALL. If there is already
9410 some usage information, put ours at the end. */
9411 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9412 {
9413 rtx link;
9414
9415 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9416 link = XEXP (link, 1))
9417 ;
9418
9419 XEXP (link, 1) = call_fusage;
9420 }
9421 else
9422 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
e7c33f54 9423
ca695ac9
JB
9424 /* Restore the stack. */
9425 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
e7c33f54 9426
ca695ac9
JB
9427 /* Return the address of the result block. */
9428 return copy_addr_to_reg (XEXP (result, 0));
9429}
e7c33f54 9430
ca695ac9 9431/* Perform an untyped return. */
e7c33f54 9432
ca695ac9
JB
9433static void
9434expand_builtin_return (result)
9435 rtx result;
9436{
9437 int size, align, regno;
9438 enum machine_mode mode;
9439 rtx reg;
b3f8cf4a 9440 rtx call_fusage = 0;
e7c33f54 9441
ca695ac9
JB
9442 apply_result_size ();
9443 result = gen_rtx (MEM, BLKmode, result);
e7c33f54 9444
ca695ac9
JB
9445#ifdef HAVE_untyped_return
9446 if (HAVE_untyped_return)
9447 {
9448 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9449 emit_barrier ();
9450 return;
9451 }
9452#endif
e7c33f54 9453
ca695ac9
JB
9454 /* Restore the return value and note that each value is used. */
9455 size = 0;
9456 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9457 if ((mode = apply_result_mode[regno]) != VOIDmode)
9458 {
9459 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9460 if (size % align != 0)
9461 size = CEIL (size, align) * align;
9462 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9463 emit_move_insn (reg,
9464 change_address (result, mode,
9465 plus_constant (XEXP (result, 0),
9466 size)));
e7c33f54 9467
b3f8cf4a 9468 push_to_sequence (call_fusage);
ca695ac9 9469 emit_insn (gen_rtx (USE, VOIDmode, reg));
b3f8cf4a 9470 call_fusage = get_insns ();
ca695ac9
JB
9471 end_sequence ();
9472 size += GET_MODE_SIZE (mode);
9473 }
e7c33f54 9474
ca695ac9 9475 /* Put the USE insns before the return. */
b3f8cf4a 9476 emit_insns (call_fusage);
e7c33f54 9477
ca695ac9
JB
9478 /* Return whatever value was restored by jumping directly to the end
9479 of the function. */
9480 expand_null_return ();
9481}
9482\f
9483/* Expand code for a post- or pre- increment or decrement
9484 and return the RTX for the result.
9485 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
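/* E.g. for `b = a++;' (POST == 1) the value returned is a copy of the
 old A, while for `b = ++a;' (POST == 0) it is the incremented A
 itself.  */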
e7c33f54 9486
ca695ac9 9487static rtx
7b8b9722 9488expand_increment (exp, post, ignore)
ca695ac9 9489 register tree exp;
7b8b9722 9490 int post, ignore;
ca695ac9
JB
9491{
9492 register rtx op0, op1;
9493 register rtx temp, value;
9494 register tree incremented = TREE_OPERAND (exp, 0);
9495 optab this_optab = add_optab;
9496 int icode;
9497 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9498 int op0_is_copy = 0;
9499 int single_insn = 0;
a97f5a86
RS
9500 /* 1 means we can't store into OP0 directly,
9501 because it is a subreg narrower than a word,
9502 and we don't dare clobber the rest of the word. */
9503 int bad_subreg = 0;
e7c33f54 9504
ca695ac9 9505 if (output_bytecode)
c02bd5d9
JB
9506 {
9507 bc_expand_expr (exp);
9508 return NULL_RTX;
9509 }
e7c33f54 9510
ca695ac9
JB
9511 /* Stabilize any component ref that might need to be
9512 evaluated more than once below. */
9513 if (!post
9514 || TREE_CODE (incremented) == BIT_FIELD_REF
9515 || (TREE_CODE (incremented) == COMPONENT_REF
9516 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9517 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9518 incremented = stabilize_reference (incremented);
9519 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9520 ones into save exprs so that they don't accidentally get evaluated
9521 more than once by the code below. */
9522 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9523 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9524 incremented = save_expr (incremented);
bbf6f052 9525
ca695ac9
JB
9526 /* Compute the operands as RTX.
9527 Note whether OP0 is the actual lvalue or a copy of it:
9528 I believe it is a copy iff it is a register or subreg
9529 and insns were generated in computing it. */
bbf6f052 9530
ca695ac9
JB
9531 temp = get_last_insn ();
9532 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
bbf6f052 9533
ca695ac9 9534 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9faa82d8 9535 in place but instead must do sign- or zero-extension during assignment,
ca695ac9
JB
9536 so we copy it into a new register and let the code below use it as
9537 a copy.
bbf6f052 9538
ca695ac9
JB
9539 Note that we can safely modify this SUBREG since it is known not to be
9540 shared (it was made by the expand_expr call above). */
bbf6f052 9541
ca695ac9 9542 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
3e073e72
RK
9543 {
9544 if (post)
9545 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9546 else
9547 bad_subreg = 1;
9548 }
a97f5a86
RS
9549 else if (GET_CODE (op0) == SUBREG
9550 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
79777b79
RK
9551 {
9552 /* We cannot increment this SUBREG in place. If we are
9553 post-incrementing, get a copy of the old value. Otherwise,
9554 just mark that we cannot increment in place. */
9555 if (post)
9556 op0 = copy_to_reg (op0);
9557 else
9558 bad_subreg = 1;
9559 }
bbf6f052 9560
ca695ac9
JB
9561 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9562 && temp != get_last_insn ());
9563 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052 9564
ca695ac9
JB
9565 /* Decide whether incrementing or decrementing. */
9566 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9567 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9568 this_optab = sub_optab;
bbf6f052 9569
ca695ac9
JB
9570 /* Convert decrement by a constant into a negative increment. */
9571 if (this_optab == sub_optab
9572 && GET_CODE (op1) == CONST_INT)
9573 {
9574 op1 = GEN_INT (- INTVAL (op1));
9575 this_optab = add_optab;
9576 }
bbf6f052 9577
ca695ac9
JB
9578 /* For a preincrement, see if we can do this with a single instruction. */
9579 if (!post)
9580 {
9581 icode = (int) this_optab->handlers[(int) mode].insn_code;
9582 if (icode != (int) CODE_FOR_nothing
9583 /* Make sure that OP0 is valid for operands 0 and 1
9584 of the insn we want to queue. */
9585 && (*insn_operand_predicate[icode][0]) (op0, mode)
9586 && (*insn_operand_predicate[icode][1]) (op0, mode)
9587 && (*insn_operand_predicate[icode][2]) (op1, mode))
9588 single_insn = 1;
9589 }
bbf6f052 9590
ca695ac9
JB
9591 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9592 then we cannot just increment OP0. We must therefore contrive to
9593 increment the original value. Then, for postincrement, we can return
9594 OP0 since it is a copy of the old value. For preincrement, expand here
a97f5a86
RS
9595 unless we can do it with a single insn.
9596
9597 Likewise if storing directly into OP0 would clobber high bits
9598 we need to preserve (bad_subreg). */
9599 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
ca695ac9
JB
9600 {
9601 /* This is the easiest way to increment the value wherever it is.
9602 Problems with multiple evaluation of INCREMENTED are prevented
9603 because either (1) it is a component_ref or preincrement,
9604 in which case it was stabilized above, or (2) it is an array_ref
9605 with constant index in an array in a register, which is
9606 safe to reevaluate. */
9607 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9608 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9609 ? MINUS_EXPR : PLUS_EXPR),
9610 TREE_TYPE (exp),
9611 incremented,
9612 TREE_OPERAND (exp, 1));
e9cdf6e4
RK
9613
9614 while (TREE_CODE (incremented) == NOP_EXPR
9615 || TREE_CODE (incremented) == CONVERT_EXPR)
9616 {
9617 newexp = convert (TREE_TYPE (incremented), newexp);
9618 incremented = TREE_OPERAND (incremented, 0);
9619 }
9620
7b8b9722 9621 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
ca695ac9
JB
9622 return post ? op0 : temp;
9623 }
bbf6f052 9624
ca695ac9
JB
9625 if (post)
9626 {
9627 /* We have a true reference to the value in OP0.
9628 If there is an insn to add or subtract in this mode, queue it.
9629 Queueing the increment insn avoids the register shuffling
9630 that often results if we must increment now and first save
9631 the old value for subsequent use. */
bbf6f052 9632
ca695ac9
JB
9633#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9634 op0 = stabilize (op0);
9635#endif
bbf6f052 9636
ca695ac9
JB
9637 icode = (int) this_optab->handlers[(int) mode].insn_code;
9638 if (icode != (int) CODE_FOR_nothing
9639 /* Make sure that OP0 is valid for operands 0 and 1
9640 of the insn we want to queue. */
9641 && (*insn_operand_predicate[icode][0]) (op0, mode)
9642 && (*insn_operand_predicate[icode][1]) (op0, mode))
9643 {
9644 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9645 op1 = force_reg (mode, op1);
bbf6f052 9646
ca695ac9
JB
9647 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9648 }
0842a179
RK
9649 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9650 {
9651 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9652 rtx temp, result;
9653
9654 op0 = change_address (op0, VOIDmode, addr);
9655 temp = force_reg (GET_MODE (op0), op0);
9656 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9657 op1 = force_reg (mode, op1);
9658
9659 /* The increment queue is LIFO, thus we have to `queue'
9660 the instructions in reverse order. */
9661 enqueue_insn (op0, gen_move_insn (op0, temp));
9662 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9663 return result;
9664 }
ca695ac9 9665 }
bbf6f052 9666
ca695ac9
JB
9667 /* Preincrement, or we can't increment with one simple insn. */
9668 if (post)
9669 /* Save a copy of the value before inc or dec, to return it later. */
9670 temp = value = copy_to_reg (op0);
9671 else
9672 /* Arrange to return the incremented value. */
9673 /* Copy the rtx because expand_binop will protect from the queue,
9674 and the results of that would be invalid for us to return
9675 if our caller does emit_queue before using our result. */
9676 temp = copy_rtx (value = op0);
bbf6f052 9677
ca695ac9
JB
9678 /* Increment however we can. */
9679 op1 = expand_binop (mode, this_optab, value, op1, op0,
9680 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9681 /* Make sure the value is stored into OP0. */
9682 if (op1 != op0)
9683 emit_move_insn (op0, op1);
bbf6f052 9684
ca695ac9
JB
9685 return temp;
9686}
9687\f
9688/* Expand all function calls contained within EXP, innermost ones first.
9689 But don't look within expressions that have sequence points.
9690 For each CALL_EXPR, record the rtx for its value
9691 in the CALL_EXPR_RTL field. */
bbf6f052 9692
ca695ac9
JB
9693static void
9694preexpand_calls (exp)
9695 tree exp;
9696{
9697 register int nops, i;
9698 int type = TREE_CODE_CLASS (TREE_CODE (exp));
bbf6f052 9699
ca695ac9
JB
9700 if (! do_preexpand_calls)
9701 return;
bbf6f052 9702
ca695ac9 9703 /* Only expressions and references can contain calls. */
bbf6f052 9704
ca695ac9
JB
9705 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9706 return;
bbf6f052 9707
ca695ac9
JB
9708 switch (TREE_CODE (exp))
9709 {
9710 case CALL_EXPR:
9711 /* Do nothing if already expanded. */
43198be7
RK
9712 if (CALL_EXPR_RTL (exp) != 0
9713 /* Do nothing if the call returns a variable-sized object. */
9714 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9715 /* Do nothing to built-in functions. */
9716 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9717 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9718 == FUNCTION_DECL)
9719 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
ca695ac9 9720 return;
bbf6f052 9721
43198be7 9722 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
ca695ac9 9723 return;
bbf6f052 9724
ca695ac9
JB
9725 case COMPOUND_EXPR:
9726 case COND_EXPR:
9727 case TRUTH_ANDIF_EXPR:
9728 case TRUTH_ORIF_EXPR:
9729 /* If we find one of these, then we can be sure
9730 the adjust will be done for it (since it makes jumps).
9731 Do it now, so that if this is inside an argument
9732 of a function, we don't get the stack adjustment
9733 after some other args have already been pushed. */
9734 do_pending_stack_adjust ();
9735 return;
bbf6f052 9736
ca695ac9
JB
9737 case BLOCK:
9738 case RTL_EXPR:
9739 case WITH_CLEANUP_EXPR:
402c7311 9740 case CLEANUP_POINT_EXPR:
ca695ac9 9741 return;
bbf6f052 9742
ca695ac9
JB
9743 case SAVE_EXPR:
9744 if (SAVE_EXPR_RTL (exp) != 0)
9745 return;
9746 }
bbf6f052 9747
ca695ac9
JB
9748 nops = tree_code_length[(int) TREE_CODE (exp)];
9749 for (i = 0; i < nops; i++)
9750 if (TREE_OPERAND (exp, i) != 0)
9751 {
9752 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9753 if (type == 'e' || type == '<' || type == '1' || type == '2'
9754 || type == 'r')
9755 preexpand_calls (TREE_OPERAND (exp, i));
9756 }
bbf6f052
RK
9757}
9758\f
ca695ac9
JB
9759/* At the start of a function, record that we have no previously-pushed
9760 arguments waiting to be popped. */
0006469d 9761
ca695ac9
JB
9762void
9763init_pending_stack_adjust ()
9764{
9765 pending_stack_adjust = 0;
9766}
fb2ca25a 9767
ca695ac9
JB
9768/* When exiting from function, if safe, clear out any pending stack adjust
9769 so the adjustment won't get done. */
904762c8 9770
ca695ac9
JB
9771void
9772clear_pending_stack_adjust ()
fb2ca25a 9773{
ca695ac9 9774#ifdef EXIT_IGNORE_STACK
b7c2e1e2
RK
9775 if (optimize > 0
9776 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
ca695ac9
JB
9777 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9778 && ! flag_inline_functions)
9779 pending_stack_adjust = 0;
fb2ca25a 9780#endif
fb2ca25a
KKT
9781}
9782
ca695ac9
JB
9783/* Pop any previously-pushed arguments that have not been popped yet. */
9784
9785void
9786do_pending_stack_adjust ()
9787{
9788 if (inhibit_defer_pop == 0)
9789 {
9790 if (pending_stack_adjust != 0)
9791 adjust_stack (GEN_INT (pending_stack_adjust));
9792 pending_stack_adjust = 0;
9793 }
9794}
9795
5dab5552
MS
9796/* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9797 Returns the cleanups to be performed. */
9798
9799static tree
9800defer_cleanups_to (old_cleanups)
9801 tree old_cleanups;
9802{
9803 tree new_cleanups = NULL_TREE;
9804 tree cleanups = cleanups_this_call;
9805 tree last = NULL_TREE;
9806
9807 while (cleanups_this_call != old_cleanups)
9808 {
3d195391 9809 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
4ea8537b 9810 last = cleanups_this_call;
5dab5552
MS
9811 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9812 }
9813
9814 if (last)
9815 {
9816 /* Remove the list from the chain of cleanups. */
9817 TREE_CHAIN (last) = NULL_TREE;
9818
9819 /* Reverse them so that we can build them in the right order. */
9820 cleanups = nreverse (cleanups);
9821
9ba73d38
MS
9822 /* All cleanups must be on the function_obstack. */
9823 push_obstacks_nochange ();
9824 resume_temporary_allocation ();
9825
5dab5552
MS
9826 while (cleanups)
9827 {
9828 if (new_cleanups)
9829 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9830 TREE_VALUE (cleanups), new_cleanups);
9831 else
9832 new_cleanups = TREE_VALUE (cleanups);
9833
9834 cleanups = TREE_CHAIN (cleanups);
9835 }
9ba73d38
MS
9836
9837 pop_obstacks ();
5dab5552
MS
9838 }
9839
9840 return new_cleanups;
9841}
9842
ca695ac9
JB
9843/* Expand all cleanups up to OLD_CLEANUPS.
9844 Needed here, and also for language-dependent calls. */
904762c8 9845
9846void
9847expand_cleanups_to (old_cleanups)
9848 tree old_cleanups;
0006469d 9849{
ca695ac9 9850 while (cleanups_this_call != old_cleanups)
0006469d 9851 {
3d195391 9852 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
d3158f1a 9853 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9854 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9855 }
9856}
9857\f
9858/* Expand conditional expressions. */
0006469d 9859
9860/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9861 LABEL is an rtx of code CODE_LABEL, in this function and all the
9862 functions here. */
0006469d 9863
9864void
9865jumpifnot (exp, label)
9866 tree exp;
9867 rtx label;
9868{
9869 do_jump (exp, label, NULL_RTX);
9870}
0006469d 9871
ca695ac9 9872/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
0006469d 9873
9874void
9875jumpif (exp, label)
9876 tree exp;
9877 rtx label;
9878{
9879 do_jump (exp, NULL_RTX, label);
9880}
0006469d 9881
9882/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9883 the result is zero, or IF_TRUE_LABEL if the result is one.
9884 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9885 meaning fall through in that case.
0006469d 9886
9887 do_jump always does any pending stack adjust except when it does not
9888 actually perform a jump. An example where there is no jump
9889 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
0006469d 9890
9891 This function is responsible for optimizing cases such as
9892 &&, || and comparison operators in EXP. */
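/* Illustrative sketch (hypothetical labels, not from the original
   source): for `if (a && b) f ();' the caller hands the
   TRUTH_ANDIF_EXPR to jumpifnot with an else_label, and the code
   emitted is roughly

	test A; branch to else_label if zero
	test B; branch to else_label if zero
	call f
     else_label:

   so operand 1 is never evaluated when operand 0 is false. */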
904762c8 9893
9894void
9895do_jump (exp, if_false_label, if_true_label)
9896 tree exp;
9897 rtx if_false_label, if_true_label;
0006469d 9898{
9899 register enum tree_code code = TREE_CODE (exp);
9900 /* Some cases need to create a label to jump to
9901 in order to properly fall through.
9902 These cases set DROP_THROUGH_LABEL nonzero. */
9903 rtx drop_through_label = 0;
9904 rtx temp;
9905 rtx comparison = 0;
9906 int i;
9907 tree type;
2f6e6d22 9908 enum machine_mode mode;
0006469d 9909
ca695ac9 9910 emit_queue ();
0006469d 9911
9912 switch (code)
9913 {
9914 case ERROR_MARK:
9915 break;
0006469d 9916
9917 case INTEGER_CST:
9918 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9919 if (temp)
9920 emit_jump (temp);
9921 break;
0006469d 9922
9923#if 0
9924 /* This is not true with #pragma weak */
9925 case ADDR_EXPR:
9926 /* The address of something can never be zero. */
9927 if (if_true_label)
9928 emit_jump (if_true_label);
9929 break;
9930#endif
0006469d 9931
9932 case NOP_EXPR:
9933 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9934 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9935 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9936 goto normal;
9937 case CONVERT_EXPR:
9938 /* If we are narrowing the operand, we have to do the compare in the
9939 narrower mode. */
9940 if ((TYPE_PRECISION (TREE_TYPE (exp))
9941 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9942 goto normal;
9943 case NON_LVALUE_EXPR:
9944 case REFERENCE_EXPR:
9945 case ABS_EXPR:
9946 case NEGATE_EXPR:
9947 case LROTATE_EXPR:
9948 case RROTATE_EXPR:
9949 /* These cannot change zero->non-zero or vice versa. */
9950 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9951 break;
0006469d 9952
9953#if 0
9954 /* This is never less insns than evaluating the PLUS_EXPR followed by
9955 a test and can be longer if the test is eliminated. */
9956 case PLUS_EXPR:
9957 /* Reduce to minus. */
9958 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9959 TREE_OPERAND (exp, 0),
9960 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9961 TREE_OPERAND (exp, 1))));
9962 /* Process as MINUS. */
0006469d 9963#endif
0006469d 9964
9965 case MINUS_EXPR:
9966 /* Non-zero iff operands of minus differ. */
9967 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9968 TREE_OPERAND (exp, 0),
9969 TREE_OPERAND (exp, 1)),
9970 NE, NE);
9971 break;
904762c8 9972
9973 case BIT_AND_EXPR:
9974 /* If we are AND'ing with a small constant, do this comparison in the
9975 smallest type that fits. If the machine doesn't have comparisons
9976 that small, it will be converted back to the wider comparison.
9977 This helps if we are testing the sign bit of a narrower object.
9978 combine can't do this for us because it can't know whether a
9979 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
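/* Worked example (illustrative): for `x & 0x80' with 32-bit ints,
   i = floor_log2 (0x80) = 7, mode_for_size (8, MODE_INT, 0) yields
   QImode, and when the target has a QImode compare the jump is done
   on the converted 8-bit value instead of the full-width AND. */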
0006469d 9980
9981 if (! SLOW_BYTE_ACCESS
9982 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9983 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9984 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9985 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9986 && (type = type_for_mode (mode, 1)) != 0
9987 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9988 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9989 != CODE_FOR_nothing))
9990 {
9991 do_jump (convert (type, exp), if_false_label, if_true_label);
9992 break;
9993 }
9994 goto normal;
904762c8 9995
9996 case TRUTH_NOT_EXPR:
9997 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9998 break;
0006469d 9999
ca695ac9 10000 case TRUTH_ANDIF_EXPR:
10001 {
10002 rtx seq1, seq2;
10003 tree cleanups, old_cleanups;
10004
10005 if (if_false_label == 0)
10006 if_false_label = drop_through_label = gen_label_rtx ();
10007 start_sequence ();
10008 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10009 seq1 = get_insns ();
10010 end_sequence ();
10011
10012 old_cleanups = cleanups_this_call;
10013 start_sequence ();
10014 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10015 seq2 = get_insns ();
26657dca 10016 cleanups = defer_cleanups_to (old_cleanups);
10017 end_sequence ();
10018
10019 if (cleanups)
10020 {
10021 rtx flag = gen_reg_rtx (word_mode);
10022 tree new_cleanups;
10023 tree cond;
10024
0f41302f 10025 /* Flag cleanups as not needed. */
10026 emit_move_insn (flag, const0_rtx);
10027 emit_insns (seq1);
10028
0f41302f 10029 /* Flag cleanups as needed. */
10030 emit_move_insn (flag, const1_rtx);
10031 emit_insns (seq2);
10032
10033 /* All cleanups must be on the function_obstack. */
10034 push_obstacks_nochange ();
10035 resume_temporary_allocation ();
10036
0f41302f 10037 /* Convert FLAG, which is an rtx, into a tree. */
10038 cond = make_node (RTL_EXPR);
10039 TREE_TYPE (cond) = integer_type_node;
10040 RTL_EXPR_RTL (cond) = flag;
10041 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
01842234 10042 cond = save_expr (cond);
10043
10044 new_cleanups = build (COND_EXPR, void_type_node,
10045 truthvalue_conversion (cond),
10046 cleanups, integer_zero_node);
10047 new_cleanups = fold (new_cleanups);
10048
10049 pop_obstacks ();
10050
3d195391 10051 /* Now add in the conditionalized cleanups. */
10052 cleanups_this_call
10053 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3d195391 10054 expand_eh_region_start ();
10055 }
10056 else
10057 {
10058 emit_insns (seq1);
10059 emit_insns (seq2);
10060 }
10061 }
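/* Descriptive note on the flag trick above: cleanups queued while
   expanding operand 1 must not run if the `&&' short-circuited, so
   FLAG is 0 on the short-circuit path and 1 once operand 1 has been
   entered, and each deferred cleanup was wrapped in
   COND_EXPR (FLAG, cleanups, nothing) accordingly. */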
ca695ac9 10062 break;
0006469d 10063
ca695ac9 10064 case TRUTH_ORIF_EXPR:
10065 {
10066 rtx seq1, seq2;
10067 tree cleanups, old_cleanups;
10068
10069 if (if_true_label == 0)
10070 if_true_label = drop_through_label = gen_label_rtx ();
10071 start_sequence ();
10072 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10073 seq1 = get_insns ();
10074 end_sequence ();
10075
10076 old_cleanups = cleanups_this_call;
10077 start_sequence ();
10078 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10079 seq2 = get_insns ();
26657dca 10080 cleanups = defer_cleanups_to (old_cleanups);
10081 end_sequence ();
10082
10083 if (cleanups)
10084 {
10085 rtx flag = gen_reg_rtx (word_mode);
10086 tree new_cleanups;
10087 tree cond;
10088
0f41302f 10089 /* Flag cleanups as not needed. */
10090 emit_move_insn (flag, const0_rtx);
10091 emit_insns (seq1);
10092
0f41302f 10093 /* Flag cleanups as needed. */
10094 emit_move_insn (flag, const1_rtx);
10095 emit_insns (seq2);
10096
10097 /* All cleanups must be on the function_obstack. */
10098 push_obstacks_nochange ();
10099 resume_temporary_allocation ();
10100
0f41302f 10101 /* Convert FLAG, which is an rtx, into a tree. */
10102 cond = make_node (RTL_EXPR);
10103 TREE_TYPE (cond) = integer_type_node;
10104 RTL_EXPR_RTL (cond) = flag;
10105 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
01842234 10106 cond = save_expr (cond);
10107
10108 new_cleanups = build (COND_EXPR, void_type_node,
10109 truthvalue_conversion (cond),
10110 cleanups, integer_zero_node);
10111 new_cleanups = fold (new_cleanups);
10112
10113 pop_obstacks ();
10114
3d195391 10115 /* Now add in the conditionalized cleanups. */
10116 cleanups_this_call
10117 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3d195391 10118 expand_eh_region_start ();
10119 }
10120 else
10121 {
10122 emit_insns (seq1);
10123 emit_insns (seq2);
10124 }
10125 }
ca695ac9 10126 break;
0006469d 10127
ca695ac9 10128 case COMPOUND_EXPR:
0088fcb1 10129 push_temp_slots ();
ca695ac9 10130 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
d80f96e9 10131 preserve_temp_slots (NULL_RTX);
ca695ac9 10132 free_temp_slots ();
0088fcb1 10133 pop_temp_slots ();
10134 emit_queue ();
10135 do_pending_stack_adjust ();
10136 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10137 break;
0006469d 10138
10139 case COMPONENT_REF:
10140 case BIT_FIELD_REF:
10141 case ARRAY_REF:
10142 {
10143 int bitsize, bitpos, unsignedp;
10144 enum machine_mode mode;
10145 tree type;
10146 tree offset;
10147 int volatilep = 0;
0006469d 10148
10149 /* Get description of this reference. We don't actually care
10150 about the underlying object here. */
10151 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10152 &mode, &unsignedp, &volatilep);
0006469d 10153
10154 type = type_for_size (bitsize, unsignedp);
10155 if (! SLOW_BYTE_ACCESS
10156 && type != 0 && bitsize >= 0
10157 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10158 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10159 != CODE_FOR_nothing))
10160 {
10161 do_jump (convert (type, exp), if_false_label, if_true_label);
10162 break;
10163 }
10164 goto normal;
10165 }
0006469d 10166
10167 case COND_EXPR:
10168 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10169 if (integer_onep (TREE_OPERAND (exp, 1))
10170 && integer_zerop (TREE_OPERAND (exp, 2)))
10171 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
904762c8 10172
10173 else if (integer_zerop (TREE_OPERAND (exp, 1))
10174 && integer_onep (TREE_OPERAND (exp, 2)))
10175 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0006469d 10176
10177 else
10178 {
10179 register rtx label1 = gen_label_rtx ();
10180 drop_through_label = gen_label_rtx ();
10181 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10182 /* Now the THEN-expression. */
10183 do_jump (TREE_OPERAND (exp, 1),
10184 if_false_label ? if_false_label : drop_through_label,
10185 if_true_label ? if_true_label : drop_through_label);
10186 /* In case the do_jump just above never jumps. */
10187 do_pending_stack_adjust ();
10188 emit_label (label1);
10189 /* Now the ELSE-expression. */
10190 do_jump (TREE_OPERAND (exp, 2),
10191 if_false_label ? if_false_label : drop_through_label,
10192 if_true_label ? if_true_label : drop_through_label);
10193 }
10194 break;
0006469d 10195
ca695ac9 10196 case EQ_EXPR:
10197 {
10198 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10199
10200 if (integer_zerop (TREE_OPERAND (exp, 1)))
10201 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10202 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
201012cb 10203 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10204 do_jump
10205 (fold
10206 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10207 fold (build (EQ_EXPR, TREE_TYPE (exp),
10208 fold (build1 (REALPART_EXPR,
10209 TREE_TYPE (inner_type),
0e8c9172 10210 TREE_OPERAND (exp, 0))),
10211 fold (build1 (REALPART_EXPR,
10212 TREE_TYPE (inner_type),
10213 TREE_OPERAND (exp, 1))))),
10214 fold (build (EQ_EXPR, TREE_TYPE (exp),
10215 fold (build1 (IMAGPART_EXPR,
10216 TREE_TYPE (inner_type),
0e8c9172 10217 TREE_OPERAND (exp, 0))),
10218 fold (build1 (IMAGPART_EXPR,
10219 TREE_TYPE (inner_type),
10220 TREE_OPERAND (exp, 1))))))),
10221 if_false_label, if_true_label);
10222 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10223 && !can_compare_p (TYPE_MODE (inner_type)))
10224 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10225 else
10226 comparison = compare (exp, EQ, EQ);
10227 break;
10228 }
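/* Descriptive note: for complex operands the code above re-enters
   do_jump as (REALPART (a) == REALPART (b)) && (IMAGPART (a) ==
   IMAGPART (b)), reusing the TRUTH_ANDIF_EXPR handling earlier in
   this switch. */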
0006469d 10229
ca695ac9 10230 case NE_EXPR:
10231 {
10232 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10233
10234 if (integer_zerop (TREE_OPERAND (exp, 1)))
10235 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10236 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
201012cb 10237 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10238 do_jump
10239 (fold
10240 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10241 fold (build (NE_EXPR, TREE_TYPE (exp),
10242 fold (build1 (REALPART_EXPR,
10243 TREE_TYPE (inner_type),
0e8c9172 10244 TREE_OPERAND (exp, 0))),
10245 fold (build1 (REALPART_EXPR,
10246 TREE_TYPE (inner_type),
10247 TREE_OPERAND (exp, 1))))),
10248 fold (build (NE_EXPR, TREE_TYPE (exp),
10249 fold (build1 (IMAGPART_EXPR,
10250 TREE_TYPE (inner_type),
0e8c9172 10251 TREE_OPERAND (exp, 0))),
10252 fold (build1 (IMAGPART_EXPR,
10253 TREE_TYPE (inner_type),
10254 TREE_OPERAND (exp, 1))))))),
10255 if_false_label, if_true_label);
10256 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10257 && !can_compare_p (TYPE_MODE (inner_type)))
10258 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10259 else
10260 comparison = compare (exp, NE, NE);
10261 break;
10262 }
0006469d 10263
10264 case LT_EXPR:
10265 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10266 == MODE_INT)
10267 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10268 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10269 else
10270 comparison = compare (exp, LT, LTU);
10271 break;
0006469d 10272
10273 case LE_EXPR:
10274 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10275 == MODE_INT)
10276 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10277 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10278 else
10279 comparison = compare (exp, LE, LEU);
10280 break;
0006469d 10281
10282 case GT_EXPR:
10283 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10284 == MODE_INT)
10285 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10286 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10287 else
10288 comparison = compare (exp, GT, GTU);
10289 break;
0006469d 10290
10291 case GE_EXPR:
10292 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10293 == MODE_INT)
10294 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10295 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10296 else
10297 comparison = compare (exp, GE, GEU);
10298 break;
0006469d 10299
10300 default:
10301 normal:
10302 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10303#if 0
10304 /* This is not needed any more and causes poor code since it causes
10305 comparisons and tests from non-SI objects to have different code
10306 sequences. */
10307 /* Copy to register to avoid generating bad insns by cse
10308 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10309 if (!cse_not_expected && GET_CODE (temp) == MEM)
10310 temp = copy_to_reg (temp);
10311#endif
10312 do_pending_stack_adjust ();
10313 if (GET_CODE (temp) == CONST_INT)
10314 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10315 else if (GET_CODE (temp) == LABEL_REF)
10316 comparison = const_true_rtx;
10317 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10318 && !can_compare_p (GET_MODE (temp)))
10319 /* Note swapping the labels gives us not-equal. */
10320 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10321 else if (GET_MODE (temp) != VOIDmode)
10322 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10323 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10324 GET_MODE (temp), NULL_RTX, 0);
10325 else
10326 abort ();
10327 }
0006469d 10328
10329 /* Do any postincrements in the expression that was tested. */
10330 emit_queue ();
0006469d 10331
10332 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10333 straight into a conditional jump instruction as the jump condition.
10334 Otherwise, all the work has been done already. */
0006469d 10335
ca695ac9 10336 if (comparison == const_true_rtx)
0006469d 10337 {
10338 if (if_true_label)
10339 emit_jump (if_true_label);
0006469d 10340 }
10341 else if (comparison == const0_rtx)
10342 {
10343 if (if_false_label)
10344 emit_jump (if_false_label);
10345 }
10346 else if (comparison)
10347 do_jump_for_compare (comparison, if_false_label, if_true_label);
0006469d 10348
ca695ac9 10349 if (drop_through_label)
0006469d 10350 {
10351 /* If do_jump produces code that might be jumped around,
10352 do any stack adjusts from that code, before the place
10353 where control merges in. */
10354 do_pending_stack_adjust ();
10355 emit_label (drop_through_label);
10356 }
10357}
10358\f
10359/* Given a comparison expression EXP for values too wide to be compared
10360 with one insn, test the comparison and jump to the appropriate label.
10361 The code of EXP is ignored; we always test GT if SWAP is 0,
10362 and LT if SWAP is 1. */
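/* Illustrative walk-through: comparing two DImode values on a 32-bit
   target gives nwords == 2.  The high words are compared first, and
   only they use a signed GT, since only they carry the sign; if they
   are unequal the NE test in the loop settles the jump, and only
   equal high words let the loop fall through to an unsigned GTU on
   the low words. */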
0006469d 10363
10364static void
10365do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10366 tree exp;
10367 int swap;
10368 rtx if_false_label, if_true_label;
10369{
10370 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10371 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10372 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10373 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10374 rtx drop_through_label = 0;
10375 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10376 int i;
0006469d 10377
10378 if (! if_true_label || ! if_false_label)
10379 drop_through_label = gen_label_rtx ();
10380 if (! if_true_label)
10381 if_true_label = drop_through_label;
10382 if (! if_false_label)
10383 if_false_label = drop_through_label;
0006469d 10384
10385 /* Compare a word at a time, high order first. */
10386 for (i = 0; i < nwords; i++)
10387 {
10388 rtx comp;
10389 rtx op0_word, op1_word;
0006469d 10390
10391 if (WORDS_BIG_ENDIAN)
10392 {
10393 op0_word = operand_subword_force (op0, i, mode);
10394 op1_word = operand_subword_force (op1, i, mode);
10395 }
10396 else
10397 {
10398 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10399 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10400 }
0006469d 10401
10402 /* All but high-order word must be compared as unsigned. */
10403 comp = compare_from_rtx (op0_word, op1_word,
10404 (unsignedp || i > 0) ? GTU : GT,
10405 unsignedp, word_mode, NULL_RTX, 0);
10406 if (comp == const_true_rtx)
10407 emit_jump (if_true_label);
10408 else if (comp != const0_rtx)
10409 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 10410
10411 /* Consider lower words only if these are equal. */
10412 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10413 NULL_RTX, 0);
10414 if (comp == const_true_rtx)
10415 emit_jump (if_false_label);
10416 else if (comp != const0_rtx)
10417 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10418 }
0006469d 10419
10420 if (if_false_label)
10421 emit_jump (if_false_label);
10422 if (drop_through_label)
10423 emit_label (drop_through_label);
10424}
10425
10426/* Compare OP0 with OP1, word at a time, in mode MODE.
10427 UNSIGNEDP says to do unsigned comparison.
10428 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
904762c8 10429
2e5ec6cf 10430void
10431do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10432 enum machine_mode mode;
10433 int unsignedp;
10434 rtx op0, op1;
10435 rtx if_false_label, if_true_label;
0006469d 10436{
10437 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10438 rtx drop_through_label = 0;
10439 int i;
0006469d 10440
10441 if (! if_true_label || ! if_false_label)
10442 drop_through_label = gen_label_rtx ();
10443 if (! if_true_label)
10444 if_true_label = drop_through_label;
10445 if (! if_false_label)
10446 if_false_label = drop_through_label;
0006469d 10447
10448 /* Compare a word at a time, high order first. */
10449 for (i = 0; i < nwords; i++)
0006469d 10450 {
10451 rtx comp;
10452 rtx op0_word, op1_word;
0006469d 10453
10454 if (WORDS_BIG_ENDIAN)
10455 {
10456 op0_word = operand_subword_force (op0, i, mode);
10457 op1_word = operand_subword_force (op1, i, mode);
10458 }
10459 else
10460 {
10461 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10462 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10463 }
0006469d 10464
10465 /* All but high-order word must be compared as unsigned. */
10466 comp = compare_from_rtx (op0_word, op1_word,
10467 (unsignedp || i > 0) ? GTU : GT,
10468 unsignedp, word_mode, NULL_RTX, 0);
10469 if (comp == const_true_rtx)
10470 emit_jump (if_true_label);
10471 else if (comp != const0_rtx)
10472 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 10473
10474 /* Consider lower words only if these are equal. */
10475 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10476 NULL_RTX, 0);
10477 if (comp == const_true_rtx)
10478 emit_jump (if_false_label);
10479 else if (comp != const0_rtx)
10480 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10481 }
0006469d 10482
10483 if (if_false_label)
10484 emit_jump (if_false_label);
10485 if (drop_through_label)
10486 emit_label (drop_through_label);
0006469d 10487}
bbf6f052 10488
10489/* Given an EQ_EXPR expression EXP for values too wide to be compared
10490 with one insn, test the comparison and jump to the appropriate label. */
10491
10492static void
10493do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10494 tree exp;
10495 rtx if_false_label, if_true_label;
bbf6f052 10496{
10497 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10498 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10499 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10500 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10501 int i;
10502 rtx drop_through_label = 0;
bbf6f052 10503
10504 if (! if_false_label)
10505 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10506
10507 for (i = 0; i < nwords; i++)
10508 {
10509 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10510 operand_subword_force (op1, i, mode),
10511 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10512 word_mode, NULL_RTX, 0);
10513 if (comp == const_true_rtx)
10514 emit_jump (if_false_label);
10515 else if (comp != const0_rtx)
10516 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10517 }
1499e0a8 10518
10519 if (if_true_label)
10520 emit_jump (if_true_label);
10521 if (drop_through_label)
10522 emit_label (drop_through_label);
10523}
10524\f
10525/* Jump according to whether OP0 is 0.
10526 We assume that OP0 has an integer mode that is too wide
10527 for the available compare insns. */
1499e0a8 10528
10529static void
10530do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10531 rtx op0;
10532 rtx if_false_label, if_true_label;
10533{
10534 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10535 int i;
10536 rtx drop_through_label = 0;
1499e0a8 10537
10538 if (! if_false_label)
10539 drop_through_label = if_false_label = gen_label_rtx ();
1499e0a8 10540
10541 for (i = 0; i < nwords; i++)
10542 {
10543 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10544 GET_MODE (op0)),
10545 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10546 if (comp == const_true_rtx)
10547 emit_jump (if_false_label);
10548 else if (comp != const0_rtx)
10549 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10550 }
1499e0a8 10551
10552 if (if_true_label)
10553 emit_jump (if_true_label);
10554 if (drop_through_label)
10555 emit_label (drop_through_label);
10556}
bbf6f052 10557
10558/* Given a comparison expression in rtl form, output conditional branches to
10559 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
bbf6f052 10560
10561static void
10562do_jump_for_compare (comparison, if_false_label, if_true_label)
10563 rtx comparison, if_false_label, if_true_label;
10564{
10565 if (if_true_label)
a358cee0 10566 {
10567 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10568 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10569 else
10570 abort ();
a358cee0 10571
10572 if (if_false_label)
10573 emit_jump (if_false_label);
c980ac49 10574 }
ca695ac9 10575 else if (if_false_label)
bbf6f052 10576 {
ca695ac9 10577 rtx insn;
f12f485a 10578 rtx prev = get_last_insn ();
ca695ac9 10579 rtx branch = 0;
bbf6f052 10580
10581 /* Output the branch with the opposite condition. Then try to invert
10582 what is generated. If more than one insn is a branch, or if the
10583 branch is not the last insn written, abort. If we can't invert
10584 the branch, make a true label, redirect this jump to that,
10585 emit a jump to the false label and define the true label. */
bbf6f052 10586
ca695ac9 10587 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
34661f5c 10588 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10589 else
10590 abort ();
bbf6f052 10591
10592 /* Here we get the first insn that was just emitted. It used to be the
10593 case that, on some machines, emitting the branch would discard
10594 the previous compare insn and emit a replacement. This isn't
10595 done anymore, but abort if we see that PREV is deleted. */
10596
ca695ac9 10597 if (prev == 0)
ca695ac9 10598 insn = get_insns ();
10599 else if (INSN_DELETED_P (prev))
10600 abort ();
ca695ac9 10601 else
41dfd40c 10602 insn = NEXT_INSN (prev);
bbf6f052 10603
34661f5c 10604 for (; insn; insn = NEXT_INSN (insn))
10605 if (GET_CODE (insn) == JUMP_INSN)
10606 {
10607 if (branch)
10608 abort ();
10609 branch = insn;
10610 }
10611
10612 if (branch != get_last_insn ())
10613 abort ();
10614
127e4d19 10615 JUMP_LABEL (branch) = if_false_label;
10616 if (! invert_jump (branch, if_false_label))
10617 {
10618 if_true_label = gen_label_rtx ();
10619 redirect_jump (branch, if_true_label);
10620 emit_jump (if_false_label);
10621 emit_label (if_true_label);
10622 }
10623 }
10624}
10625\f
10626/* Generate code for a comparison expression EXP
10627 (including code to compute the values to be compared)
10628 and set (CC0) according to the result.
10629 SIGNED_CODE should be the rtx operation for this comparison for
10630 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10631
10632 We force a stack adjustment unless there are currently
10633 things pushed on the stack that aren't yet used. */
10634
10635static rtx
10636compare (exp, signed_code, unsigned_code)
10637 register tree exp;
10638 enum rtx_code signed_code, unsigned_code;
10639{
10640 register rtx op0
10641 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10642 register rtx op1
10643 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10644 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10645 register enum machine_mode mode = TYPE_MODE (type);
10646 int unsignedp = TREE_UNSIGNED (type);
10647 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
bbf6f052 10648
10649#ifdef HAVE_canonicalize_funcptr_for_compare
10650 /* If function pointers need to be "canonicalized" before they can
10651 be reliably compared, then canonicalize them. */
10652 if (HAVE_canonicalize_funcptr_for_compare
10653 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10654 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10655 == FUNCTION_TYPE))
10656 {
10657 rtx new_op0 = gen_reg_rtx (mode);
10658
10659 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10660 op0 = new_op0;
10661 }
10662
10663 if (HAVE_canonicalize_funcptr_for_compare
10664 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10665 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10666 == FUNCTION_TYPE))
10667 {
10668 rtx new_op1 = gen_reg_rtx (mode);
10669
10670 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10671 op1 = new_op1;
10672 }
10673#endif
10674
10675 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10676 ((mode == BLKmode)
10677 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10678 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10679}
bbf6f052 10680
10681/* Like compare but expects the values to compare as two rtx's.
10682 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 10683
10684 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10685 compared.
bbf6f052 10686
10687 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10688 size of MODE should be used. */
bbf6f052 10689
10690rtx
10691compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10692 register rtx op0, op1;
10693 enum rtx_code code;
10694 int unsignedp;
10695 enum machine_mode mode;
10696 rtx size;
10697 int align;
10698{
10699 rtx tem;
bbf6f052 10700
10701 /* If one operand is constant, make it the second one. Only do this
10702 if the other operand is not constant as well. */
bbf6f052 10703
10704 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10705 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10706 {
10707 tem = op0;
10708 op0 = op1;
10709 op1 = tem;
10710 code = swap_condition (code);
10711 }
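/* Example (illustrative): `(const_int 4) LT reg' becomes `reg GT
   (const_int 4)' here; swap_condition preserves the meaning while
   giving later passes the constant in the canonical second position. */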
bbf6f052 10712
ca695ac9 10713 if (flag_force_mem)
bbf6f052 10714 {
10715 op0 = force_not_mem (op0);
10716 op1 = force_not_mem (op1);
10717 }
bbf6f052 10718
ca695ac9 10719 do_pending_stack_adjust ();
bbf6f052 10720
10721 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10722 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10723 return tem;
bbf6f052 10724
10725#if 0
10726 /* There's no need to do this now that combine.c can eliminate lots of
10727 sign extensions. This can be less efficient in certain cases on other
0f41302f 10728 machines. */
bbf6f052 10729
10730 /* If this is a signed equality comparison, we can do it as an
10731 unsigned comparison since zero-extension is cheaper than sign
10732 extension and comparisons with zero are done as unsigned. This is
10733 the case even on machines that can do fast sign extension, since
10734 zero-extension is easier to combine with other operations than
10735 sign-extension is. If we are comparing against a constant, we must
10736 convert it to what it would look like unsigned. */
10737 if ((code == EQ || code == NE) && ! unsignedp
10738 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10739 {
10740 if (GET_CODE (op1) == CONST_INT
10741 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10742 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10743 unsignedp = 1;
bbf6f052 10744 }
10745#endif
10746
10747 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
bbf6f052 10748
ca695ac9 10749 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10750}
10751\f
10752/* Generate code to calculate EXP using a store-flag instruction
10753 and return an rtx for the result. EXP is either a comparison
10754 or a TRUTH_NOT_EXPR whose operand is a comparison.
bbf6f052 10755
ca695ac9 10756 If TARGET is nonzero, store the result there if convenient.
bbf6f052 10757
10758 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10759 cheap.
bbf6f052 10760
10761 Return zero if there is no suitable set-flag instruction
10762 available on this machine.
bbf6f052 10763
10764 Once expand_expr has been called on the arguments of the comparison,
10765 we are committed to doing the store flag, since it is not safe to
10766 re-evaluate the expression. We emit the store-flag insn by calling
10767 emit_store_flag, but only expand the arguments if we have a reason
10768 to believe that emit_store_flag will be successful. If we think that
10769 it will, but it isn't, we have to simulate the store-flag with a
10770 set/jump/set sequence. */
bbf6f052 10771
10772static rtx
10773do_store_flag (exp, target, mode, only_cheap)
10774 tree exp;
10775 rtx target;
10776 enum machine_mode mode;
10777 int only_cheap;
bbf6f052 10778{
10779 enum rtx_code code;
10780 tree arg0, arg1, type;
10781 tree tem;
10782 enum machine_mode operand_mode;
10783 int invert = 0;
10784 int unsignedp;
10785 rtx op0, op1;
10786 enum insn_code icode;
10787 rtx subtarget = target;
10788 rtx result, label, pattern, jump_pat;
bbf6f052 10789
10790 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10791 result at the end. We can't simply invert the test since it would
10792 have already been inverted if it were valid. This case occurs for
10793 some floating-point comparisons. */
10794
10795 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10796 invert = 1, exp = TREE_OPERAND (exp, 0);
10797
10798 arg0 = TREE_OPERAND (exp, 0);
10799 arg1 = TREE_OPERAND (exp, 1);
10800 type = TREE_TYPE (arg0);
10801 operand_mode = TYPE_MODE (type);
10802 unsignedp = TREE_UNSIGNED (type);
10803
10804 /* We won't bother with BLKmode store-flag operations because it would mean
10805 passing a lot of information to emit_store_flag. */
10806 if (operand_mode == BLKmode)
10807 return 0;
10808
10809 /* We won't bother with store-flag operations involving function pointers
10810 when function pointers must be canonicalized before comparisons. */
10811#ifdef HAVE_canonicalize_funcptr_for_compare
10812 if (HAVE_canonicalize_funcptr_for_compare
10813 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10814 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10815 == FUNCTION_TYPE))
10816 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10817 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10818 == FUNCTION_TYPE))))
10819 return 0;
10820#endif
10821
10822 STRIP_NOPS (arg0);
10823 STRIP_NOPS (arg1);
10824
10825 /* Get the rtx comparison code to use. We know that EXP is a comparison
10826 operation of some type. Some comparisons against 1 and -1 can be
10827 converted to comparisons with zero. Do so here so that the tests
10828 below will be aware that we have a comparison with zero. These
10829 tests will not catch constants in the first operand, but constants
10830 are rarely passed as the first operand. */
10831
10832 switch (TREE_CODE (exp))
10833 {
10834 case EQ_EXPR:
10835 code = EQ;
10836 break;
10837 case NE_EXPR:
10838 code = NE;
10839 break;
10840 case LT_EXPR:
10841 if (integer_onep (arg1))
10842 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10843 else
10844 code = unsignedp ? LTU : LT;
10845 break;
10846 case LE_EXPR:
10847 if (! unsignedp && integer_all_onesp (arg1))
10848 arg1 = integer_zero_node, code = LT;
10849 else
10850 code = unsignedp ? LEU : LE;
10851 break;
10852 case GT_EXPR:
10853 if (! unsignedp && integer_all_onesp (arg1))
10854 arg1 = integer_zero_node, code = GE;
10855 else
10856 code = unsignedp ? GTU : GT;
10857 break;
10858 case GE_EXPR:
10859 if (integer_onep (arg1))
10860 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10861 else
10862 code = unsignedp ? GEU : GE;
10863 break;
10864 default:
10865 abort ();
10866 }
bbf6f052 10867
10868 /* Put a constant second. */
10869 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
bbf6f052 10870 {
10871 tem = arg0; arg0 = arg1; arg1 = tem;
10872 code = swap_condition (code);
bbf6f052 10873 }
bbf6f052 10874
10875 /* If this is an equality or inequality test of a single bit, we can
10876 do this by shifting the bit being tested to the low-order bit and
10877 masking the result with the constant 1. If the condition was EQ,
10878 we xor it with 1. This does not require an scc insn and is faster
10879 than an scc insn even if we have it. */
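/* Worked example (illustrative): `(x & 4) != 0' has bitnum == 2 and
   is computed as `(x >> 2) & 1'; for the EQ form the result is then
   XORed with 1.  When the tested bit is the sign bit
   (bitnum == TYPE_PRECISION (type) - 1) the final AND is omitted,
   as the code below arranges. */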
bbf6f052 10880
10881 if ((code == NE || code == EQ)
10882 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10883 && integer_pow2p (TREE_OPERAND (arg0, 1))
10884 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10885 {
10886 tree inner = TREE_OPERAND (arg0, 0);
10887 HOST_WIDE_INT tem;
10888 int bitnum;
ca695ac9 10889 int ops_unsignedp;
bbf6f052 10890
10891 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10892 NULL_RTX, VOIDmode, 0));
10893 /* In this case, immed_double_const will sign extend the value to make
10894 it look the same on the host and target. We must remove the
10895 sign-extension before calling exact_log2, since exact_log2 will
10896 fail for negative values. */
10897 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
10898 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
10899 /* We don't use the obvious constant shift to generate the mask,
10900 because that generates compiler warnings when BITS_PER_WORD is
10901 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10902 code is unreachable in that case. */
10903 tem = tem & GET_MODE_MASK (word_mode);
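/* Numeric example (illustrative): testing bit 31 of a 32-bit word on
   a 64-bit host, TREE_INT_CST_LOW returns the sign-extended value
   0xffffffff80000000; masking with GET_MODE_MASK (word_mode) restores
   0x80000000, for which exact_log2 correctly yields 31. */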
10904 bitnum = exact_log2 (tem);
10905
10906 /* If INNER is a right shift of a constant and it plus BITNUM does
10907 not overflow, adjust BITNUM and INNER. */
bbf6f052 10908
10909 if (TREE_CODE (inner) == RSHIFT_EXPR
10910 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10911 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10912 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10913 < TYPE_PRECISION (type)))
10914 {
10915 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10916 inner = TREE_OPERAND (inner, 0);
10917 }
bbf6f052 10918
10919 /* If we are going to be able to omit the AND below, we must do our
10920 operations as unsigned. If we must use the AND, we have a choice.
10921 Normally unsigned is faster, but for some machines signed is. */
10922 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10923#ifdef LOAD_EXTEND_OP
10924 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10925#else
10926 : 1
10927#endif
10928 );
bbf6f052 10929
10930 if (subtarget == 0 || GET_CODE (subtarget) != REG
10931 || GET_MODE (subtarget) != operand_mode
10932 || ! safe_from_p (subtarget, inner))
10933 subtarget = 0;
e7c33f54 10934
ca695ac9 10935 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 10936
10937 if (bitnum != 0)
10938 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
0c316b20 10939 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 10940
10941 if (GET_MODE (op0) != mode)
10942 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 10943
ca695ac9 10944 if ((code == EQ && ! invert) || (code == NE && invert))
0c316b20 10945 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
ca695ac9 10946 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 10947
10948 /* Put the AND last so it can combine with more things. */
10949 if (bitnum != TYPE_PRECISION (type) - 1)
0c316b20 10950 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 10951
10952 return op0;
10953 }
bbf6f052 10954
10955 /* Now see if we are likely to be able to do this. Return if not. */
10956 if (! can_compare_p (operand_mode))
10957 return 0;
10958 icode = setcc_gen_code[(int) code];
10959 if (icode == CODE_FOR_nothing
10960 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10961 {
10962 /* We can only do this if it is one of the special cases that
10963 can be handled without an scc insn. */
10964 if ((code == LT && integer_zerop (arg1))
10965 || (! only_cheap && code == GE && integer_zerop (arg1)))
10966 ;
10967 else if (BRANCH_COST >= 0
10968 && ! only_cheap && (code == NE || code == EQ)
10969 && TREE_CODE (type) != REAL_TYPE
10970 && ((abs_optab->handlers[(int) operand_mode].insn_code
10971 != CODE_FOR_nothing)
10972 || (ffs_optab->handlers[(int) operand_mode].insn_code
10973 != CODE_FOR_nothing)))
10974 ;
10975 else
10976 return 0;
10977 }
10978
10979 preexpand_calls (exp);
10980 if (subtarget == 0 || GET_CODE (subtarget) != REG
10981 || GET_MODE (subtarget) != operand_mode
10982 || ! safe_from_p (subtarget, arg1))
10983 subtarget = 0;
bbf6f052 10984
10985 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10986 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
bbf6f052 10987
10988 if (target == 0)
10989 target = gen_reg_rtx (mode);
bbf6f052 10990
10991 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10992 because, if the emit_store_flag does anything it will succeed and
10993 OP0 and OP1 will not be used subsequently. */
bbf6f052 10994
10995 result = emit_store_flag (target, code,
10996 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10997 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10998 operand_mode, unsignedp, 1);
bbf6f052 10999
11000 if (result)
11001 {
11002 if (invert)
11003 result = expand_binop (mode, xor_optab, result, const1_rtx,
11004 result, 0, OPTAB_LIB_WIDEN);
11005 return result;
11006 }
bbf6f052 11007
11008 /* If this failed, we have to do this with set/compare/jump/set code. */
11009 if (target == 0 || GET_CODE (target) != REG
11010 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11011 target = gen_reg_rtx (GET_MODE (target));
bbf6f052 11012
11013 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11014 result = compare_from_rtx (op0, op1, code, unsignedp,
11015 operand_mode, NULL_RTX, 0);
11016 if (GET_CODE (result) == CONST_INT)
11017 return (((result == const0_rtx && ! invert)
11018 || (result != const0_rtx && invert))
11019 ? const0_rtx : const1_rtx);
bbf6f052 11020
11021 label = gen_label_rtx ();
11022 if (bcc_gen_fctn[(int) code] == 0)
11023 abort ();
bbf6f052 11024
11025 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11026 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11027 emit_label (label);
bbf6f052 11028
11029 return target;
11030}
11031\f
11032/* Generate a tablejump instruction (used for switch statements). */
bbf6f052 11033
ca695ac9 11034#ifdef HAVE_tablejump
bbf6f052 11035
11036/* INDEX is the value being switched on, with the lowest value
11037 in the table already subtracted.
11038 MODE is its expected mode (needed if INDEX is constant).
11039 RANGE is the length of the jump table.
11040 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
bbf6f052 11041
11042 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11043 index value is out of range. */
bbf6f052 11044
11045void
11046do_tablejump (index, mode, range, table_label, default_label)
11047 rtx index, range, table_label, default_label;
11048 enum machine_mode mode;
11049{
11050 register rtx temp, vector;
bbf6f052 11051
11052 /* Do an unsigned comparison (in the proper mode) between the index
11053 expression and the value which represents the length of the range.
11054 Since we just finished subtracting the lower bound of the range
11055 from the index expression, this comparison allows us to simultaneously
11056 check that the original index expression value is both greater than
11057 or equal to the minimum value of the range and less than or equal to
11058 the maximum value of the range. */
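/* Worked example (illustrative): for `switch' cases 5..8, INDEX
   arrives here as orig - 5 and RANGE is 3.  An original value of 4
   becomes the unsigned INDEX 0xffffffff, so the single GTU branch
   below rejects values under 5 and over 8 alike. */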
bbf6f052 11059
11060 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11061 emit_jump_insn (gen_bgtu (default_label));
bbf6f052 11062
11063 /* If index is in range, it must fit in Pmode.
11064 Convert to Pmode so we can index with it. */
11065 if (mode != Pmode)
11066 index = convert_to_mode (Pmode, index, 1);
bbf6f052 11067
11068 /* Don't let a MEM slip through, because then INDEX that comes
11069 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11070 and break_out_memory_refs will go to work on it and mess it up. */
11071#ifdef PIC_CASE_VECTOR_ADDRESS
11072 if (flag_pic && GET_CODE (index) != REG)
11073 index = copy_to_mode_reg (Pmode, index);
11074#endif
bbf6f052 11075
ca695ac9
JB
11076 /* If flag_force_addr were to affect this address
11077 it could interfere with the tricky assumptions made
11078 about addresses that contain label-refs,
11079 which may be valid only very near the tablejump itself. */
11080 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11081 GET_MODE_SIZE, because this indicates how large insns are. The other
11082 uses should all be Pmode, because they are addresses. This code
11083 could fail if addresses and insns are not the same size. */
11084 index = gen_rtx (PLUS, Pmode,
11085 gen_rtx (MULT, Pmode, index,
11086 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11087 gen_rtx (LABEL_REF, Pmode, table_label));
11088#ifdef PIC_CASE_VECTOR_ADDRESS
11089 if (flag_pic)
11090 index = PIC_CASE_VECTOR_ADDRESS (index);
11091 else
11092#endif
11093 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11094 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11095 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11096 RTX_UNCHANGING_P (vector) = 1;
11097 convert_move (temp, vector, 0);
bbf6f052 11098
ca695ac9 11099 emit_jump_insn (gen_tablejump (temp, table_label));
bbf6f052 11100
11101#ifndef CASE_VECTOR_PC_RELATIVE
11102 /* If we are generating PIC code or if the table is PC-relative, the
11103 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11104 if (! flag_pic)
11105 emit_barrier ();
bbf6f052 11106#endif
ca695ac9 11107}
bbf6f052 11108
ca695ac9 11109#endif /* HAVE_tablejump */
bbf6f052 11110
bbf6f052 11111
11112/* Emit a suitable bytecode to load a value from memory, assuming a pointer
11113 to that value is on the top of the stack. The resulting type is TYPE, and
0f41302f 11114 the source declaration is DECL. */
bbf6f052 11115
11116void
11117bc_load_memory (type, decl)
11118 tree type, decl;
11119{
11120 enum bytecode_opcode opcode;
11121
11122
11123 /* Bit fields are special. We only know about signed and
11124 unsigned ints, and enums. The latter are treated as
0f41302f 11125 signed integers. */
11126
11127 if (DECL_BIT_FIELD (decl))
11128 if (TREE_CODE (type) == ENUMERAL_TYPE
11129 || TREE_CODE (type) == INTEGER_TYPE)
11130 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11131 else
11132 abort ();
11133 else
0f41302f 11134 /* See corresponding comment in bc_store_memory(). */
11135 if (TYPE_MODE (type) == BLKmode
11136 || TYPE_MODE (type) == VOIDmode)
11137 return;
11138 else
6bd6178d 11139 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
bbf6f052 11140
11141 if (opcode == neverneverland)
11142 abort ();
11143
11144 bc_emit_bytecode (opcode);
11145
11146#ifdef DEBUG_PRINT_CODE
11147 fputc ('\n', stderr);
11148#endif
bbf6f052 11149}
bbf6f052 11150
bbf6f052 11151
11152/* Store the contents of the second stack slot to the address in the
11153 top stack slot. DECL is the declaration of the destination and is used
0f41302f 11154 to determine whether we're dealing with a bitfield. */
bbf6f052 11155
11156void
11157bc_store_memory (type, decl)
11158 tree type, decl;
11159{
11160 enum bytecode_opcode opcode;
11161
11162
11163 if (DECL_BIT_FIELD (decl))
f81497d9 11164 {
11165 if (TREE_CODE (type) == ENUMERAL_TYPE
11166 || TREE_CODE (type) == INTEGER_TYPE)
11167 opcode = sstoreBI;
f81497d9 11168 else
ca695ac9 11169 abort ();
f81497d9 11170 }
11171 else
11172 if (TYPE_MODE (type) == BLKmode)
11173 {
11174 /* Copy structure. This expands to a block copy instruction, storeBLK.
11175 In addition to the arguments expected by the other store instructions,
11176 it also expects a type size (SImode) on top of the stack, which is the
11177 structure size in size units (usually bytes). The first two arguments
11178 are already on the stack, so we just put the size on level 1. For some
11179 other languages the size may be variable; this is why we don't encode
0f41302f 11180 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11181
11182 bc_expand_expr (TYPE_SIZE (type));
11183 opcode = storeBLK;
11184 }
11185 else
6bd6178d 11186 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
f81497d9 11187
11188 if (opcode == neverneverland)
11189 abort ();
11190
11191 bc_emit_bytecode (opcode);
11192
11193#ifdef DEBUG_PRINT_CODE
11194 fputc ('\n', stderr);
11195#endif
11196}
11197
f81497d9 11198
11199/* Allocate local stack space sufficient to hold a value of the given
11200 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11201 integral power of 2. A special case is locals of type VOID, which
11202 have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
11203 remapped into the corresponding attribute of SI. */
11204
11205rtx
11206bc_allocate_local (size, alignment)
11207 int size, alignment;
f81497d9 11208{
11209 rtx retval;
11210 int byte_alignment;
f81497d9 11211
11212 if (size < 0)
11213 abort ();
f81497d9 11214
11215 /* Normalize size and alignment */
11216 if (!size)
11217 size = UNITS_PER_WORD;
bbf6f052 11218
11219 if (alignment < BITS_PER_UNIT)
11220 byte_alignment = 1 << (INT_ALIGN - 1);
11221 else
11222 /* Align */
11223 byte_alignment = alignment / BITS_PER_UNIT;
bbf6f052 11224
11225 if (local_vars_size & (byte_alignment - 1))
11226 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
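/* Example (illustrative): with local_vars_size == 5 and
   byte_alignment == 4, the test sees 5 & 3 == 1 and advances
   local_vars_size by 3 to the next 4-byte boundary, 8, before the
   slot is carved out below. */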
bbf6f052 11227
11228 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11229 local_vars_size += size;
bbf6f052 11230
ca695ac9 11231 return retval;
11232}
11233
bbf6f052 11234
ca695ac9 11235/* Allocate variable-sized local array. Variable-sized arrays are
0f41302f 11236 actually pointers to the address in memory where they are stored. */
11237
11238rtx
11239bc_allocate_variable_array (size)
11240 tree size;
bbf6f052 11241{
11242 rtx retval;
11243 const int ptralign = (1 << (PTR_ALIGN - 1));
bbf6f052 11244
11245 /* Align pointer */
11246 if (local_vars_size & ptralign)
11247 local_vars_size += ptralign - (local_vars_size & ptralign);
bbf6f052 11248
11249 /* Note down local space needed: pointer to block; also return
11250 dummy rtx */
bbf6f052 11251
11252 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11253 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11254 return retval;
bbf6f052 11255}
bbf6f052 11256
bbf6f052 11257
ca695ac9 11258/* Push the machine address for the given external variable offset. */
0f41302f 11259
11260void
11261bc_load_externaddr (externaddr)
11262 rtx externaddr;
11263{
11264 bc_emit_bytecode (constP);
11265 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11266 BYTECODE_BC_LABEL (externaddr)->offset);
bbf6f052 11267
11268#ifdef DEBUG_PRINT_CODE
11269 fputc ('\n', stderr);
11270#endif
11271}
11272
bbf6f052 11273
ca695ac9 11274/* Like above, but expects an IDENTIFIER. */
0f41302f 11275
11276void
11277bc_load_externaddr_id (id, offset)
11278 tree id;
11279 int offset;
11280{
11281 if (!IDENTIFIER_POINTER (id))
11282 abort ();
bbf6f052 11283
ca695ac9 11284 bc_emit_bytecode (constP);
3d8e9bc2 11285 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
bbf6f052 11286
11287#ifdef DEBUG_PRINT_CODE
11288 fputc ('\n', stderr);
11289#endif
11290}
bbf6f052 11291
bbf6f052 11292
ca695ac9 11293/* Push the machine address for the given local variable offset. */
0f41302f 11294
11295void
11296bc_load_localaddr (localaddr)
11297 rtx localaddr;
11298{
e7a42772 11299 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
bbf6f052 11300}
bbf6f052 11301
bbf6f052 11302
ca695ac9 11303/* Push the machine address for the given parameter offset.
11304 NOTE: offset is in bits. */
11305
11306void
11307bc_load_parmaddr (parmaddr)
11308 rtx parmaddr;
bbf6f052 11309{
11310 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11311 / BITS_PER_UNIT));
ca695ac9 11312}
bbf6f052 11313
11314
11315/* Convert a[i] into *(a + i). */
0f41302f 11316
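/* Spelled out (illustrative): the index is widened to pointer
   precision and scaled by size_in_bytes of the element type, so with
   4-byte ints `a[i]' is rewritten as `*(a + i * 4)' at the tree
   level. */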
11317tree
11318bc_canonicalize_array_ref (exp)
11319 tree exp;
11320{
11321 tree type = TREE_TYPE (exp);
11322 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11323 TREE_OPERAND (exp, 0));
11324 tree index = TREE_OPERAND (exp, 1);
11325
11326
11327 /* Convert the integer argument to a type the same size as a pointer
11328 so the multiply won't overflow spuriously. */
11329
11330 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11331 index = convert (type_for_size (POINTER_SIZE, 0), index);
11332
11333 /* The array address isn't volatile even if the array is.
11334 (Of course this isn't terribly relevant since the bytecode
11335 translator treats nearly everything as volatile anyway.) */
11336 TREE_THIS_VOLATILE (array_adr) = 0;
11337
11338 return build1 (INDIRECT_REF, type,
11339 fold (build (PLUS_EXPR,
11340 TYPE_POINTER_TO (type),
11341 array_adr,
11342 fold (build (MULT_EXPR,
11343 TYPE_POINTER_TO (type),
11344 index,
11345 size_in_bytes (type))))));
11346}
11347
bbf6f052 11348
11349/* Load the address of the component referenced by the given
11350 COMPONENT_REF expression.
bbf6f052 11351
0f41302f 11352 Returns innermost lvalue. */
bbf6f052 11353
11354tree
11355bc_expand_component_address (exp)
11356 tree exp;
bbf6f052 11357{
11358 tree tem, chain;
11359 enum machine_mode mode;
11360 int bitpos = 0;
11361 HOST_WIDE_INT SIval;
a7c5971a 11362
bbf6f052 11363
11364 tem = TREE_OPERAND (exp, 1);
11365 mode = DECL_MODE (tem);
bbf6f052 11366
11367
11368 /* Compute cumulative bit offset for nested component refs
11369 and array refs, and find the ultimate containing object. */
11370
11371 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
bbf6f052 11372 {
11373 if (TREE_CODE (tem) == COMPONENT_REF)
11374 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11375 else
11376 if (TREE_CODE (tem) == ARRAY_REF
11377 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11378 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
bbf6f052 11379
11380 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11381 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11382 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11383 else
11384 break;
11385 }
bbf6f052 11386
c02bd5d9 11387 bc_expand_expr (tem);
bbf6f052 11388
cd1b4b44 11389
ca695ac9
JB
11390 /* For bit fields, also push their offset and size.  */
11391 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11392 bc_push_offset_and_size (bitpos, TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
11393 else
11394 if ((SIval = bitpos / BITS_PER_UNIT))
11395 bc_emit_instruction (addconstPSI, SIval);
bbf6f052 11396
ca695ac9 11397 return (TREE_OPERAND (exp, 1));
bbf6f052 11398}
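/* Editor's worked example for the loop above: in `s.a.b', if field `a'
   lies at bit 32 of `s' and `b' at bit 16 of `a', bitpos accumulates to
   48 while tem walks inward to `s' itself.  Once `s' is expanded, a
   non-bit-field `b' yields `addconstPSI 6' (with BITS_PER_UNIT == 8),
   while a bit-field `b' has its (offset, size) pair pushed instead.  */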
e7c33f54 11399
bbf6f052 11400
ca695ac9 11401/* Emit code to push two SI constants.  */
0f41302f 11402
ca695ac9
JB
11403void
11404bc_push_offset_and_size (offset, size)
11405 HOST_WIDE_INT offset, size;
11406{
11407 bc_emit_instruction (constSI, offset);
11408 bc_emit_instruction (constSI, size);
11409}
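/* Editor's note: the offset is emitted first, so the size sits nearer
   the top of the interpreter stack; the bit-field opcodes (sstoreBI,
   zxloadBI, sxloadBI) presumably consume the pair in that order.  */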
bbf6f052 11410
bbf6f052 11411
ca695ac9
JB
11412/* Emit byte code to push the address of the given lvalue expression to
11413 the stack. If it's a bit field, we also push offset and size info.
bbf6f052 11414
ca695ac9 11415 Returns innermost component, which allows us to determine not only
0f41302f 11416 its type, but also whether it's a bitfield. */
ca695ac9
JB
11417
11418tree
11419bc_expand_address (exp)
bbf6f052 11420 tree exp;
bbf6f052 11421{
ca695ac9
JB
11422 /* Safeguard against null and erroneous trees.  */
11423 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11424 return (exp);
bbf6f052 11425
e7c33f54 11426
ca695ac9
JB
11427 switch (TREE_CODE (exp))
11428 {
11429 case ARRAY_REF:
e7c33f54 11430
ca695ac9 11431 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
e7c33f54 11432
ca695ac9 11433 case COMPONENT_REF:
bbf6f052 11434
ca695ac9 11435 return (bc_expand_component_address (exp));
bbf6f052 11436
ca695ac9 11437 case INDIRECT_REF:
bbf6f052 11438
ca695ac9
JB
11439 bc_expand_expr (TREE_OPERAND (exp, 0));
11440
11441 /* For variable-sized types, retrieve the actual pointer.  The
11442 TYPE_SIZE tree can be NULL here (arguably a bug), so check it,
0f41302f 11443 and also make sure the operand exists at all.  */
ca695ac9
JB
11444
11445 if (TREE_OPERAND (exp, 0)
11446 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11447 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11448 bc_emit_instruction (loadP);
11449
11450 /* If it is a bit field, also push its offset and size.  */
11451 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11452
11453 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11454 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11455
11456 return (TREE_OPERAND (exp, 0));
11457
11458 case FUNCTION_DECL:
11459
e7a42772
JB
11460 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11461 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
bbf6f052 11462 break;
ca695ac9
JB
11463
11464 case PARM_DECL:
11465
11466 bc_load_parmaddr (DECL_RTL (exp));
11467
11468 /* For variable-sized types, retrieve the actual pointer.  */
11469 if (TYPE_SIZE (TREE_TYPE (exp))
11470 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11471 bc_emit_instruction (loadP);
11472
11473 /* If it is a bit field, also push its offset and size.  */
11474 if (DECL_BIT_FIELD (exp))
11475 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11476 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11477
bbf6f052 11478 break;
ca695ac9
JB
11479
11480 case RESULT_DECL:
11481
11482 bc_emit_instruction (returnP);
bbf6f052 11483 break;
ca695ac9
JB
11484
11485 case VAR_DECL:
11486
11487#if 0
e7a42772 11488 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
11489 bc_load_externaddr (DECL_RTL (exp));
11490#endif
11491
11492 if (DECL_EXTERNAL (exp))
e7a42772 11493 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
eb862a37 11494 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
bbf6f052 11495 else
ca695ac9
JB
11496 bc_load_localaddr (DECL_RTL (exp));
11497
11498 /* For variable-sized types, retrieve the actual pointer.  */
11499 if (TYPE_SIZE (TREE_TYPE (exp))
11500 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11501 bc_emit_instruction (loadP);
11502
11503 /* If it is a bit field, also push its offset and size.  */
11504 if (DECL_BIT_FIELD (exp))
11505 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11506 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11507
bbf6f052 11508 break;
ca695ac9
JB
11509
11510 case STRING_CST:
11511 {
11512 rtx r;
11513
11514 bc_emit_bytecode (constP);
11515 r = output_constant_def (exp);
e7a42772 11516 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
ca695ac9
JB
11517
11518#ifdef DEBUG_PRINT_CODE
11519 fputc ('\n', stderr);
11520#endif
11521 }
bbf6f052 11522 break;
ca695ac9 11523
bbf6f052 11524 default:
bbf6f052 11525
ca695ac9
JB
11526 abort ();
11527 break;
bbf6f052
RK
11528 }
11529
0f41302f 11530 /* Most lvalues don't have components. */
ca695ac9
JB
11531 return (exp);
11532}
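/* Editor's summary of the dispatch above: an ARRAY_REF is rewritten by
   bc_canonicalize_array_ref into an INDIRECT_REF and re-expanded; a
   COMPONENT_REF goes through bc_expand_component_address; the decl
   cases push a local, parameter, or external address directly.  In
   every case the lvalue's machine address ends up on the interpreter
   stack, with an (offset, size) pair above it for bit fields, and the
   innermost decl or expression is returned for the caller to
   inspect.  */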
bbf6f052 11533
ca695ac9
JB
11534
11535/* Return a type code to be used by the runtime support in handling
11536 parameter passing. The type code consists of the machine mode
11537 plus the minimal alignment shifted left 8 bits. */
11538
11539tree
11540bc_runtime_type_code (type)
11541 tree type;
11542{
11543 int val;
11544
11545 switch (TREE_CODE (type))
bbf6f052 11546 {
ca695ac9
JB
11547 case VOID_TYPE:
11548 case INTEGER_TYPE:
11549 case REAL_TYPE:
11550 case COMPLEX_TYPE:
11551 case ENUMERAL_TYPE:
11552 case POINTER_TYPE:
11553 case RECORD_TYPE:
11554
6bd6178d 11555 val = (int) TYPE_MODE (type) | (TYPE_ALIGN (type) << 8);
ca695ac9
JB
11556 break;
11557
11558 case ERROR_MARK:
11559
11560 val = 0;
11561 break;
11562
11563 default:
af508edd 11564
ca695ac9
JB
11565 abort ();
11566 }
11567 return build_int_2 (val, 0);
11568}
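/* Editor's worked example (the mode's integer value is assumed for
   illustration): if (int) TYPE_MODE (type) were 8 for SImode and
   TYPE_ALIGN (type) were 32, the code above would compute

	val = 8 | (32 << 8) == 0x2008

   letting the runtime recover the mode from the low byte and the
   alignment from the remaining bits.  */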
af508edd 11569
af508edd 11570
ca695ac9 11571/* Generate a label for constructor data.  */
0f41302f 11572
ca695ac9
JB
11573char *
11574bc_gen_constr_label ()
11575{
11576 static int label_counter;
11577 static char label[20];
bbf6f052 11578
ca695ac9 11579 sprintf (label, "*LR%d", label_counter++);
bbf6f052 11580
ca695ac9
JB
11581 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11582}
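/* Editor's note: successive calls yield "*LR0", "*LR1", and so on.
   The static buffer is copied onto the permanent obstack before being
   returned, so each label survives the next call.  */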
bbf6f052 11583
bbf6f052 11584
ca695ac9
JB
11585/* Evaluate constructor CONSTR and return pointer to it on level one. We
11586 expand the constructor data as static data, and push a pointer to it.
11587 The pointer is put in the pointer table and is retrieved by a constP
11588 bytecode instruction. We then loop and store each constructor member in
11589 the corresponding component. Finally, we return the original pointer on
0f41302f 11590 the stack. */
af508edd 11591
ca695ac9
JB
11592void
11593bc_expand_constructor (constr)
11594 tree constr;
11595{
11596 char *l;
11597 HOST_WIDE_INT ptroffs;
11598 rtx constr_rtx;
bbf6f052 11599
ca695ac9
JB
11600
11601 /* Literal constructors are handled as constants, whereas
11602 non-literals are evaluated and stored element by element
0f41302f 11603 into the data segment. */
ca695ac9
JB
11604
11605 /* Allocate space in the proper segment and push a pointer to
11606 that space on the stack.  */
bbf6f052 11607
ca695ac9 11608 l = bc_gen_constr_label ();
bbf6f052 11609
ca695ac9 11610 if (TREE_CONSTANT (constr))
bbf6f052 11611 {
ca695ac9
JB
11612 text_section ();
11613
11614 bc_emit_const_labeldef (l);
11615 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
bbf6f052 11616 }
ca695ac9
JB
11617 else
11618 {
11619 data_section ();
bbf6f052 11620
ca695ac9
JB
11621 bc_emit_data_labeldef (l);
11622 bc_output_data_constructor (constr);
11623 }
bbf6f052 11624
ca695ac9
JB
11625
11626 /* Add reference to pointer table and recall pointer to stack;
11627 this code is common for both types of constructors: literals
0f41302f 11628 and non-literals. */
bbf6f052 11629
de7d9320
JB
11630 ptroffs = bc_define_pointer (l);
11631 bc_emit_instruction (constP, ptroffs);
d39985fa 11632
0f41302f 11633 /* This is all that has to be done if it's a literal. */
ca695ac9
JB
11634 if (TREE_CONSTANT (constr))
11635 return;
bbf6f052 11636
ca695ac9
JB
11637
11638 /* At this point, we have the pointer to the structure on top of the stack.
0f41302f 11639 Generate sequences of store_memory calls for the constructor. */
ca695ac9
JB
11640
11641 /* The constructor's type is a structure.  */
11642 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
e7c33f54 11643 {
ca695ac9
JB
11644 register tree elt;
11645
11646 /* If the constructor has fewer fields than the structure,
11647 clear the whole structure first. */
11648
11649 if (list_length (CONSTRUCTOR_ELTS (constr))
11650 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11651 {
6d6e61ce 11652 bc_emit_instruction (duplicate);
ca695ac9
JB
11653 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11654 bc_emit_instruction (clearBLK);
11655 }
11656
11657 /* Store each element of the constructor into the corresponding
11658 field of TARGET. */
11659
11660 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11661 {
11662 register tree field = TREE_PURPOSE (elt);
11663 register enum machine_mode mode;
11664 int bitsize;
11665 int bitpos;
11666 int unsignedp;
11667
11668 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11669 mode = DECL_MODE (field);
11670 unsignedp = TREE_UNSIGNED (field);
11671
11672 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11673
11674 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11675 /* The alignment of TARGET is
11676 at least what its type requires. */
11677 VOIDmode, 0,
11678 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11679 int_size_in_bytes (TREE_TYPE (constr)));
11680 }
e7c33f54 11681 }
ca695ac9
JB
11682 else
11683
11684 /* The constructor's type is an array.  */
11685 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11686 {
11687 register tree elt;
11688 register int i;
11689 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11690 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11691 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11692 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11693
11694 /* If the constructor has fewer elements than the array,
11695 clear the whole array first.  */
11696
11697 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11698 {
6d6e61ce 11699 bc_emit_instruction (duplicate);
ca695ac9
JB
11700 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11701 bc_emit_instruction (clearBLK);
11702 }
11703
11704
11705 /* Store each element of the constructor into the corresponding
0f41302f 11706 element of TARGET, determined by counting the elements. */
ca695ac9
JB
11707
11708 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11709 elt;
11710 elt = TREE_CHAIN (elt), i++)
11711 {
11712 register enum machine_mode mode;
11713 int bitsize;
11714 int bitpos;
11715 int unsignedp;
11716
11717 mode = TYPE_MODE (elttype);
11718 bitsize = GET_MODE_BITSIZE (mode);
11719 unsignedp = TREE_UNSIGNED (elttype);
11720
11721 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11722 /* * TYPE_SIZE_UNIT (elttype) */ );
11723
11724 bc_store_field (elt, bitsize, bitpos, mode,
11725 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11726 /* The alignment of TARGET is
11727 at least what its type requires. */
11728 VOIDmode, 0,
11729 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11730 int_size_in_bytes (TREE_TYPE (constr)));
11731 }
11732
11733 }
11734}
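/* Editor's illustration (hypothetical non-constant initializer, 4-byte
   ints assumed): for

	struct { int x, y; } s = { f () };

   TREE_CONSTANT is false, so space is reserved in the data section and
   the emitted bytecode is roughly

	constP <ptroffs>	recall pointer to the reserved space
	duplicate
	constSI 8		sizeof (s)
	clearBLK		fewer inits than fields: clear it all
	...			bc_store_field stores f ()'s value in x

   leaving the original pointer on top of the stack.  */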
bbf6f052 11735
bbf6f052 11736
ca695ac9
JB
11737/* Store the value of EXP (an expression tree) into member FIELD of
11738 structure at address on stack, which has type TYPE, mode MODE and
11739 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11740 structure.
bbf6f052 11741
ca695ac9
JB
11742 ALIGN is the alignment that the destination is known to have, measured in bytes.
11743 TOTAL_SIZE is its size in bytes, or -1 if variable. */
bbf6f052 11744
ca695ac9
JB
11745void
11746bc_store_field (field, bitsize, bitpos, mode, exp, type,
11747 value_mode, unsignedp, align, total_size)
11748 int bitsize, bitpos;
11749 enum machine_mode mode;
11750 tree field, exp, type;
11751 enum machine_mode value_mode;
11752 int unsignedp;
11753 int align;
11754 int total_size;
11755{
bbf6f052 11756
ca695ac9
JB
11757 /* Expand the value expression, then copy the destination pointer on top.  */
11758 bc_expand_expr (exp);
11759 bc_emit_instruction (over);
bbf6f052 11760
bbf6f052 11761
ca695ac9
JB
11762 /* If the component is a bit field, we cannot use addressing to access
11763 it. Use bit-field techniques to store in it. */
bbf6f052 11764
ca695ac9
JB
11765 if (DECL_BIT_FIELD (field))
11766 {
11767 bc_store_bit_field (bitpos, bitsize, unsignedp);
11768 return;
11769 }
11770 else
11771 /* Not bit field */
11772 {
11773 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11774
11775 /* Advance pointer to the desired member */
11776 if (offset)
11777 bc_emit_instruction (addconstPSI, offset);
11778
11779 /* Store */
11780 bc_store_memory (type, field);
11781 }
11782}
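/* Editor's note on the stack discipline above: the destination address
   is on top of the stack on entry.  bc_expand_expr pushes the value,
   giving (addr, value); `over' (apparently modelled on Forth's OVER)
   then copies the address back to the top, giving (addr, value, addr),
   ready for the bit-field or memory store that follows.  */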
bbf6f052 11783
ca695ac9
JB
11784
11785/* Store a signed or unsigned SI value into a bit field.  */
0f41302f 11786
bbf6f052 11787void
ca695ac9
JB
11788bc_store_bit_field (offset, size, unsignedp)
11789 int offset, size, unsignedp;
bbf6f052 11790{
ca695ac9
JB
11791 /* Push bitfield offset and size */
11792 bc_push_offset_and_size (offset, size);
bbf6f052 11793
ca695ac9
JB
11794 /* Store */
11795 bc_emit_instruction (sstoreBI);
11796}
e87b4f3f 11797
88d3b7f0 11798
ca695ac9 11799/* Load a signed or unsigned SI value from a bit field.  */
0f41302f 11800
ca695ac9
JB
11801void
11802bc_load_bit_field (offset, size, unsignedp)
11803 int offset, size, unsignedp;
11804{
11805 /* Push bitfield offset and size */
11806 bc_push_offset_and_size (offset, size);
88d3b7f0 11807
ca695ac9
JB
11808 /* Load: sign-extend if signed, else zero-extend */
11809 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11810}
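/* Editor's note: the two helpers above are symmetric.  Each pushes the
   bit offset and size, then emits a single opcode; only the load cares
   about signedness, picking zxloadBI (zero-extend) when UNSIGNEDP is
   nonzero and sxloadBI (sign-extend) otherwise.  */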
709f5be1 11811
bbf6f052 11812
ca695ac9
JB
11813/* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11814 (adjust stack pointer upwards), negative means add that number of
11815 levels (adjust the stack pointer downwards). Only positive values
0f41302f 11816 normally make sense. */
bbf6f052 11817
ca695ac9
JB
11818void
11819bc_adjust_stack (nlevels)
11820 int nlevels;
11821{
11822 switch (nlevels)
11823 {
11824 case 0:
11825 break;
11826
11827 case 2:
11828 bc_emit_instruction (drop);
11829
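/* Fall through: the drop below handles the remaining level.  */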
11830 case 1:
11831 bc_emit_instruction (drop);
11832 break;
11833
11834 default:
11835
11836 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11837 stack_depth -= nlevels;
11838 }
11839
a68c7608
RS
11840#if defined (VALIDATE_STACK_FOR_BC)
11841 VALIDATE_STACK_FOR_BC ();
bbf6f052
RK
11842#endif
11843}
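/* Editor's worked example: bc_adjust_stack (2) emits two `drop'
   opcodes via the fall-through above, while bc_adjust_stack (5) emits
   one `adjstackSI 5' and, apparently because this path bypasses the
   per-opcode bookkeeping, adjusts stack_depth by hand.  */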