/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"


#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
                                    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
                                     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
                              enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
                                 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
void bc_expand_increment PROTO((struct increment_operator *, tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern void (*interim_eh_hook) PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
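
/* Worked example of how MOVE_RATIO is used: on a hypothetical 32-bit
   target with MOVE_MAX == 4 and the default MOVE_RATIO of 15, a
   16-byte word-aligned block copy costs move_by_pieces_ninsns (16, 4)
   == 4 SImode moves; since 4 < 15, emit_block_move (below) expands it
   inline with move_by_pieces instead of emitting a movstr insn or a
   library call.  */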

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
        mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
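
/* Illustration (a sketch; these opcode names are made up, the real
   ones come from bc-opcode.h): a modemap.def entry such as

     DEF_MODEMAP (SImode, codeSI, ucodeSI, constSI, loadSI, storeSI)

   expands inside the function above to

     mode_to_const_map[(int) SImode] = constSI;
     mode_to_load_map[(int) SImode] = loadSI;
     mode_to_store_map[(int) SImode] = storeSI;  */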
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx (REG, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
}
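
/* In effect, the probe loop above asks the recognizer, for each mode M
   and hard register R, whether patterns of the shape

     (set (reg:M R) (mem:M (reg SP)))      load from memory
     (set (mem:M (reg SP)) (reg:M R))      store to memory

   (and likewise with the frame pointer) match some insn; a hit sets
   direct_load[M] or direct_store[M].  This is a schematic sketch, not
   literal rtl from this file.  */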

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

          MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
          MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
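
/* Illustrative usage sketch (hypothetical operands; this mirrors what
   convert_move and emit_block_move do later in this file):  */
#if 0
  to = protect_from_queue (to, 1);       /* operand will be written */
  from = protect_from_queue (from, 0);   /* operand is only read */
  emit_move_insn (to, from);             /* safe: no QUEUED rtx remains */
#endif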

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
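
/* Lifecycle sketch: expanding a post-increment such as V++ (roughly
   what expand_increment does later in this file) calls
   enqueue_insn (v, body) with BODY being the add insn; the QUEUED rtx
   returned stands for the pre-increment value, and the add itself is
   emitted at the next emit_queue () call, as above.  */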

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;
            }
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
                                       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */	/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
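
/* Usage sketch (hypothetical pseudos): widening a SImode value into a
   DImode register.  */
#if 0
  rtx src = gen_reg_rtx (SImode);
  rtx dst = gen_reg_rtx (DImode);
  convert_move (dst, src, 0);   /* UNSIGNEDP == 0: sign-extend */
  convert_move (dst, src, 1);   /* UNSIGNEDP != 0: zero-extend */
#endif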

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
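
/* Worked example of the CONST_INT handling above:
   convert_modes (SImode, QImode, GEN_INT (-1), 1) masks the value to
   the old 8-bit width and yields (const_int 255), while with
   UNSIGNEDP == 0 the sign bit is propagated back and the result is
   (const_int -1).  */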
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
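
/* Worked example (assuming a hypothetical 32-bit target with
   MOVE_MAX == 4 and fast unaligned accesses):
   move_by_pieces_ninsns (7, 4) counts one SImode move (4 bytes), then
   one HImode move (2 bytes), then one QImode move (1 byte), and
   returns 3.  */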

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      from1 =
        (data->autinc_from
         ? gen_rtx (MEM, mode, data->from_addr)
         : change_address (data->from, mode,
                           plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= GET_MODE_MASK (mode)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype), size,
                                          TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif
    }
}
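
/* Usage sketch (X and Y are hypothetical BLKmode MEM rtx's): copy a
   16-byte object whose alignment is known to be 4 bytes.  */
#if 0
  emit_block_move (x, y, GEN_INT (16), 4);
#endif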
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
        abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx (REG, word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
1820
fffa9c1d
JW
1821/* Emit code to move a block Y to a block X, where X is non-consecutive
1822 registers represented by a PARALLEL. */
1823
1824void
1825emit_group_load (x, y)
1826 rtx x, y;
1827{
1828 rtx target_reg, source;
1829 int i;
1830
1831 if (GET_CODE (x) != PARALLEL)
1832 abort ();
1833
1834 /* Check for a NULL entry, used to indicate that the parameter goes
1835 both on the stack and in registers. */
1836 if (XEXP (XVECEXP (x, 0, 0), 0))
1837 i = 0;
1838 else
1839 i = 1;
1840
1841 for (; i < XVECLEN (x, 0); i++)
1842 {
1843 rtx element = XVECEXP (x, 0, i);
1844
1845 target_reg = XEXP (element, 0);
1846
1847 if (GET_CODE (y) == MEM)
1848 source = change_address (y, GET_MODE (target_reg),
1849 plus_constant (XEXP (y, 0),
1850 INTVAL (XEXP (element, 1))));
1851 else if (XEXP (element, 1) == const0_rtx)
1852 {
1853 if (GET_MODE (target_reg) == GET_MODE (y))
1854 source = y;
1855 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1856 == GET_MODE_SIZE (GET_MODE (y)))
1857 source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
1858 else
1859 abort ();
1860 }
1861 else
1862 abort ();
1863
1864 emit_move_insn (target_reg, source);
1865 }
1866}
1867
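/* An illustrative sketch only (register numbers hypothetical): for a
   value whose first 8 bytes live in one floating register and whose
   next 8 bytes live in another, as under the Irix 6 ABI, X would be
   schematically

	(parallel [(expr_list (reg:DF 32) (const_int 0))
		   (expr_list (reg:DF 33) (const_int 8))])

   where the second operand of each EXPR_LIST is the byte offset of
   that piece within the block Y.  */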
1868/* Emit code to move a block Y to a block X, where Y is non-consecutive
1869 registers represented by a PARALLEL. */
1870
1871void
1872emit_group_store (x, y)
1873 rtx x, y;
1874{
1875 rtx source_reg, target;
1876 int i;
1877
1878 if (GET_CODE (y) != PARALLEL)
1879 abort ();
1880
1881 /* Check for a NULL entry, used to indicate that the parameter goes
1882 both on the stack and in registers. */
1883 if (XEXP (XVECEXP (y, 0, 0), 0))
1884 i = 0;
1885 else
1886 i = 1;
1887
1888 for (; i < XVECLEN (y, 0); i++)
1889 {
1890 rtx element = XVECEXP (y, 0, i);
1891
1892 source_reg = XEXP (element, 0);
1893
1894 if (GET_CODE (x) == MEM)
1895 target = change_address (x, GET_MODE (source_reg),
1896 plus_constant (XEXP (x, 0),
1897 INTVAL (XEXP (element, 1))));
1898 else if (XEXP (element, 1) == const0_rtx)
1899 target = x;
1900 else
1901 abort ();
1902
1903 emit_move_insn (target, source_reg);
1904 }
1905}
1906
94b25f81
RK
1907/* Add a USE expression for REG to the (possibly empty) list pointed
1908 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
1909
1910void
b3f8cf4a
RK
1911use_reg (call_fusage, reg)
1912 rtx *call_fusage, reg;
1913{
0304dfbb
DE
1914 if (GET_CODE (reg) != REG
1915 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
b3f8cf4a
RK
 1916 abort ();
1917
1918 *call_fusage
1919 = gen_rtx (EXPR_LIST, VOIDmode,
0304dfbb 1920 gen_rtx (USE, VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
1921}
1922
94b25f81
RK
1923/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1924 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
1925
1926void
0304dfbb
DE
1927use_regs (call_fusage, regno, nregs)
1928 rtx *call_fusage;
bbf6f052
RK
1929 int regno;
1930 int nregs;
1931{
0304dfbb 1932 int i;
bbf6f052 1933
0304dfbb
DE
1934 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1935 abort ();
1936
1937 for (i = 0; i < nregs; i++)
1938 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
bbf6f052 1939}
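/* A minimal usage sketch, not compiler code; the register numbers are
   hypothetical.  Recording that an argument occupies two consecutive
   hard registers in a call's CALL_INSN_FUNCTION_USAGE list:  */
#if 0
static void
example_use (fusage)
     rtx *fusage;
{
  use_regs (fusage, 4, 2);	/* Adds USEs of hard regs 4 and 5.  */
}
#endif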
fffa9c1d
JW
1940
1941/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1942 PARALLEL REGS. This is for calls that pass values in multiple
1943 non-contiguous locations. The Irix 6 ABI has examples of this. */
1944
1945void
1946use_group_regs (call_fusage, regs)
1947 rtx *call_fusage;
1948 rtx regs;
1949{
1950 int i;
1951
1952 /* Check for a NULL entry, used to indicate that the parameter goes
1953 both on the stack and in registers. */
1954 if (XEXP (XVECEXP (regs, 0, 0), 0))
1955 i = 0;
1956 else
1957 i = 1;
1958
1959 for (; i < XVECLEN (regs, 0); i++)
1960 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1961}
bbf6f052 1962\f
9de08200
RK
1963/* Generate several move instructions to clear LEN bytes of block TO.
1964 (A MEM rtx with BLKmode). The caller must pass TO through
 1965 protect_from_queue before calling. ALIGN (in bytes) is the maximum
 1966 alignment we can assume. */
1967
1968static void
1969clear_by_pieces (to, len, align)
1970 rtx to;
1971 int len, align;
1972{
1973 struct clear_by_pieces data;
1974 rtx to_addr = XEXP (to, 0);
1975 int max_size = MOVE_MAX + 1;
1976
1977 data.offset = 0;
1978 data.to_addr = to_addr;
1979 data.to = to;
1980 data.autinc_to
1981 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1982 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1983
1984 data.explicit_inc_to = 0;
1985 data.reverse
1986 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1987 if (data.reverse) data.offset = len;
1988 data.len = len;
1989
1990 data.to_struct = MEM_IN_STRUCT_P (to);
1991
 1992 /* If clearing requires more than two move insns,
1993 copy addresses to registers (to make displacements shorter)
1994 and use post-increment if available. */
1995 if (!data.autinc_to
1996 && move_by_pieces_ninsns (len, align) > 2)
1997 {
1998#ifdef HAVE_PRE_DECREMENT
1999 if (data.reverse && ! data.autinc_to)
2000 {
2001 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2002 data.autinc_to = 1;
2003 data.explicit_inc_to = -1;
2004 }
2005#endif
2006#ifdef HAVE_POST_INCREMENT
2007 if (! data.reverse && ! data.autinc_to)
2008 {
2009 data.to_addr = copy_addr_to_reg (to_addr);
2010 data.autinc_to = 1;
2011 data.explicit_inc_to = 1;
2012 }
2013#endif
2014 if (!data.autinc_to && CONSTANT_P (to_addr))
2015 data.to_addr = copy_addr_to_reg (to_addr);
2016 }
2017
2018 if (! SLOW_UNALIGNED_ACCESS
2019 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2020 align = MOVE_MAX;
2021
2022 /* First move what we can in the largest integer mode, then go to
2023 successively smaller modes. */
2024
2025 while (max_size > 1)
2026 {
2027 enum machine_mode mode = VOIDmode, tmode;
2028 enum insn_code icode;
2029
2030 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2031 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2032 if (GET_MODE_SIZE (tmode) < max_size)
2033 mode = tmode;
2034
2035 if (mode == VOIDmode)
2036 break;
2037
2038 icode = mov_optab->handlers[(int) mode].insn_code;
2039 if (icode != CODE_FOR_nothing
2040 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2041 GET_MODE_SIZE (mode)))
2042 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2043
2044 max_size = GET_MODE_SIZE (mode);
2045 }
2046
2047 /* The code above should have handled everything. */
2048 if (data.len != 0)
2049 abort ();
2050}
2051
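/* Worked example of the mode walk above, assuming 4-byte words,
   sufficient alignment, and LEN == 7: the SImode pass emits one 4-byte
   store of zero, the HImode pass one 2-byte store, and the QImode pass
   one 1-byte store, after which data.len == 0 as the final sanity
   check demands.  */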
2052/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2053 with move instructions for mode MODE. GENFUN is the gen_... function
2054 to make a move insn for that mode. DATA has all the other info. */
2055
2056static void
2057clear_by_pieces_1 (genfun, mode, data)
2058 rtx (*genfun) ();
2059 enum machine_mode mode;
2060 struct clear_by_pieces *data;
2061{
2062 register int size = GET_MODE_SIZE (mode);
2063 register rtx to1;
2064
2065 while (data->len >= size)
2066 {
2067 if (data->reverse) data->offset -= size;
2068
2069 to1 = (data->autinc_to
2070 ? gen_rtx (MEM, mode, data->to_addr)
2071 : change_address (data->to, mode,
2072 plus_constant (data->to_addr, data->offset)));
2073 MEM_IN_STRUCT_P (to1) = data->to_struct;
2074
2075#ifdef HAVE_PRE_DECREMENT
2076 if (data->explicit_inc_to < 0)
2077 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2078#endif
2079
2080 emit_insn ((*genfun) (to1, const0_rtx));
2081#ifdef HAVE_POST_INCREMENT
2082 if (data->explicit_inc_to > 0)
2083 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2084#endif
2085
2086 if (! data->reverse) data->offset += size;
2087
2088 data->len -= size;
2089 }
2090}
2091\f
bbf6f052 2092/* Write zeros through the storage of OBJECT.
9de08200
RK
2093 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
 2094 the maximum alignment we can assume it has, measured in bytes. */
bbf6f052
RK
2095
2096void
9de08200 2097clear_storage (object, size, align)
bbf6f052 2098 rtx object;
4c08eef0 2099 rtx size;
9de08200 2100 int align;
bbf6f052
RK
2101{
2102 if (GET_MODE (object) == BLKmode)
2103 {
9de08200
RK
2104 object = protect_from_queue (object, 1);
2105 size = protect_from_queue (size, 0);
2106
2107 if (GET_CODE (size) == CONST_INT
2108 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2109 clear_by_pieces (object, INTVAL (size), align);
2110
2111 else
2112 {
2113 /* Try the most limited insn first, because there's no point
2114 including more than one in the machine description unless
2115 the more limited one has some advantage. */
2116
2117 rtx opalign = GEN_INT (align);
2118 enum machine_mode mode;
2119
2120 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2121 mode = GET_MODE_WIDER_MODE (mode))
2122 {
2123 enum insn_code code = clrstr_optab[(int) mode];
2124
2125 if (code != CODE_FOR_nothing
2126 /* We don't need MODE to be narrower than
2127 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2128 the mode mask, as it is returned by the macro, it will
2129 definitely be less than the actual mode mask. */
2130 && ((GET_CODE (size) == CONST_INT
2131 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2132 <= GET_MODE_MASK (mode)))
2133 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2134 && (insn_operand_predicate[(int) code][0] == 0
2135 || (*insn_operand_predicate[(int) code][0]) (object,
2136 BLKmode))
2137 && (insn_operand_predicate[(int) code][2] == 0
2138 || (*insn_operand_predicate[(int) code][2]) (opalign,
2139 VOIDmode)))
2140 {
2141 rtx op1;
2142 rtx last = get_last_insn ();
2143 rtx pat;
2144
2145 op1 = convert_to_mode (mode, size, 1);
2146 if (insn_operand_predicate[(int) code][1] != 0
2147 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2148 mode))
2149 op1 = copy_to_mode_reg (mode, op1);
2150
2151 pat = GEN_FCN ((int) code) (object, op1, opalign);
2152 if (pat)
2153 {
2154 emit_insn (pat);
2155 return;
2156 }
2157 else
2158 delete_insns_since (last);
2159 }
2160 }
2161
2162
bbf6f052 2163#ifdef TARGET_MEM_FUNCTIONS
9de08200
RK
2164 emit_library_call (memset_libfunc, 0,
2165 VOIDmode, 3,
2166 XEXP (object, 0), Pmode,
2167 const0_rtx, TYPE_MODE (integer_type_node),
2168 convert_to_mode (TYPE_MODE (sizetype),
2169 size, TREE_UNSIGNED (sizetype)),
2170 TYPE_MODE (sizetype));
bbf6f052 2171#else
9de08200
RK
2172 emit_library_call (bzero_libfunc, 0,
2173 VOIDmode, 2,
2174 XEXP (object, 0), Pmode,
2175 convert_to_mode (TYPE_MODE (integer_type_node),
2176 size,
2177 TREE_UNSIGNED (integer_type_node)),
2178 TYPE_MODE (integer_type_node));
bbf6f052 2179#endif
9de08200 2180 }
bbf6f052
RK
2181 }
2182 else
2183 emit_move_insn (object, const0_rtx);
2184}
2185
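/* A minimal usage sketch, not compiler code: zeroing a 16-byte BLKmode
   MEM that is assumed to be word-aligned.  */
#if 0
static void
example_clear (blk)
     rtx blk;
{
  clear_storage (blk, GEN_INT (16), UNITS_PER_WORD);
}
#endif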
2186/* Generate code to copy Y into X.
2187 Both Y and X must have the same mode, except that
2188 Y can be a constant with VOIDmode.
2189 This mode cannot be BLKmode; use emit_block_move for that.
2190
2191 Return the last instruction emitted. */
2192
2193rtx
2194emit_move_insn (x, y)
2195 rtx x, y;
2196{
2197 enum machine_mode mode = GET_MODE (x);
bbf6f052
RK
2198
2199 x = protect_from_queue (x, 1);
2200 y = protect_from_queue (y, 0);
2201
2202 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2203 abort ();
2204
2205 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2206 y = force_const_mem (mode, y);
2207
2208 /* If X or Y are memory references, verify that their addresses are valid
2209 for the machine. */
2210 if (GET_CODE (x) == MEM
2211 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2212 && ! push_operand (x, GET_MODE (x)))
2213 || (flag_force_addr
2214 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2215 x = change_address (x, VOIDmode, XEXP (x, 0));
2216
2217 if (GET_CODE (y) == MEM
2218 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2219 || (flag_force_addr
2220 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2221 y = change_address (y, VOIDmode, XEXP (y, 0));
2222
2223 if (mode == BLKmode)
2224 abort ();
2225
261c4230
RS
2226 return emit_move_insn_1 (x, y);
2227}
2228
2229/* Low level part of emit_move_insn.
2230 Called just like emit_move_insn, but assumes X and Y
2231 are basically valid. */
2232
2233rtx
2234emit_move_insn_1 (x, y)
2235 rtx x, y;
2236{
2237 enum machine_mode mode = GET_MODE (x);
2238 enum machine_mode submode;
2239 enum mode_class class = GET_MODE_CLASS (mode);
2240 int i;
2241
bbf6f052
RK
2242 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2243 return
2244 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2245
89742723 2246 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2247 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
2248 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2249 * BITS_PER_UNIT),
2250 (class == MODE_COMPLEX_INT
2251 ? MODE_INT : MODE_FLOAT),
2252 0))
7308a047
RS
2253 && (mov_optab->handlers[(int) submode].insn_code
2254 != CODE_FOR_nothing))
2255 {
2256 /* Don't split destination if it is a stack push. */
2257 int stack = push_operand (x, GET_MODE (x));
6551fa4d 2258 rtx insns;
7308a047 2259
7308a047
RS
 2260 /* If this is a stack push, push the highpart first, so it
2261 will be in the argument order.
2262
2263 In that case, change_address is used only to convert
2264 the mode, not to change the address. */
c937357e
RS
2265 if (stack)
2266 {
e33c0d66
RS
2267 /* Note that the real part always precedes the imag part in memory
2268 regardless of machine's endianness. */
c937357e
RS
2269#ifdef STACK_GROWS_DOWNWARD
2270 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2271 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 2272 gen_imagpart (submode, y)));
c937357e
RS
2273 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2274 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 2275 gen_realpart (submode, y)));
c937357e
RS
2276#else
2277 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2278 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 2279 gen_realpart (submode, y)));
c937357e
RS
2280 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2281 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 2282 gen_imagpart (submode, y)));
c937357e
RS
2283#endif
2284 }
2285 else
2286 {
2287 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2288 (gen_realpart (submode, x), gen_realpart (submode, y)));
c937357e 2289 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2290 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
c937357e 2291 }
7308a047 2292
7a1ab50a 2293 return get_last_insn ();
7308a047
RS
2294 }
2295
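  /* Worked example of the split above: an SCmode move with no SCmode
     move pattern becomes two SFmode moves, one for the real part and
     one for the imaginary part; for a stack push on a
     STACK_GROWS_DOWNWARD machine the imaginary part is pushed first,
     so the real part ends up at the lower address, matching the
     in-memory layout noted above.  */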
bbf6f052
RK
2296 /* This will handle any multi-word mode that lacks a move_insn pattern.
2297 However, you will get better code if you define such patterns,
2298 even if they must turn into multiple assembler instructions. */
a4320483 2299 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
2300 {
2301 rtx last_insn = 0;
6551fa4d
JW
2302 rtx insns;
2303
a98c9f1a
RK
2304#ifdef PUSH_ROUNDING
2305
2306 /* If X is a push on the stack, do the push now and replace
2307 X with a reference to the stack pointer. */
2308 if (push_operand (x, GET_MODE (x)))
2309 {
2310 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2311 x = change_address (x, VOIDmode, stack_pointer_rtx);
2312 }
2313#endif
2314
15a7a8ec 2315 /* Show the output dies here. */
43e046cb
RK
2316 if (x != y)
2317 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
15a7a8ec 2318
bbf6f052
RK
2319 for (i = 0;
2320 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2321 i++)
2322 {
2323 rtx xpart = operand_subword (x, i, 1, mode);
2324 rtx ypart = operand_subword (y, i, 1, mode);
2325
2326 /* If we can't get a part of Y, put Y into memory if it is a
 2327 constant. Otherwise, force the part into a register. If we still
2328 can't get a part of Y, abort. */
2329 if (ypart == 0 && CONSTANT_P (y))
2330 {
2331 y = force_const_mem (mode, y);
2332 ypart = operand_subword (y, i, 1, mode);
2333 }
2334 else if (ypart == 0)
2335 ypart = operand_subword_force (y, i, mode);
2336
2337 if (xpart == 0 || ypart == 0)
2338 abort ();
2339
2340 last_insn = emit_move_insn (xpart, ypart);
2341 }
6551fa4d 2342
bbf6f052
RK
2343 return last_insn;
2344 }
2345 else
2346 abort ();
2347}
2348\f
2349/* Pushing data onto the stack. */
2350
2351/* Push a block of length SIZE (perhaps variable)
2352 and return an rtx to address the beginning of the block.
2353 Note that it is not possible for the value returned to be a QUEUED.
2354 The value may be virtual_outgoing_args_rtx.
2355
2356 EXTRA is the number of bytes of padding to push in addition to SIZE.
2357 BELOW nonzero means this padding comes at low addresses;
2358 otherwise, the padding comes at high addresses. */
2359
2360rtx
2361push_block (size, extra, below)
2362 rtx size;
2363 int extra, below;
2364{
2365 register rtx temp;
88f63c77
RK
2366
2367 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
2368 if (CONSTANT_P (size))
2369 anti_adjust_stack (plus_constant (size, extra));
2370 else if (GET_CODE (size) == REG && extra == 0)
2371 anti_adjust_stack (size);
2372 else
2373 {
2374 rtx temp = copy_to_mode_reg (Pmode, size);
2375 if (extra != 0)
906c4e36 2376 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
2377 temp, 0, OPTAB_LIB_WIDEN);
2378 anti_adjust_stack (temp);
2379 }
2380
2381#ifdef STACK_GROWS_DOWNWARD
2382 temp = virtual_outgoing_args_rtx;
2383 if (extra != 0 && below)
2384 temp = plus_constant (temp, extra);
2385#else
2386 if (GET_CODE (size) == CONST_INT)
2387 temp = plus_constant (virtual_outgoing_args_rtx,
2388 - INTVAL (size) - (below ? 0 : extra));
2389 else if (extra != 0 && !below)
2390 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2391 negate_rtx (Pmode, plus_constant (size, extra)));
2392 else
2393 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2394 negate_rtx (Pmode, size));
2395#endif
2396
2397 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2398}
2399
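/* A minimal usage sketch, not compiler code; the padding amount is an
   illustrative assumption.  Reserve SIZE bytes plus 8 bytes of padding
   at the low end and get the address of the block:  */
#if 0
static rtx
example_push_block (size)
     rtx size;
{
  return push_block (size, 8, 1);
}
#endif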
87e38d84 2400rtx
bbf6f052
RK
2401gen_push_operand ()
2402{
2403 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2404}
2405
2406/* Generate code to push X onto the stack, assuming it has mode MODE and
2407 type TYPE.
2408 MODE is redundant except when X is a CONST_INT (since they don't
2409 carry mode info).
2410 SIZE is an rtx for the size of data to be copied (in bytes),
2411 needed only if X is BLKmode.
2412
 2413 ALIGN (in bytes) is the maximum alignment we can assume.
2414
cd048831
RK
2415 If PARTIAL and REG are both nonzero, then copy that many of the first
2416 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
2417 The amount of space pushed is decreased by PARTIAL words,
2418 rounded *down* to a multiple of PARM_BOUNDARY.
2419 REG must be a hard register in this case.
cd048831
RK
 2420 If REG is zero but PARTIAL is not, take all other actions for an
2421 argument partially in registers, but do not actually load any
2422 registers.
bbf6f052
RK
2423
2424 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2425 This is ignored if an argument block has already been allocated.
bbf6f052
RK
2426
2427 On a machine that lacks real push insns, ARGS_ADDR is the address of
2428 the bottom of the argument block for this call. We use indexing off there
 2429 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
 2430 argument block has not been preallocated.
2431
2432 ARGS_SO_FAR is the size of args previously pushed for this call. */
2433
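/* Worked example of the PARTIAL rule above, assuming 32-bit words and
   PARM_BOUNDARY == 64: with PARTIAL == 3, three words of X are passed
   in registers, but the space pushed shrinks by only 3 words rounded
   down to the 64-bit boundary, i.e. by two words, so stack space is
   still reserved under one of the register-passed words.  */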
2434void
2435emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2436 args_addr, args_so_far)
2437 register rtx x;
2438 enum machine_mode mode;
2439 tree type;
2440 rtx size;
2441 int align;
2442 int partial;
2443 rtx reg;
2444 int extra;
2445 rtx args_addr;
2446 rtx args_so_far;
2447{
2448 rtx xinner;
2449 enum direction stack_direction
2450#ifdef STACK_GROWS_DOWNWARD
2451 = downward;
2452#else
2453 = upward;
2454#endif
2455
2456 /* Decide where to pad the argument: `downward' for below,
2457 `upward' for above, or `none' for don't pad it.
2458 Default is below for small data on big-endian machines; else above. */
2459 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2460
9c7be814
JL
2461 /* If we're placing part of X into a register and part of X onto
2462 the stack, indicate that the entire register is clobbered to
2463 keep flow from thinking the unused part of the register is live. */
22745c7e 2464 if (partial > 0 && reg != 0)
9c7be814
JL
2465 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2466
bbf6f052
RK
2467 /* Invert direction if stack is post-update. */
2468 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2469 if (where_pad != none)
2470 where_pad = (where_pad == downward ? upward : downward);
2471
2472 xinner = x = protect_from_queue (x, 0);
2473
2474 if (mode == BLKmode)
2475 {
2476 /* Copy a block into the stack, entirely or partially. */
2477
2478 register rtx temp;
2479 int used = partial * UNITS_PER_WORD;
2480 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2481 int skip;
2482
2483 if (size == 0)
2484 abort ();
2485
2486 used -= offset;
2487
2488 /* USED is now the # of bytes we need not copy to the stack
2489 because registers will take care of them. */
2490
2491 if (partial != 0)
2492 xinner = change_address (xinner, BLKmode,
2493 plus_constant (XEXP (xinner, 0), used));
2494
2495 /* If the partial register-part of the arg counts in its stack size,
2496 skip the part of stack space corresponding to the registers.
2497 Otherwise, start copying to the beginning of the stack space,
2498 by setting SKIP to 0. */
2499#ifndef REG_PARM_STACK_SPACE
2500 skip = 0;
2501#else
2502 skip = used;
2503#endif
2504
2505#ifdef PUSH_ROUNDING
2506 /* Do it with several push insns if that doesn't take lots of insns
2507 and if there is no difficulty with push insns that skip bytes
2508 on the stack for alignment purposes. */
2509 if (args_addr == 0
2510 && GET_CODE (size) == CONST_INT
2511 && skip == 0
2512 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2513 < MOVE_RATIO)
bbf6f052
RK
2514 /* Here we avoid the case of a structure whose weak alignment
2515 forces many pushes of a small amount of data,
2516 and such small pushes do rounding that causes trouble. */
c7a7ac46 2517 && ((! SLOW_UNALIGNED_ACCESS)
e87b4f3f 2518 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2519 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
2520 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2521 {
2522 /* Push padding now if padding above and stack grows down,
2523 or if padding below and stack grows up.
2524 But if space already allocated, this has already been done. */
2525 if (extra && args_addr == 0
2526 && where_pad != none && where_pad != stack_direction)
906c4e36 2527 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2528
2529 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2530 INTVAL (size) - used, align);
2531 }
2532 else
2533#endif /* PUSH_ROUNDING */
2534 {
2535 /* Otherwise make space on the stack and copy the data
2536 to the address of that space. */
2537
2538 /* Deduct words put into registers from the size we must copy. */
2539 if (partial != 0)
2540 {
2541 if (GET_CODE (size) == CONST_INT)
906c4e36 2542 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
2543 else
2544 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
2545 GEN_INT (used), NULL_RTX, 0,
2546 OPTAB_LIB_WIDEN);
bbf6f052
RK
2547 }
2548
2549 /* Get the address of the stack space.
2550 In this case, we do not deal with EXTRA separately.
2551 A single stack adjust will do. */
2552 if (! args_addr)
2553 {
2554 temp = push_block (size, extra, where_pad == downward);
2555 extra = 0;
2556 }
2557 else if (GET_CODE (args_so_far) == CONST_INT)
2558 temp = memory_address (BLKmode,
2559 plus_constant (args_addr,
2560 skip + INTVAL (args_so_far)));
2561 else
2562 temp = memory_address (BLKmode,
2563 plus_constant (gen_rtx (PLUS, Pmode,
2564 args_addr, args_so_far),
2565 skip));
2566
2567 /* TEMP is the address of the block. Copy the data there. */
2568 if (GET_CODE (size) == CONST_INT
2569 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2570 < MOVE_RATIO))
2571 {
2572 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2573 INTVAL (size), align);
2574 goto ret;
2575 }
2576 /* Try the most limited insn first, because there's no point
2577 including more than one in the machine description unless
2578 the more limited one has some advantage. */
2579#ifdef HAVE_movstrqi
2580 if (HAVE_movstrqi
2581 && GET_CODE (size) == CONST_INT
2582 && ((unsigned) INTVAL (size)
2583 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2584 {
c841050e
RS
2585 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2586 xinner, size, GEN_INT (align));
2587 if (pat != 0)
2588 {
2589 emit_insn (pat);
2590 goto ret;
2591 }
bbf6f052
RK
2592 }
2593#endif
2594#ifdef HAVE_movstrhi
2595 if (HAVE_movstrhi
2596 && GET_CODE (size) == CONST_INT
2597 && ((unsigned) INTVAL (size)
2598 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2599 {
c841050e
RS
2600 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2601 xinner, size, GEN_INT (align));
2602 if (pat != 0)
2603 {
2604 emit_insn (pat);
2605 goto ret;
2606 }
bbf6f052
RK
2607 }
2608#endif
2609#ifdef HAVE_movstrsi
2610 if (HAVE_movstrsi)
2611 {
c841050e
RS
2612 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2613 xinner, size, GEN_INT (align));
2614 if (pat != 0)
2615 {
2616 emit_insn (pat);
2617 goto ret;
2618 }
bbf6f052
RK
2619 }
2620#endif
2621#ifdef HAVE_movstrdi
2622 if (HAVE_movstrdi)
2623 {
c841050e
RS
2624 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2625 xinner, size, GEN_INT (align));
2626 if (pat != 0)
2627 {
2628 emit_insn (pat);
2629 goto ret;
2630 }
bbf6f052
RK
2631 }
2632#endif
2633
2634#ifndef ACCUMULATE_OUTGOING_ARGS
2635 /* If the source is referenced relative to the stack pointer,
2636 copy it to another register to stabilize it. We do not need
2637 to do this if we know that we won't be changing sp. */
2638
2639 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2640 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2641 temp = copy_to_reg (temp);
2642#endif
2643
2644 /* Make inhibit_defer_pop nonzero around the library call
2645 to force it to pop the bcopy-arguments right away. */
2646 NO_DEFER_POP;
2647#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2648 emit_library_call (memcpy_libfunc, 0,
bbf6f052 2649 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
2650 convert_to_mode (TYPE_MODE (sizetype),
2651 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2652 TYPE_MODE (sizetype));
bbf6f052 2653#else
d562e42e 2654 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2655 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3b6f75e2
JW
2656 convert_to_mode (TYPE_MODE (integer_type_node),
2657 size,
2658 TREE_UNSIGNED (integer_type_node)),
2659 TYPE_MODE (integer_type_node));
bbf6f052
RK
2660#endif
2661 OK_DEFER_POP;
2662 }
2663 }
2664 else if (partial > 0)
2665 {
2666 /* Scalar partly in registers. */
2667
2668 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2669 int i;
2670 int not_stack;
2671 /* # words of start of argument
2672 that we must make space for but need not store. */
2673 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2674 int args_offset = INTVAL (args_so_far);
2675 int skip;
2676
2677 /* Push padding now if padding above and stack grows down,
2678 or if padding below and stack grows up.
2679 But if space already allocated, this has already been done. */
2680 if (extra && args_addr == 0
2681 && where_pad != none && where_pad != stack_direction)
906c4e36 2682 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2683
2684 /* If we make space by pushing it, we might as well push
2685 the real data. Otherwise, we can leave OFFSET nonzero
2686 and leave the space uninitialized. */
2687 if (args_addr == 0)
2688 offset = 0;
2689
2690 /* Now NOT_STACK gets the number of words that we don't need to
2691 allocate on the stack. */
2692 not_stack = partial - offset;
2693
2694 /* If the partial register-part of the arg counts in its stack size,
2695 skip the part of stack space corresponding to the registers.
2696 Otherwise, start copying to the beginning of the stack space,
2697 by setting SKIP to 0. */
2698#ifndef REG_PARM_STACK_SPACE
2699 skip = 0;
2700#else
2701 skip = not_stack;
2702#endif
2703
2704 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2705 x = validize_mem (force_const_mem (mode, x));
2706
2707 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2708 SUBREGs of such registers are not allowed. */
2709 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2710 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2711 x = copy_to_reg (x);
2712
2713 /* Loop over all the words allocated on the stack for this arg. */
2714 /* We can do it by words, because any scalar bigger than a word
 2715 has a size that is a multiple of a word. */
2716#ifndef PUSH_ARGS_REVERSED
2717 for (i = not_stack; i < size; i++)
2718#else
2719 for (i = size - 1; i >= not_stack; i--)
2720#endif
2721 if (i >= not_stack + offset)
2722 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
2723 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2724 0, args_addr,
2725 GEN_INT (args_offset + ((i - not_stack + skip)
bbf6f052
RK
2726 * UNITS_PER_WORD)));
2727 }
2728 else
2729 {
2730 rtx addr;
2731
2732 /* Push padding now if padding above and stack grows down,
2733 or if padding below and stack grows up.
2734 But if space already allocated, this has already been done. */
2735 if (extra && args_addr == 0
2736 && where_pad != none && where_pad != stack_direction)
906c4e36 2737 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2738
2739#ifdef PUSH_ROUNDING
2740 if (args_addr == 0)
2741 addr = gen_push_operand ();
2742 else
2743#endif
2744 if (GET_CODE (args_so_far) == CONST_INT)
2745 addr
2746 = memory_address (mode,
2747 plus_constant (args_addr, INTVAL (args_so_far)));
2748 else
2749 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2750 args_so_far));
2751
2752 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2753 }
2754
2755 ret:
2756 /* If part should go in registers, copy that part
2757 into the appropriate registers. Do this now, at the end,
2758 since mem-to-mem copies above may do function calls. */
cd048831 2759 if (partial > 0 && reg != 0)
fffa9c1d
JW
2760 {
2761 /* Handle calls that pass values in multiple non-contiguous locations.
2762 The Irix 6 ABI has examples of this. */
2763 if (GET_CODE (reg) == PARALLEL)
2764 emit_group_load (reg, x);
2765 else
2766 move_block_to_reg (REGNO (reg), x, partial, mode);
2767 }
bbf6f052
RK
2768
2769 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 2770 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2771}
2772\f
bbf6f052
RK
2773/* Expand an assignment that stores the value of FROM into TO.
2774 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
2775 (This may contain a QUEUED rtx;
2776 if the value is constant, this rtx is a constant.)
2777 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
2778
2779 SUGGEST_REG is no longer actually used.
2780 It used to mean, copy the value through a register
2781 and return that register, if that is possible.
709f5be1 2782 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
2783
2784rtx
2785expand_assignment (to, from, want_value, suggest_reg)
2786 tree to, from;
2787 int want_value;
2788 int suggest_reg;
2789{
2790 register rtx to_rtx = 0;
2791 rtx result;
2792
2793 /* Don't crash if the lhs of the assignment was erroneous. */
2794
2795 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
2796 {
2797 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2798 return want_value ? result : NULL_RTX;
2799 }
bbf6f052 2800
ca695ac9
JB
2801 if (output_bytecode)
2802 {
2803 tree dest_innermost;
2804
2805 bc_expand_expr (from);
6d6e61ce 2806 bc_emit_instruction (duplicate);
ca695ac9
JB
2807
2808 dest_innermost = bc_expand_address (to);
2809
2810 /* Can't deduce from TYPE that we're dealing with a bitfield, so
0f41302f 2811 take care of it here. */
ca695ac9
JB
2812
2813 bc_store_memory (TREE_TYPE (to), dest_innermost);
2814 return NULL;
2815 }
2816
bbf6f052
RK
2817 /* Assignment of a structure component needs special treatment
2818 if the structure component's rtx is not simply a MEM.
6be58303
JW
2819 Assignment of an array element at a constant index, and assignment of
2820 an array element in an unaligned packed structure field, has the same
2821 problem. */
bbf6f052
RK
2822
2823 if (TREE_CODE (to) == COMPONENT_REF
2824 || TREE_CODE (to) == BIT_FIELD_REF
2825 || (TREE_CODE (to) == ARRAY_REF
6be58303
JW
2826 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2827 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
c7a7ac46 2828 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
bbf6f052
RK
2829 {
2830 enum machine_mode mode1;
2831 int bitsize;
2832 int bitpos;
7bb0943f 2833 tree offset;
bbf6f052
RK
2834 int unsignedp;
2835 int volatilep = 0;
0088fcb1 2836 tree tem;
d78d243c 2837 int alignment;
0088fcb1
RK
2838
2839 push_temp_slots ();
2840 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
bbf6f052
RK
2841 &mode1, &unsignedp, &volatilep);
2842
2843 /* If we are going to use store_bit_field and extract_bit_field,
2844 make sure to_rtx will be safe for multiple use. */
2845
2846 if (mode1 == VOIDmode && want_value)
2847 tem = stabilize_reference (tem);
2848
d78d243c 2849 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
906c4e36 2850 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2851 if (offset != 0)
2852 {
906c4e36 2853 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2854
2855 if (GET_CODE (to_rtx) != MEM)
2856 abort ();
2857 to_rtx = change_address (to_rtx, VOIDmode,
88f63c77
RK
2858 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2859 force_reg (ptr_mode, offset_rtx)));
d78d243c
RS
2860 /* If we have a variable offset, the known alignment
2861 is only that of the innermost structure containing the field.
2862 (Actually, we could sometimes do better by using the
2863 align of an element of the innermost array, but no need.) */
2864 if (TREE_CODE (to) == COMPONENT_REF
2865 || TREE_CODE (to) == BIT_FIELD_REF)
2866 alignment
2867 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
7bb0943f 2868 }
bbf6f052
RK
2869 if (volatilep)
2870 {
2871 if (GET_CODE (to_rtx) == MEM)
01188446
JW
2872 {
2873 /* When the offset is zero, to_rtx is the address of the
2874 structure we are storing into, and hence may be shared.
2875 We must make a new MEM before setting the volatile bit. */
2876 if (offset == 0)
2877 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2878 MEM_VOLATILE_P (to_rtx) = 1;
2879 }
bbf6f052
RK
2880#if 0 /* This was turned off because, when a field is volatile
2881 in an object which is not volatile, the object may be in a register,
2882 and then we would abort over here. */
2883 else
2884 abort ();
2885#endif
2886 }
2887
2888 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2889 (want_value
2890 /* Spurious cast makes HPUX compiler happy. */
2891 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2892 : VOIDmode),
2893 unsignedp,
2894 /* Required alignment of containing datum. */
d78d243c 2895 alignment,
bbf6f052
RK
2896 int_size_in_bytes (TREE_TYPE (tem)));
2897 preserve_temp_slots (result);
2898 free_temp_slots ();
0088fcb1 2899 pop_temp_slots ();
bbf6f052 2900
709f5be1
RS
2901 /* If the value is meaningful, convert RESULT to the proper mode.
2902 Otherwise, return nothing. */
5ffe63ed
RS
2903 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2904 TYPE_MODE (TREE_TYPE (from)),
2905 result,
2906 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 2907 : NULL_RTX);
bbf6f052
RK
2908 }
2909
cd1db108
RS
2910 /* If the rhs is a function call and its value is not an aggregate,
2911 call the function before we start to compute the lhs.
2912 This is needed for correct code for cases such as
2913 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
2914 requires loading up part of an address in a separate insn.
2915
2916 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2917 a promoted variable where the zero- or sign- extension needs to be done.
2918 Handling this in the normal way is safe because no computation is done
2919 before the call. */
2920 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 2921 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1ad87b63 2922 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 2923 {
0088fcb1
RK
2924 rtx value;
2925
2926 push_temp_slots ();
2927 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108
RS
2928 if (to_rtx == 0)
2929 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
aaf87c45 2930
fffa9c1d
JW
2931 /* Handle calls that return values in multiple non-contiguous locations.
2932 The Irix 6 ABI has examples of this. */
2933 if (GET_CODE (to_rtx) == PARALLEL)
2934 emit_group_load (to_rtx, value);
2935 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 2936 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 2937 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45
JL
2938 else
2939 emit_move_insn (to_rtx, value);
cd1db108
RS
2940 preserve_temp_slots (to_rtx);
2941 free_temp_slots ();
0088fcb1 2942 pop_temp_slots ();
709f5be1 2943 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
2944 }
2945
bbf6f052
RK
2946 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2947 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2948
2949 if (to_rtx == 0)
906c4e36 2950 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
bbf6f052 2951
86d38d25
RS
2952 /* Don't move directly into a return register. */
2953 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2954 {
0088fcb1
RK
2955 rtx temp;
2956
2957 push_temp_slots ();
2958 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
2959 emit_move_insn (to_rtx, temp);
2960 preserve_temp_slots (to_rtx);
2961 free_temp_slots ();
0088fcb1 2962 pop_temp_slots ();
709f5be1 2963 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
2964 }
2965
bbf6f052
RK
2966 /* In case we are returning the contents of an object which overlaps
2967 the place the value is being stored, use a safe function when copying
2968 a value through a pointer into a structure value return block. */
2969 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2970 && current_function_returns_struct
2971 && !current_function_returns_pcc_struct)
2972 {
0088fcb1
RK
2973 rtx from_rtx, size;
2974
2975 push_temp_slots ();
33a20d10
RK
2976 size = expr_size (from);
2977 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
2978
2979#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2980 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
2981 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2982 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
2983 convert_to_mode (TYPE_MODE (sizetype),
2984 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2985 TYPE_MODE (sizetype));
bbf6f052 2986#else
d562e42e 2987 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
2988 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2989 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
2990 convert_to_mode (TYPE_MODE (integer_type_node),
2991 size, TREE_UNSIGNED (integer_type_node)),
2992 TYPE_MODE (integer_type_node));
bbf6f052
RK
2993#endif
2994
2995 preserve_temp_slots (to_rtx);
2996 free_temp_slots ();
0088fcb1 2997 pop_temp_slots ();
709f5be1 2998 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
2999 }
3000
3001 /* Compute FROM and store the value in the rtx we got. */
3002
0088fcb1 3003 push_temp_slots ();
bbf6f052
RK
3004 result = store_expr (from, to_rtx, want_value);
3005 preserve_temp_slots (result);
3006 free_temp_slots ();
0088fcb1 3007 pop_temp_slots ();
709f5be1 3008 return want_value ? result : NULL_RTX;
bbf6f052
RK
3009}
3010
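/* Usage sketch: a C statement `a = b;' is expanded as
   expand_assignment (a, b, 0, 0) with the value discarded, while a use
   such as `c = (a = b);' passes WANT_VALUE == 1 so an rtx for the
   stored value comes back for the containing expression.  */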
3011/* Generate code for computing expression EXP,
3012 and storing the value into TARGET.
bbf6f052
RK
3013 TARGET may contain a QUEUED rtx.
3014
709f5be1
RS
3015 If WANT_VALUE is nonzero, return a copy of the value
3016 not in TARGET, so that we can be sure to use the proper
3017 value in a containing expression even if TARGET has something
3018 else stored in it. If possible, we copy the value through a pseudo
3019 and return that pseudo. Or, if the value is constant, we try to
3020 return the constant. In some cases, we return a pseudo
3021 copied *from* TARGET.
3022
3023 If the mode is BLKmode then we may return TARGET itself.
3024 It turns out that in BLKmode it doesn't cause a problem.
3025 because C has no operators that could combine two different
3026 assignments into the same BLKmode object with different values
3027 with no sequence point. Will other languages need this to
3028 be more thorough?
3029
3030 If WANT_VALUE is 0, we return NULL, to make sure
3031 to catch quickly any cases where the caller uses the value
3032 and fails to set WANT_VALUE. */
bbf6f052
RK
3033
3034rtx
709f5be1 3035store_expr (exp, target, want_value)
bbf6f052
RK
3036 register tree exp;
3037 register rtx target;
709f5be1 3038 int want_value;
bbf6f052
RK
3039{
3040 register rtx temp;
3041 int dont_return_target = 0;
3042
3043 if (TREE_CODE (exp) == COMPOUND_EXPR)
3044 {
3045 /* Perform first part of compound expression, then assign from second
3046 part. */
3047 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3048 emit_queue ();
709f5be1 3049 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
3050 }
3051 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3052 {
3053 /* For conditional expression, get safe form of the target. Then
3054 test the condition, doing the appropriate assignment on either
3055 side. This avoids the creation of unnecessary temporaries.
3056 For non-BLKmode, it is more efficient not to do this. */
3057
3058 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3059
3060 emit_queue ();
3061 target = protect_from_queue (target, 1);
3062
dabf8373 3063 do_pending_stack_adjust ();
bbf6f052
RK
3064 NO_DEFER_POP;
3065 jumpifnot (TREE_OPERAND (exp, 0), lab1);
709f5be1 3066 store_expr (TREE_OPERAND (exp, 1), target, 0);
bbf6f052
RK
3067 emit_queue ();
3068 emit_jump_insn (gen_jump (lab2));
3069 emit_barrier ();
3070 emit_label (lab1);
709f5be1 3071 store_expr (TREE_OPERAND (exp, 2), target, 0);
bbf6f052
RK
3072 emit_queue ();
3073 emit_label (lab2);
3074 OK_DEFER_POP;
709f5be1 3075 return want_value ? target : NULL_RTX;
bbf6f052 3076 }
709f5be1 3077 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
bbf6f052
RK
3078 && GET_MODE (target) != BLKmode)
3079 /* If target is in memory and caller wants value in a register instead,
3080 arrange that. Pass TARGET as target for expand_expr so that,
709f5be1 3081 if EXP is another assignment, WANT_VALUE will be nonzero for it.
c2e6aff6
RS
3082 We know expand_expr will not use the target in that case.
3083 Don't do this if TARGET is volatile because we are supposed
3084 to write it and then read it. */
bbf6f052 3085 {
906c4e36 3086 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
3087 GET_MODE (target), 0);
3088 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3089 temp = copy_to_reg (temp);
3090 dont_return_target = 1;
3091 }
3092 else if (queued_subexp_p (target))
709f5be1
RS
3093 /* If target contains a postincrement, let's not risk
3094 using it as the place to generate the rhs. */
bbf6f052
RK
3095 {
3096 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3097 {
3098 /* Expand EXP into a new pseudo. */
3099 temp = gen_reg_rtx (GET_MODE (target));
3100 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3101 }
3102 else
906c4e36 3103 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
3104
3105 /* If target is volatile, ANSI requires accessing the value
3106 *from* the target, if it is accessed. So make that happen.
3107 In no case return the target itself. */
3108 if (! MEM_VOLATILE_P (target) && want_value)
3109 dont_return_target = 1;
bbf6f052 3110 }
1499e0a8
RK
3111 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
 3112 /* If this is a scalar in a register that is stored in a wider mode
3113 than the declared mode, compute the result into its declared mode
3114 and then convert to the wider mode. Our value is the computed
3115 expression. */
3116 {
5a32d038 3117 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3118 which will often result in some optimizations. Do the conversion
3119 in two steps: first change the signedness, if needed, then
3120 the extend. */
5a32d038 3121 if (! want_value)
f635a84d
RK
3122 {
3123 if (TREE_UNSIGNED (TREE_TYPE (exp))
3124 != SUBREG_PROMOTED_UNSIGNED_P (target))
3125 exp
3126 = convert
3127 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3128 TREE_TYPE (exp)),
3129 exp);
3130
3131 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3132 SUBREG_PROMOTED_UNSIGNED_P (target)),
3133 exp);
3134 }
5a32d038 3135
1499e0a8 3136 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 3137
766f36c7 3138 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
3139 the access now so it gets done only once. Likewise if
3140 it contains TARGET. */
3141 if (GET_CODE (temp) == MEM && want_value
3142 && (MEM_VOLATILE_P (temp)
3143 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
3144 temp = copy_to_reg (temp);
3145
b258707c
RS
3146 /* If TEMP is a VOIDmode constant, use convert_modes to make
3147 sure that we properly convert it. */
3148 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3149 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3150 TYPE_MODE (TREE_TYPE (exp)), temp,
3151 SUBREG_PROMOTED_UNSIGNED_P (target));
3152
1499e0a8
RK
3153 convert_move (SUBREG_REG (target), temp,
3154 SUBREG_PROMOTED_UNSIGNED_P (target));
709f5be1 3155 return want_value ? temp : NULL_RTX;
1499e0a8 3156 }
bbf6f052
RK
3157 else
3158 {
3159 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 3160 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
3161 If TARGET is a volatile mem ref, either return TARGET
3162 or return a reg copied *from* TARGET; ANSI requires this.
3163
3164 Otherwise, if TEMP is not TARGET, return TEMP
3165 if it is constant (for efficiency),
3166 or if we really want the correct value. */
bbf6f052
RK
3167 if (!(target && GET_CODE (target) == REG
3168 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1
RS
3169 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3170 && temp != target
3171 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
3172 dont_return_target = 1;
3173 }
3174
b258707c
RS
3175 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3176 the same as that of TARGET, adjust the constant. This is needed, for
3177 example, in case it is a CONST_DOUBLE and we want only a word-sized
3178 value. */
3179 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3180 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
3181 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3182 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3183 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3184
bbf6f052
RK
3185 /* If value was not generated in the target, store it there.
 3186 Convert the value to TARGET's type first if necessary. */
3187
3188 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3189 {
3190 target = protect_from_queue (target, 1);
3191 if (GET_MODE (temp) != GET_MODE (target)
3192 && GET_MODE (temp) != VOIDmode)
3193 {
3194 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3195 if (dont_return_target)
3196 {
3197 /* In this case, we will return TEMP,
3198 so make sure it has the proper mode.
3199 But don't forget to store the value into TARGET. */
3200 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3201 emit_move_insn (target, temp);
3202 }
3203 else
3204 convert_move (target, temp, unsignedp);
3205 }
3206
3207 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3208 {
3209 /* Handle copying a string constant into an array.
3210 The string constant may be shorter than the array.
3211 So copy just the string's actual length, and clear the rest. */
3212 rtx size;
22619c3f 3213 rtx addr;
bbf6f052 3214
e87b4f3f
RS
3215 /* Get the size of the data type of the string,
3216 which is actually the size of the target. */
3217 size = expr_size (exp);
3218 if (GET_CODE (size) == CONST_INT
3219 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3220 emit_block_move (target, temp, size,
3221 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3222 else
bbf6f052 3223 {
e87b4f3f
RS
3224 /* Compute the size of the data to copy from the string. */
3225 tree copy_size
c03b7665 3226 = size_binop (MIN_EXPR,
b50d17a1 3227 make_tree (sizetype, size),
c03b7665
RK
3228 convert (sizetype,
3229 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
3230 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3231 VOIDmode, 0);
e87b4f3f
RS
3232 rtx label = 0;
3233
3234 /* Copy that much. */
3235 emit_block_move (target, temp, copy_size_rtx,
3236 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3237
88f63c77
RK
3238 /* Figure out how much is left in TARGET that we have to clear.
3239 Do all calculations in ptr_mode. */
3240
3241 addr = XEXP (target, 0);
3242 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3243
e87b4f3f
RS
3244 if (GET_CODE (copy_size_rtx) == CONST_INT)
3245 {
88f63c77 3246 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
22619c3f 3247 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
e87b4f3f
RS
3248 }
3249 else
3250 {
88f63c77
RK
3251 addr = force_reg (ptr_mode, addr);
3252 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
3253 copy_size_rtx, NULL_RTX, 0,
3254 OPTAB_LIB_WIDEN);
e87b4f3f 3255
88f63c77 3256 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
3257 copy_size_rtx, NULL_RTX, 0,
3258 OPTAB_LIB_WIDEN);
e87b4f3f 3259
906c4e36 3260 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
3261 GET_MODE (size), 0, 0);
3262 label = gen_label_rtx ();
3263 emit_jump_insn (gen_blt (label));
3264 }
3265
3266 if (size != const0_rtx)
3267 {
bbf6f052 3268#ifdef TARGET_MEM_FUNCTIONS
3b6f75e2
JW
3269 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3270 addr, Pmode,
3271 const0_rtx, TYPE_MODE (integer_type_node),
3272 convert_to_mode (TYPE_MODE (sizetype),
3273 size,
3274 TREE_UNSIGNED (sizetype)),
3275 TYPE_MODE (sizetype));
bbf6f052 3276#else
d562e42e 3277 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3b6f75e2
JW
3278 addr, Pmode,
3279 convert_to_mode (TYPE_MODE (integer_type_node),
3280 size,
3281 TREE_UNSIGNED (integer_type_node)),
3282 TYPE_MODE (integer_type_node));
bbf6f052 3283#endif
e87b4f3f 3284 }
22619c3f 3285
e87b4f3f
RS
3286 if (label)
3287 emit_label (label);
bbf6f052
RK
3288 }
3289 }
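  /* Worked example of the string case above: for `char buf[8] = "abc";'
     the STRING_CST supplies 4 bytes (including the terminating null),
     so 4 bytes are block-moved and the remaining 8 - 4 = 4 bytes of
     BUF are cleared through the memset/bzero call.  */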
fffa9c1d
JW
3290 /* Handle calls that return values in multiple non-contiguous locations.
3291 The Irix 6 ABI has examples of this. */
3292 else if (GET_CODE (target) == PARALLEL)
3293 emit_group_load (target, temp);
bbf6f052
RK
3294 else if (GET_MODE (temp) == BLKmode)
3295 emit_block_move (target, temp, expr_size (exp),
3296 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3297 else
3298 emit_move_insn (target, temp);
3299 }
709f5be1 3300
766f36c7
RK
3301 /* If we don't want a value, return NULL_RTX. */
3302 if (! want_value)
3303 return NULL_RTX;
3304
3305 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3306 ??? The latter test doesn't seem to make sense. */
3307 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 3308 return temp;
766f36c7
RK
3309
3310 /* Return TARGET itself if it is a hard register. */
3311 else if (want_value && GET_MODE (target) != BLKmode
3312 && ! (GET_CODE (target) == REG
3313 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 3314 return copy_to_reg (target);
766f36c7
RK
3315
3316 else
709f5be1 3317 return target;
bbf6f052
RK
3318}
3319\f
9de08200
RK
3320/* Return 1 if EXP just contains zeros. */
3321
3322static int
3323is_zeros_p (exp)
3324 tree exp;
3325{
3326 tree elt;
3327
3328 switch (TREE_CODE (exp))
3329 {
3330 case CONVERT_EXPR:
3331 case NOP_EXPR:
3332 case NON_LVALUE_EXPR:
3333 return is_zeros_p (TREE_OPERAND (exp, 0));
3334
3335 case INTEGER_CST:
3336 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3337
3338 case COMPLEX_CST:
3339 return
3340 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3341
3342 case REAL_CST:
3343 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3344
3345 case CONSTRUCTOR:
e1a43f73
PB
3346 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3347 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
3348 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3349 if (! is_zeros_p (TREE_VALUE (elt)))
3350 return 0;
3351
3352 return 1;
3353 }
3354
3355 return 0;
3356}
3357
3358/* Return 1 if EXP contains mostly (3/4) zeros. */
3359
3360static int
3361mostly_zeros_p (exp)
3362 tree exp;
3363{
9de08200
RK
3364 if (TREE_CODE (exp) == CONSTRUCTOR)
3365 {
e1a43f73
PB
3366 int elts = 0, zeros = 0;
3367 tree elt = CONSTRUCTOR_ELTS (exp);
3368 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3369 {
3370 /* If there are no ranges of true bits, it is all zero. */
3371 return elt == NULL_TREE;
3372 }
3373 for (; elt; elt = TREE_CHAIN (elt))
3374 {
3375 /* We do not handle the case where the index is a RANGE_EXPR,
3376 so the statistic will be somewhat inaccurate.
 3377 We do make a more accurate count in store_constructor itself,
 3378 and since this function is only used for nested array elements,
0f41302f 3379 this should be close enough. */
e1a43f73
PB
3380 if (mostly_zeros_p (TREE_VALUE (elt)))
3381 zeros++;
3382 elts++;
3383 }
9de08200
RK
3384
3385 return 4 * zeros >= 3 * elts;
3386 }
3387
3388 return is_zeros_p (exp);
3389}
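/* Example of the 3/4 density test above: a CONSTRUCTOR with 16
   elements of which 12 are zero satisfies 4 * 12 >= 3 * 16 (48 >= 48),
   so it counts as mostly zeros and is worth pre-clearing.  */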
3390\f
e1a43f73
PB
3391/* Helper function for store_constructor.
3392 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3393 TYPE is the type of the CONSTRUCTOR, not the element type.
23ccec44
JW
3394 CLEARED is as for store_constructor.
3395
3396 This provides a recursive shortcut back to store_constructor when it isn't
3397 necessary to go through store_field. This is so that we can pass through
3398 the cleared field to let store_constructor know that we may not have to
3399 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
3400
3401static void
3402store_constructor_field (target, bitsize, bitpos,
3403 mode, exp, type, cleared)
3404 rtx target;
3405 int bitsize, bitpos;
3406 enum machine_mode mode;
3407 tree exp, type;
3408 int cleared;
3409{
3410 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
3411 && bitpos % BITS_PER_UNIT == 0
3412 /* If we have a non-zero bitpos for a register target, then we just
3413 let store_field do the bitfield handling. This is unlikely to
 3414 generate unnecessary clear instructions anyway. */
3415 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 3416 {
126e5b0d
JW
3417 if (bitpos != 0)
3418 target = change_address (target, VOIDmode,
3419 plus_constant (XEXP (target, 0),
3420 bitpos / BITS_PER_UNIT));
3421 store_constructor (exp, target, cleared);
e1a43f73
PB
3422 }
3423 else
3424 store_field (target, bitsize, bitpos, mode, exp,
3425 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3426 int_size_in_bytes (type));
3427}
3428
bbf6f052 3429/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 3430 TARGET is either a REG or a MEM.
0f41302f 3431 CLEARED is true if TARGET is known to have been zero'd. */
bbf6f052
RK
3432
3433static void
e1a43f73 3434store_constructor (exp, target, cleared)
bbf6f052
RK
3435 tree exp;
3436 rtx target;
e1a43f73 3437 int cleared;
bbf6f052 3438{
4af3895e
JVA
3439 tree type = TREE_TYPE (exp);
3440
bbf6f052
RK
3441 /* We know our target cannot conflict, since safe_from_p has been called. */
3442#if 0
3443 /* Don't try copying piece by piece into a hard register
3444 since that is vulnerable to being clobbered by EXP.
3445 Instead, construct in a pseudo register and then copy it all. */
3446 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3447 {
3448 rtx temp = gen_reg_rtx (GET_MODE (target));
e1a43f73 3449 store_constructor (exp, temp, 0);
bbf6f052
RK
3450 emit_move_insn (target, temp);
3451 return;
3452 }
3453#endif
3454
e44842fe
RK
3455 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3456 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
3457 {
3458 register tree elt;
3459
4af3895e 3460 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
3461 if (TREE_CODE (type) == UNION_TYPE
3462 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 3463 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
4af3895e
JVA
3464
3465 /* If we are building a static constructor into a register,
3466 set the initial value as zero so we can fold the value into
67225c15
RK
3467 a constant. But if more than one register is involved,
3468 this probably loses. */
3469 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3470 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
3471 {
3472 if (! cleared)
3473 emit_move_insn (target, const0_rtx);
4af3895e 3474
9de08200
RK
3475 cleared = 1;
3476 }
3477
3478 /* If the constructor has fewer fields than the structure
3479 or if we are initializing the structure to mostly zeros,
bbf6f052 3480 clear the whole structure first. */
9de08200
RK
3481 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3482 != list_length (TYPE_FIELDS (type)))
3483 || mostly_zeros_p (exp))
3484 {
3485 if (! cleared)
3486 clear_storage (target, expr_size (exp),
3487 TYPE_ALIGN (type) / BITS_PER_UNIT);
3488
3489 cleared = 1;
3490 }
bbf6f052
RK
3491 else
3492 /* Inform later passes that the old value is dead. */
3493 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3494
3495 /* Store each element of the constructor into
3496 the corresponding field of TARGET. */
3497
3498 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3499 {
3500 register tree field = TREE_PURPOSE (elt);
3501 register enum machine_mode mode;
3502 int bitsize;
b50d17a1 3503 int bitpos = 0;
bbf6f052 3504 int unsignedp;
b50d17a1
RK
3505 tree pos, constant = 0, offset = 0;
3506 rtx to_rtx = target;
bbf6f052 3507
f32fd778
RS
3508 /* Just ignore missing fields.
3509 We cleared the whole structure, above,
3510 if any fields are missing. */
3511 if (field == 0)
3512 continue;
3513
e1a43f73
PB
3514 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3515 continue;
9de08200 3516
bbf6f052
RK
3517 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3518 unsignedp = TREE_UNSIGNED (field);
3519 mode = DECL_MODE (field);
3520 if (DECL_BIT_FIELD (field))
3521 mode = VOIDmode;
3522
b50d17a1
RK
3523 pos = DECL_FIELD_BITPOS (field);
3524 if (TREE_CODE (pos) == INTEGER_CST)
3525 constant = pos;
3526 else if (TREE_CODE (pos) == PLUS_EXPR
3527 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3528 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3529 else
3530 offset = pos;
3531
3532 if (constant)
cd11b87e 3533 bitpos = TREE_INT_CST_LOW (constant);
b50d17a1
RK
3534
3535 if (offset)
3536 {
3537 rtx offset_rtx;
3538
3539 if (contains_placeholder_p (offset))
3540 offset = build (WITH_RECORD_EXPR, sizetype,
3541 offset, exp);
bbf6f052 3542
b50d17a1
RK
3543 offset = size_binop (FLOOR_DIV_EXPR, offset,
3544 size_int (BITS_PER_UNIT));
bbf6f052 3545
b50d17a1
RK
3546 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3547 if (GET_CODE (to_rtx) != MEM)
3548 abort ();
3549
3550 to_rtx
3551 = change_address (to_rtx, VOIDmode,
88f63c77
RK
3552 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3553 force_reg (ptr_mode, offset_rtx)));
b50d17a1 3554 }
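	  /* A worked example of the position split just computed: if
	     DECL_FIELD_BITPOS for the field is the hypothetical tree
	     (PLUS_EXPR (MULT_EXPR n 64) 24), where N is only known at
	     run time, the code above sets CONSTANT to 24 (so BITPOS is
	     24 bits) and OFFSET to (MULT_EXPR n 64); the FLOOR_DIV_EXPR
	     by BITS_PER_UNIT then turns OFFSET into the byte offset
	     n * 8, which is expanded to RTL and added to the address.  */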
cf04eb80
RK
3555 if (TREE_READONLY (field))
3556 {
9151b3bf
RK
3557 if (GET_CODE (to_rtx) == MEM)
3558 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3559 XEXP (to_rtx, 0));
cf04eb80
RK
3560 RTX_UNCHANGING_P (to_rtx) = 1;
3561 }
3562
e1a43f73
PB
3563 store_constructor_field (to_rtx, bitsize, bitpos,
3564 mode, TREE_VALUE (elt), type, cleared);
bbf6f052
RK
3565 }
3566 }
4af3895e 3567 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
3568 {
3569 register tree elt;
3570 register int i;
e1a43f73 3571 int need_to_clear;
4af3895e 3572 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
3573 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3574 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 3575 tree elttype = TREE_TYPE (type);
bbf6f052 3576
e1a43f73
PB
3577 /* If the constructor has fewer elements than the array,
3578	 clear the whole array first.  Similarly if this is a
3579	 static constructor of a non-BLKmode object.  */
3580 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3581 need_to_clear = 1;
3582 else
3583 {
3584 HOST_WIDE_INT count = 0, zero_count = 0;
3585 need_to_clear = 0;
3586 /* This loop is a more accurate version of the loop in
3587 mostly_zeros_p (it handles RANGE_EXPR in an index).
3588 It is also needed to check for missing elements. */
3589 for (elt = CONSTRUCTOR_ELTS (exp);
3590 elt != NULL_TREE;
3591	       elt = TREE_CHAIN (elt))
3592 {
3593 tree index = TREE_PURPOSE (elt);
3594 HOST_WIDE_INT this_node_count;
3595 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3596 {
3597 tree lo_index = TREE_OPERAND (index, 0);
3598 tree hi_index = TREE_OPERAND (index, 1);
3599 if (TREE_CODE (lo_index) != INTEGER_CST
3600 || TREE_CODE (hi_index) != INTEGER_CST)
3601 {
3602 need_to_clear = 1;
3603 break;
3604 }
3605 this_node_count = TREE_INT_CST_LOW (hi_index)
3606 - TREE_INT_CST_LOW (lo_index) + 1;
3607 }
3608 else
3609 this_node_count = 1;
3610 count += this_node_count;
3611 if (mostly_zeros_p (TREE_VALUE (elt)))
3612 zero_count += this_node_count;
3613 }
8e958f70 3614 /* Clear the entire array first if there are any missing elements,
0f41302f 3615 or if the incidence of zero elements is >= 75%. */
8e958f70
PB
3616 if (count < maxelt - minelt + 1
3617 || 4 * zero_count >= 3 * count)
e1a43f73
PB
3618 need_to_clear = 1;
3619 }
3620 if (need_to_clear)
9de08200
RK
3621 {
3622 if (! cleared)
3623 clear_storage (target, expr_size (exp),
3624 TYPE_ALIGN (type) / BITS_PER_UNIT);
9de08200
RK
3625 cleared = 1;
3626 }
bbf6f052
RK
3627 else
3628 /* Inform later passes that the old value is dead. */
3629 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3630
3631 /* Store each element of the constructor into
3632 the corresponding element of TARGET, determined
3633 by counting the elements. */
3634 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3635 elt;
3636 elt = TREE_CHAIN (elt), i++)
3637 {
3638 register enum machine_mode mode;
3639 int bitsize;
3640 int bitpos;
3641 int unsignedp;
e1a43f73 3642 tree value = TREE_VALUE (elt);
03dc44a6
RS
3643 tree index = TREE_PURPOSE (elt);
3644 rtx xtarget = target;
bbf6f052 3645
e1a43f73
PB
3646 if (cleared && is_zeros_p (value))
3647 continue;
9de08200 3648
bbf6f052
RK
3649 mode = TYPE_MODE (elttype);
3650 bitsize = GET_MODE_BITSIZE (mode);
3651 unsignedp = TREE_UNSIGNED (elttype);
3652
e1a43f73
PB
3653 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3654 {
3655 tree lo_index = TREE_OPERAND (index, 0);
3656 tree hi_index = TREE_OPERAND (index, 1);
3657 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3658 struct nesting *loop;
05c0b405
PB
3659 HOST_WIDE_INT lo, hi, count;
3660 tree position;
e1a43f73 3661
0f41302f 3662 /* If the range is constant and "small", unroll the loop. */
e1a43f73 3663 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
3664 && TREE_CODE (hi_index) == INTEGER_CST
3665 && (lo = TREE_INT_CST_LOW (lo_index),
3666 hi = TREE_INT_CST_LOW (hi_index),
3667 count = hi - lo + 1,
3668 (GET_CODE (target) != MEM
3669 || count <= 2
3670 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3671 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3672 <= 40 * 8))))
e1a43f73 3673 {
05c0b405
PB
3674 lo -= minelt; hi -= minelt;
3675 for (; lo <= hi; lo++)
e1a43f73 3676 {
05c0b405
PB
3677 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3678 store_constructor_field (target, bitsize, bitpos,
3679 mode, value, type, cleared);
e1a43f73
PB
3680 }
3681 }
3682 else
3683 {
3684 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3685 loop_top = gen_label_rtx ();
3686 loop_end = gen_label_rtx ();
3687
3688 unsignedp = TREE_UNSIGNED (domain);
3689
3690 index = build_decl (VAR_DECL, NULL_TREE, domain);
3691
3692 DECL_RTL (index) = index_r
3693 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3694 &unsignedp, 0));
3695
3696 if (TREE_CODE (value) == SAVE_EXPR
3697 && SAVE_EXPR_RTL (value) == 0)
3698 {
0f41302f
MS
3699 /* Make sure value gets expanded once before the
3700 loop. */
e1a43f73
PB
3701 expand_expr (value, const0_rtx, VOIDmode, 0);
3702 emit_queue ();
3703 }
3704 store_expr (lo_index, index_r, 0);
3705 loop = expand_start_loop (0);
3706
0f41302f 3707 /* Assign value to element index. */
e1a43f73
PB
3708 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3709 size_int (BITS_PER_UNIT));
3710 position = size_binop (MULT_EXPR,
3711 size_binop (MINUS_EXPR, index,
3712 TYPE_MIN_VALUE (domain)),
3713 position);
3714 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3715 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3716 xtarget = change_address (target, mode, addr);
3717 if (TREE_CODE (value) == CONSTRUCTOR)
05c0b405 3718 store_constructor (value, xtarget, cleared);
e1a43f73
PB
3719 else
3720 store_expr (value, xtarget, 0);
3721
3722 expand_exit_loop_if_false (loop,
3723 build (LT_EXPR, integer_type_node,
3724 index, hi_index));
3725
3726 expand_increment (build (PREINCREMENT_EXPR,
3727 TREE_TYPE (index),
7b8b9722 3728 index, integer_one_node), 0, 0);
e1a43f73
PB
3729 expand_end_loop ();
3730 emit_label (loop_end);
3731
3732		  /* Needed by stupid register allocation, to extend the
3733 lifetime of pseudo-regs used by target past the end
3734 of the loop. */
3735 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3736 }
3737 }
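	      /* The unroll-or-loop choice above is governed by the
		 constants in the guard: a constant range is unrolled
		 when the target is not memory, when it spans at most
		 two elements, or when the unrolled stores would cover
		 at most 40 * 8 bits (40 bytes).  For example, with a
		 32-bit element type a range of ten elements needs
		 10 * 32 = 320 bits, exactly the limit, and is
		 unrolled; an eleventh element would tip it over to
		 the run-time loop.  */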
3738 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 3739 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 3740 {
e1a43f73 3741 rtx pos_rtx, addr;
03dc44a6
RS
3742 tree position;
3743
5b6c44ff
RK
3744 if (index == 0)
3745 index = size_int (i);
3746
e1a43f73
PB
3747 if (minelt)
3748 index = size_binop (MINUS_EXPR, index,
3749 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
3750 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3751 size_int (BITS_PER_UNIT));
3752 position = size_binop (MULT_EXPR, index, position);
03dc44a6
RS
3753 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3754 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3755 xtarget = change_address (target, mode, addr);
e1a43f73 3756 store_expr (value, xtarget, 0);
03dc44a6
RS
3757 }
3758 else
3759 {
3760 if (index != 0)
7c314719 3761 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
3762 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3763 else
3764 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
e1a43f73
PB
3765 store_constructor_field (target, bitsize, bitpos,
3766 mode, value, type, cleared);
03dc44a6 3767 }
bbf6f052
RK
3768 }
3769 }
071a6595
PB
3770 /* set constructor assignments */
3771 else if (TREE_CODE (type) == SET_TYPE)
3772 {
e1a43f73 3773 tree elt = CONSTRUCTOR_ELTS (exp);
071a6595
PB
3774 rtx xtarget = XEXP (target, 0);
3775 int set_word_size = TYPE_ALIGN (type);
e1a43f73 3776 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
3777 tree domain = TYPE_DOMAIN (type);
3778 tree domain_min, domain_max, bitlength;
3779
9faa82d8 3780 /* The default implementation strategy is to extract the constant
071a6595
PB
3781 parts of the constructor, use that to initialize the target,
3782 and then "or" in whatever non-constant ranges we need in addition.
3783
3784 If a large set is all zero or all ones, it is
3785 probably better to set it using memset (if available) or bzero.
3786	 Also, if a large set has just a single range, it may be
3787	 better to first clear the whole set (using
0f41302f 3788	 bzero/memset) and then set the bits we want.  */
071a6595 3789
0f41302f 3790 /* Check for all zeros. */
e1a43f73 3791 if (elt == NULL_TREE)
071a6595 3792 {
e1a43f73
PB
3793 if (!cleared)
3794 clear_storage (target, expr_size (exp),
3795 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
3796 return;
3797 }
3798
071a6595
PB
3799 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3800 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3801 bitlength = size_binop (PLUS_EXPR,
3802 size_binop (MINUS_EXPR, domain_max, domain_min),
3803 size_one_node);
3804
e1a43f73
PB
3805 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3806 abort ();
3807 nbits = TREE_INT_CST_LOW (bitlength);
3808
3809 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3810 are "complicated" (more than one range), initialize (the
3811 constant parts) by copying from a constant. */
3812 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3813 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 3814 {
b4ee5a72
PB
3815 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3816 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 3817 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72
PB
3818 HOST_WIDE_INT word = 0;
3819 int bit_pos = 0;
3820 int ibit = 0;
0f41302f 3821 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 3822 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 3823 for (;;)
071a6595 3824 {
b4ee5a72
PB
3825 if (bit_buffer[ibit])
3826 {
b09f3348 3827 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
3828 word |= (1 << (set_word_size - 1 - bit_pos));
3829 else
3830 word |= 1 << bit_pos;
3831 }
3832 bit_pos++; ibit++;
3833 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 3834 {
e1a43f73
PB
3835 if (word != 0 || ! cleared)
3836 {
3837 rtx datum = GEN_INT (word);
3838 rtx to_rtx;
0f41302f
MS
3839 /* The assumption here is that it is safe to use
3840 XEXP if the set is multi-word, but not if
3841 it's single-word. */
e1a43f73
PB
3842 if (GET_CODE (target) == MEM)
3843 {
3844 to_rtx = plus_constant (XEXP (target, 0), offset);
3845 to_rtx = change_address (target, mode, to_rtx);
3846 }
3847 else if (offset == 0)
3848 to_rtx = target;
3849 else
3850 abort ();
3851 emit_move_insn (to_rtx, datum);
3852 }
b4ee5a72
PB
3853 if (ibit == nbits)
3854 break;
3855 word = 0;
3856 bit_pos = 0;
3857 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
3858 }
3859 }
071a6595 3860 }
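	  /* A worked example of the packing loop above, assuming an
	     8-bit SET_WORD_SIZE and a constructor in which only bits
	     0 and 1 are set.  On a little-endian target the loop
	     accumulates

		word |= 1 << 0;    word |= 1 << 1;	==> word == 0x03

	     while with BYTES_BIG_ENDIAN it fills from the other end,

		word |= 1 << (8 - 1 - 0);    word |= 1 << (8 - 1 - 1);  ==> 0xc0

	     and either way a single emit_move_insn then stores the
	     finished word.  */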
e1a43f73
PB
3861 else if (!cleared)
3862 {
0f41302f 3863 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
3864 if (TREE_CHAIN (elt) != NULL_TREE
3865 || (TREE_PURPOSE (elt) == NULL_TREE
3866 ? nbits != 1
3867 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3868 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3869 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3870 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3871 != nbits))))
3872 clear_storage (target, expr_size (exp),
3873 TYPE_ALIGN (type) / BITS_PER_UNIT);
3874 }
3875
3876 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
3877 {
3878 /* start of range of element or NULL */
3879 tree startbit = TREE_PURPOSE (elt);
3880 /* end of range of element, or element value */
3881 tree endbit = TREE_VALUE (elt);
3882 HOST_WIDE_INT startb, endb;
3883 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3884
3885 bitlength_rtx = expand_expr (bitlength,
3886 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3887
3888 /* handle non-range tuple element like [ expr ] */
3889 if (startbit == NULL_TREE)
3890 {
3891 startbit = save_expr (endbit);
3892 endbit = startbit;
3893 }
3894 startbit = convert (sizetype, startbit);
3895 endbit = convert (sizetype, endbit);
3896 if (! integer_zerop (domain_min))
3897 {
3898 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3899 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3900 }
3901 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3902 EXPAND_CONST_ADDRESS);
3903 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3904 EXPAND_CONST_ADDRESS);
3905
3906 if (REG_P (target))
3907 {
3908 targetx = assign_stack_temp (GET_MODE (target),
3909 GET_MODE_SIZE (GET_MODE (target)),
3910 0);
3911 emit_move_insn (targetx, target);
3912 }
3913 else if (GET_CODE (target) == MEM)
3914 targetx = target;
3915 else
3916 abort ();
3917
3918#ifdef TARGET_MEM_FUNCTIONS
3919 /* Optimization: If startbit and endbit are
9faa82d8 3920 constants divisible by BITS_PER_UNIT,
0f41302f 3921 call memset instead. */
071a6595
PB
3922 if (TREE_CODE (startbit) == INTEGER_CST
3923 && TREE_CODE (endbit) == INTEGER_CST
3924 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 3925 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 3926 {
071a6595
PB
3927 emit_library_call (memset_libfunc, 0,
3928 VOIDmode, 3,
e1a43f73
PB
3929 plus_constant (XEXP (targetx, 0),
3930 startb / BITS_PER_UNIT),
071a6595 3931 Pmode,
3b6f75e2 3932 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 3933 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 3934 TYPE_MODE (sizetype));
071a6595
PB
3935 }
3936 else
3937#endif
3938 {
071a6595
PB
3939 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3940 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3941 bitlength_rtx, TYPE_MODE (sizetype),
3942 startbit_rtx, TYPE_MODE (sizetype),
3943 endbit_rtx, TYPE_MODE (sizetype));
3944 }
3945 if (REG_P (target))
3946 emit_move_insn (target, targetx);
3947 }
3948 }
bbf6f052
RK
3949
3950 else
3951 abort ();
3952}
3953
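/* The TARGET_MEM_FUNCTIONS case above collapses a byte-aligned range
   of set bits into one memset of -1.  A minimal standalone sketch of
   the same arithmetic, assuming 8-bit units and an inclusive
   [startbit, endbit] bit range:  */

#include <stdio.h>
#include <string.h>

#define BITS_PER_UNIT 8

int
main ()
{
  unsigned char set[4];
  int startbit = 8, endbit = 23;	/* bits 8..23: bytes 1 and 2 */
  int endb = endbit + 1;

  memset (set, 0, sizeof set);
  if (startbit % BITS_PER_UNIT == 0 && endb % BITS_PER_UNIT == 0)
    /* Whole bytes, so one call sets the entire range.  */
    memset (set + startbit / BITS_PER_UNIT, -1,
	    (endb - startbit) / BITS_PER_UNIT);

  printf ("%02x %02x %02x %02x\n", set[0], set[1], set[2], set[3]);
  /* Prints: 00 ff ff 00.  */
  return 0;
}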
3954/* Store the value of EXP (an expression tree)
3955 into a subfield of TARGET which has mode MODE and occupies
3956 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3957 If MODE is VOIDmode, it means that we are storing into a bit-field.
3958
3959 If VALUE_MODE is VOIDmode, return nothing in particular.
3960 UNSIGNEDP is not used in this case.
3961
3962 Otherwise, return an rtx for the value stored. This rtx
3963 has mode VALUE_MODE if that is convenient to do.
3964 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3965
3966 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3967 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3968
3969static rtx
3970store_field (target, bitsize, bitpos, mode, exp, value_mode,
3971 unsignedp, align, total_size)
3972 rtx target;
3973 int bitsize, bitpos;
3974 enum machine_mode mode;
3975 tree exp;
3976 enum machine_mode value_mode;
3977 int unsignedp;
3978 int align;
3979 int total_size;
3980{
906c4e36 3981 HOST_WIDE_INT width_mask = 0;
bbf6f052 3982
906c4e36
RK
3983 if (bitsize < HOST_BITS_PER_WIDE_INT)
3984 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
3985
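  /* WIDTH_MASK collects the low BITSIZE bits of a value; for a 5-bit
     field it is ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f.  It is left at
     zero when BITSIZE fills the whole host word, since the shift
     would then be undefined, and the refetch code further down tests
     width_mask != 0 before relying on it.  */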
3986 /* If we are storing into an unaligned field of an aligned union that is
3987 in a register, we may have the mode of TARGET being an integer mode but
3988 MODE == BLKmode. In that case, get an aligned object whose size and
3989 alignment are the same as TARGET and store TARGET into it (we can avoid
3990 the store if the field being stored is the entire width of TARGET). Then
3991 call ourselves recursively to store the field into a BLKmode version of
3992 that object. Finally, load from the object into TARGET. This is not
3993 very efficient in general, but should only be slightly more expensive
3994 than the otherwise-required unaligned accesses. Perhaps this can be
3995 cleaned up later. */
3996
3997 if (mode == BLKmode
3998 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3999 {
4000 rtx object = assign_stack_temp (GET_MODE (target),
4001 GET_MODE_SIZE (GET_MODE (target)), 0);
4002 rtx blk_object = copy_rtx (object);
4003
24a13950
JW
4004 MEM_IN_STRUCT_P (object) = 1;
4005 MEM_IN_STRUCT_P (blk_object) = 1;
bbf6f052
RK
4006 PUT_MODE (blk_object, BLKmode);
4007
4008 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4009 emit_move_insn (object, target);
4010
4011 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4012 align, total_size);
4013
46093b97
RS
4014 /* Even though we aren't returning target, we need to
4015 give it the updated value. */
bbf6f052
RK
4016 emit_move_insn (target, object);
4017
46093b97 4018 return blk_object;
bbf6f052
RK
4019 }
4020
4021 /* If the structure is in a register or if the component
4022 is a bit field, we cannot use addressing to access it.
4023 Use bit-field techniques or SUBREG to store in it. */
4024
4fa52007
RK
4025 if (mode == VOIDmode
4026 || (mode != BLKmode && ! direct_store[(int) mode])
4027 || GET_CODE (target) == REG
c980ac49 4028 || GET_CODE (target) == SUBREG
ccc98036
RS
4029 /* If the field isn't aligned enough to store as an ordinary memref,
4030 store it as a bit field. */
c7a7ac46 4031 || (SLOW_UNALIGNED_ACCESS
ccc98036 4032 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
c7a7ac46 4033 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4034 {
906c4e36 4035 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73
RK
4036
4037 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4038 MODE. */
4039 if (mode != VOIDmode && mode != BLKmode
4040 && mode != TYPE_MODE (TREE_TYPE (exp)))
4041 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4042
a281e72d
RK
4043 /* If the modes of TARGET and TEMP are both BLKmode, both
4044 must be in memory and BITPOS must be aligned on a byte
4045 boundary. If so, we simply do a block copy. */
4046 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4047 {
4048 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4049 || bitpos % BITS_PER_UNIT != 0)
4050 abort ();
4051
0086427c
RK
4052 target = change_address (target, VOIDmode,
4053 plus_constant (XEXP (target, 0),
a281e72d
RK
4054 bitpos / BITS_PER_UNIT));
4055
4056 emit_block_move (target, temp,
4057 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4058 / BITS_PER_UNIT),
4059 1);
4060
4061 return value_mode == VOIDmode ? const0_rtx : target;
4062 }
4063
bbf6f052
RK
4064 /* Store the value in the bitfield. */
4065 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4066 if (value_mode != VOIDmode)
4067 {
4068 /* The caller wants an rtx for the value. */
4069 /* If possible, avoid refetching from the bitfield itself. */
4070 if (width_mask != 0
4071 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4072 {
9074de27 4073 tree count;
5c4d7cfb 4074 enum machine_mode tmode;
86a2c12a 4075
5c4d7cfb
RS
4076 if (unsignedp)
4077 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4078 tmode = GET_MODE (temp);
86a2c12a
RS
4079 if (tmode == VOIDmode)
4080 tmode = value_mode;
5c4d7cfb
RS
4081 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4082 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4083 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4084 }
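      /* The two shifts just above sign-extend a BITSIZE-bit field
	 without refetching it from memory.  For example, with a
	 32-bit TMODE and a 5-bit field holding the bits 10011
	 (19 unsigned, -13 signed), COUNT is 32 - 5 = 27; shifting
	 left by 27 puts the field's sign bit in the word's sign bit,
	 and the arithmetic right shift by 27 brings the value back
	 sign-extended, i.e. -13.  The unsigned case above needs only
	 the expand_and with WIDTH_MASK.  */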
bbf6f052 4085 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
4086 NULL_RTX, value_mode, 0, align,
4087 total_size);
bbf6f052
RK
4088 }
4089 return const0_rtx;
4090 }
4091 else
4092 {
4093 rtx addr = XEXP (target, 0);
4094 rtx to_rtx;
4095
4096 /* If a value is wanted, it must be the lhs;
4097 so make the address stable for multiple use. */
4098
4099 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4100 && ! CONSTANT_ADDRESS_P (addr)
4101 /* A frame-pointer reference is already stable. */
4102 && ! (GET_CODE (addr) == PLUS
4103 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4104 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4105 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4106 addr = copy_to_reg (addr);
4107
4108 /* Now build a reference to just the desired component. */
4109
4110 to_rtx = change_address (target, mode,
4111 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
4112 MEM_IN_STRUCT_P (to_rtx) = 1;
4113
4114 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4115 }
4116}
4117\f
6be58303
JW
4118/* Return true if any object containing the innermost array is an unaligned
4119 packed structure field. */
4120
4121static int
4122get_inner_unaligned_p (exp)
4123 tree exp;
4124{
4125 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4126
4127 while (1)
4128 {
4129 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4130 {
4131 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4132 < needed_alignment)
4133 return 1;
4134 }
4135 else if (TREE_CODE (exp) != ARRAY_REF
4136 && TREE_CODE (exp) != NON_LVALUE_EXPR
4137 && ! ((TREE_CODE (exp) == NOP_EXPR
4138 || TREE_CODE (exp) == CONVERT_EXPR)
4139 && (TYPE_MODE (TREE_TYPE (exp))
4140 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4141 break;
4142
4143 exp = TREE_OPERAND (exp, 0);
4144 }
4145
4146 return 0;
4147}
4148
bbf6f052
RK
4149/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4150 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4151 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4152
4153 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4154 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4155 If the position of the field is variable, we store a tree
4156 giving the variable offset (in units) in *POFFSET.
4157 This offset is in addition to the bit position.
4158 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
4159
4160 If any of the extraction expressions is volatile,
4161 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4162
4163 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4164 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4165 is redundant.
4166
4167 If the field describes a variable-sized object, *PMODE is set to
4168 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4169 this case, but the address of the object can be found. */
bbf6f052
RK
4170
4171tree
4969d05d
RK
4172get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4173 punsignedp, pvolatilep)
bbf6f052
RK
4174 tree exp;
4175 int *pbitsize;
4176 int *pbitpos;
7bb0943f 4177 tree *poffset;
bbf6f052
RK
4178 enum machine_mode *pmode;
4179 int *punsignedp;
4180 int *pvolatilep;
4181{
b50d17a1 4182 tree orig_exp = exp;
bbf6f052
RK
4183 tree size_tree = 0;
4184 enum machine_mode mode = VOIDmode;
742920c7 4185 tree offset = integer_zero_node;
bbf6f052
RK
4186
4187 if (TREE_CODE (exp) == COMPONENT_REF)
4188 {
4189 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4190 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4191 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4192 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4193 }
4194 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4195 {
4196 size_tree = TREE_OPERAND (exp, 1);
4197 *punsignedp = TREE_UNSIGNED (exp);
4198 }
4199 else
4200 {
4201 mode = TYPE_MODE (TREE_TYPE (exp));
4202 *pbitsize = GET_MODE_BITSIZE (mode);
4203 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4204 }
4205
4206 if (size_tree)
4207 {
4208 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4209 mode = BLKmode, *pbitsize = -1;
4210 else
4211 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4212 }
4213
4214 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4215 and find the ultimate containing object. */
4216
4217 *pbitpos = 0;
4218
4219 while (1)
4220 {
7bb0943f 4221 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4222 {
7bb0943f
RS
4223 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4224 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4225 : TREE_OPERAND (exp, 2));
e6d8c385 4226 tree constant = integer_zero_node, var = pos;
bbf6f052 4227
e7f3c83f
RK
4228 /* If this field hasn't been filled in yet, don't go
4229 past it. This should only happen when folding expressions
4230 made during type construction. */
4231 if (pos == 0)
4232 break;
4233
e6d8c385
RK
4234 /* Assume here that the offset is a multiple of a unit.
4235 If not, there should be an explicitly added constant. */
4236 if (TREE_CODE (pos) == PLUS_EXPR
4237 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4238 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 4239 else if (TREE_CODE (pos) == INTEGER_CST)
e6d8c385
RK
4240 constant = pos, var = integer_zero_node;
4241
4242 *pbitpos += TREE_INT_CST_LOW (constant);
4243
4244 if (var)
4245 offset = size_binop (PLUS_EXPR, offset,
4246 size_binop (EXACT_DIV_EXPR, var,
4247 size_int (BITS_PER_UNIT)));
bbf6f052 4248 }
bbf6f052 4249
742920c7 4250 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 4251 {
742920c7
RK
4252 /* This code is based on the code in case ARRAY_REF in expand_expr
4253 below. We assume here that the size of an array element is
4254 always an integral multiple of BITS_PER_UNIT. */
4255
4256 tree index = TREE_OPERAND (exp, 1);
4257 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4258 tree low_bound
4259 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4260 tree index_type = TREE_TYPE (index);
4261
4262 if (! integer_zerop (low_bound))
4263 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4264
4c08eef0 4265 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
742920c7 4266 {
4c08eef0
RK
4267 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4268 index);
742920c7
RK
4269 index_type = TREE_TYPE (index);
4270 }
4271
4272 index = fold (build (MULT_EXPR, index_type, index,
4273 TYPE_SIZE (TREE_TYPE (exp))));
4274
4275 if (TREE_CODE (index) == INTEGER_CST
4276 && TREE_INT_CST_HIGH (index) == 0)
4277 *pbitpos += TREE_INT_CST_LOW (index);
4278 else
4279 offset = size_binop (PLUS_EXPR, offset,
4280 size_binop (FLOOR_DIV_EXPR, index,
4281 size_int (BITS_PER_UNIT)));
bbf6f052
RK
4282 }
4283 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4284 && ! ((TREE_CODE (exp) == NOP_EXPR
4285 || TREE_CODE (exp) == CONVERT_EXPR)
7f62854a
RK
4286 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4287 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4288 != UNION_TYPE))
bbf6f052
RK
4289 && (TYPE_MODE (TREE_TYPE (exp))
4290 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4291 break;
7bb0943f
RS
4292
4293 /* If any reference in the chain is volatile, the effect is volatile. */
4294 if (TREE_THIS_VOLATILE (exp))
4295 *pvolatilep = 1;
bbf6f052
RK
4296 exp = TREE_OPERAND (exp, 0);
4297 }
4298
4299 /* If this was a bit-field, see if there is a mode that allows direct
4300 access in case EXP is in memory. */
e7f3c83f 4301 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
bbf6f052 4302 {
e8621b3c
RK
4303 mode = mode_for_size (*pbitsize,
4304 (TYPE_MODE (TREE_TYPE (orig_exp)) == BLKmode
4305 ? MODE_INT
4306 : GET_MODE_CLASS (TYPE_MODE
4307 (TREE_TYPE (orig_exp)))),
4308 0);
bbf6f052
RK
4309 if (mode == BLKmode)
4310 mode = VOIDmode;
4311 }
4312
742920c7
RK
4313 if (integer_zerop (offset))
4314 offset = 0;
4315
b50d17a1
RK
4316 if (offset != 0 && contains_placeholder_p (offset))
4317 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4318
bbf6f052 4319 *pmode = mode;
7bb0943f 4320 *poffset = offset;
bbf6f052
RK
4321 return exp;
4322}
4323\f
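/* A sketch of a typical call to get_inner_reference, with REF
   standing for some COMPONENT_REF or ARRAY_REF tree already in hand
   (hypothetical here).  Note that VOLATILEP must be initialized by
   the caller, since get_inner_reference only ever sets it to 1:  */

	{
	  int bitsize, bitpos, unsignedp, volatilep = 0;
	  tree offset;
	  enum machine_mode mode1;
	  tree inner
	    = get_inner_reference (ref, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep);

	  /* INNER is the ultimate containing object; BITPOS plus the
	     variable byte OFFSET locate the field within it.  */
	}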
4324/* Given an rtx VALUE that may contain additions and multiplications,
4325 return an equivalent value that just refers to a register or memory.
4326 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
4327 and returning a pseudo-register containing the value.
4328
4329 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
4330
4331rtx
4332force_operand (value, target)
4333 rtx value, target;
4334{
4335 register optab binoptab = 0;
4336 /* Use a temporary to force order of execution of calls to
4337 `force_operand'. */
4338 rtx tmp;
4339 register rtx op2;
4340 /* Use subtarget as the target for operand 0 of a binary operation. */
4341 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4342
4343 if (GET_CODE (value) == PLUS)
4344 binoptab = add_optab;
4345 else if (GET_CODE (value) == MINUS)
4346 binoptab = sub_optab;
4347 else if (GET_CODE (value) == MULT)
4348 {
4349 op2 = XEXP (value, 1);
4350 if (!CONSTANT_P (op2)
4351 && !(GET_CODE (op2) == REG && op2 != subtarget))
4352 subtarget = 0;
4353 tmp = force_operand (XEXP (value, 0), subtarget);
4354 return expand_mult (GET_MODE (value), tmp,
906c4e36 4355 force_operand (op2, NULL_RTX),
bbf6f052
RK
4356 target, 0);
4357 }
4358
4359 if (binoptab)
4360 {
4361 op2 = XEXP (value, 1);
4362 if (!CONSTANT_P (op2)
4363 && !(GET_CODE (op2) == REG && op2 != subtarget))
4364 subtarget = 0;
4365 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4366 {
4367 binoptab = add_optab;
4368 op2 = negate_rtx (GET_MODE (value), op2);
4369 }
4370
4371 /* Check for an addition with OP2 a constant integer and our first
4372 operand a PLUS of a virtual register and something else. In that
4373 case, we want to emit the sum of the virtual register and the
4374 constant first and then add the other value. This allows virtual
4375 register instantiation to simply modify the constant rather than
4376 creating another one around this addition. */
4377 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4378 && GET_CODE (XEXP (value, 0)) == PLUS
4379 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4380 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4381 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4382 {
4383 rtx temp = expand_binop (GET_MODE (value), binoptab,
4384 XEXP (XEXP (value, 0), 0), op2,
4385 subtarget, 0, OPTAB_LIB_WIDEN);
4386 return expand_binop (GET_MODE (value), binoptab, temp,
4387 force_operand (XEXP (XEXP (value, 0), 1), 0),
4388 target, 0, OPTAB_LIB_WIDEN);
4389 }
4390
4391 tmp = force_operand (XEXP (value, 0), subtarget);
4392 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 4393 force_operand (op2, NULL_RTX),
bbf6f052 4394 target, 0, OPTAB_LIB_WIDEN);
8008b228 4395 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
4396 because the only operations we are expanding here are signed ones. */
4397 }
4398 return value;
4399}
4400\f
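/* A small usage sketch for force_operand: given an address computed
   as a sum, emit the arithmetic and get back something usable as an
   operand.  SOME_REG is a stand-in for a pseudo the caller already
   has:  */

	rtx addr = gen_rtx (PLUS, Pmode, some_reg, GEN_INT (4));
	rtx op = force_operand (addr, NULL_RTX);
	/* OP now refers to a register (or constant) holding
	   some_reg + 4.  */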
4401/* Subroutine of expand_expr:
4402 save the non-copied parts (LIST) of an expr (LHS), and return a list
4403 which can restore these values to their previous values,
4404 should something modify their storage. */
4405
4406static tree
4407save_noncopied_parts (lhs, list)
4408 tree lhs;
4409 tree list;
4410{
4411 tree tail;
4412 tree parts = 0;
4413
4414 for (tail = list; tail; tail = TREE_CHAIN (tail))
4415 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4416 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4417 else
4418 {
4419 tree part = TREE_VALUE (tail);
4420 tree part_type = TREE_TYPE (part);
906c4e36 4421 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
06089a8b 4422 rtx target = assign_temp (part_type, 0, 1, 1);
bbf6f052 4423 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 4424 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 4425 parts = tree_cons (to_be_saved,
906c4e36
RK
4426 build (RTL_EXPR, part_type, NULL_TREE,
4427 (tree) target),
bbf6f052
RK
4428 parts);
4429 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4430 }
4431 return parts;
4432}
4433
4434/* Subroutine of expand_expr:
4435 record the non-copied parts (LIST) of an expr (LHS), and return a list
4436 which specifies the initial values of these parts. */
4437
4438static tree
4439init_noncopied_parts (lhs, list)
4440 tree lhs;
4441 tree list;
4442{
4443 tree tail;
4444 tree parts = 0;
4445
4446 for (tail = list; tail; tail = TREE_CHAIN (tail))
4447 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4448 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4449 else
4450 {
4451 tree part = TREE_VALUE (tail);
4452 tree part_type = TREE_TYPE (part);
906c4e36 4453 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
4454 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4455 }
4456 return parts;
4457}
4458
4459/* Subroutine of expand_expr: return nonzero iff there is no way that
4460 EXP can reference X, which is being modified. */
4461
4462static int
4463safe_from_p (x, exp)
4464 rtx x;
4465 tree exp;
4466{
4467 rtx exp_rtl = 0;
4468 int i, nops;
4469
6676e72f
RK
4470 if (x == 0
4471 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
4472 have no way of allocating temporaries of variable size
4473 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4474 So we assume here that something at a higher level has prevented a
f4510f37
RK
4475 clash. This is somewhat bogus, but the best we can do. Only
4476 do this when X is BLKmode. */
45524ce9 4477 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
f4510f37 4478 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
4479 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4480 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4481 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4482 != INTEGER_CST)
f4510f37 4483 && GET_MODE (x) == BLKmode))
bbf6f052
RK
4484 return 1;
4485
4486 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4487 find the underlying pseudo. */
4488 if (GET_CODE (x) == SUBREG)
4489 {
4490 x = SUBREG_REG (x);
4491 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4492 return 0;
4493 }
4494
4495 /* If X is a location in the outgoing argument area, it is always safe. */
4496 if (GET_CODE (x) == MEM
4497 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4498 || (GET_CODE (XEXP (x, 0)) == PLUS
4499 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4500 return 1;
4501
4502 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4503 {
4504 case 'd':
4505 exp_rtl = DECL_RTL (exp);
4506 break;
4507
4508 case 'c':
4509 return 1;
4510
4511 case 'x':
4512 if (TREE_CODE (exp) == TREE_LIST)
f32fd778
RS
4513 return ((TREE_VALUE (exp) == 0
4514 || safe_from_p (x, TREE_VALUE (exp)))
bbf6f052
RK
4515 && (TREE_CHAIN (exp) == 0
4516 || safe_from_p (x, TREE_CHAIN (exp))));
4517 else
4518 return 0;
4519
4520 case '1':
4521 return safe_from_p (x, TREE_OPERAND (exp, 0));
4522
4523 case '2':
4524 case '<':
4525 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4526 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4527
4528 case 'e':
4529 case 'r':
4530 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4531 the expression. If it is set, we conflict iff we are that rtx or
4532 both are in memory. Otherwise, we check all operands of the
4533 expression recursively. */
4534
4535 switch (TREE_CODE (exp))
4536 {
4537 case ADDR_EXPR:
e44842fe
RK
4538 return (staticp (TREE_OPERAND (exp, 0))
4539 || safe_from_p (x, TREE_OPERAND (exp, 0)));
bbf6f052
RK
4540
4541 case INDIRECT_REF:
4542 if (GET_CODE (x) == MEM)
4543 return 0;
4544 break;
4545
4546 case CALL_EXPR:
4547 exp_rtl = CALL_EXPR_RTL (exp);
4548 if (exp_rtl == 0)
4549 {
4550 /* Assume that the call will clobber all hard registers and
4551 all of memory. */
4552 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4553 || GET_CODE (x) == MEM)
4554 return 0;
4555 }
4556
4557 break;
4558
4559 case RTL_EXPR:
3bb5826a
RK
4560 /* If a sequence exists, we would have to scan every instruction
4561 in the sequence to see if it was safe. This is probably not
4562 worthwhile. */
4563 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
4564 return 0;
4565
3bb5826a 4566 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
4567 break;
4568
4569 case WITH_CLEANUP_EXPR:
4570 exp_rtl = RTL_EXPR_RTL (exp);
4571 break;
4572
5dab5552
MS
4573 case CLEANUP_POINT_EXPR:
4574 return safe_from_p (x, TREE_OPERAND (exp, 0));
4575
bbf6f052
RK
4576 case SAVE_EXPR:
4577 exp_rtl = SAVE_EXPR_RTL (exp);
4578 break;
4579
8129842c
RS
4580 case BIND_EXPR:
4581 /* The only operand we look at is operand 1. The rest aren't
4582 part of the expression. */
4583 return safe_from_p (x, TREE_OPERAND (exp, 1));
4584
bbf6f052 4585 case METHOD_CALL_EXPR:
0f41302f 4586	  /* This takes an rtx argument, but shouldn't appear here.  */
bbf6f052
RK
4587 abort ();
4588 }
4589
4590 /* If we have an rtx, we do not need to scan our operands. */
4591 if (exp_rtl)
4592 break;
4593
4594 nops = tree_code_length[(int) TREE_CODE (exp)];
4595 for (i = 0; i < nops; i++)
4596 if (TREE_OPERAND (exp, i) != 0
4597 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4598 return 0;
4599 }
4600
4601 /* If we have an rtl, find any enclosed object. Then see if we conflict
4602 with it. */
4603 if (exp_rtl)
4604 {
4605 if (GET_CODE (exp_rtl) == SUBREG)
4606 {
4607 exp_rtl = SUBREG_REG (exp_rtl);
4608 if (GET_CODE (exp_rtl) == REG
4609 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4610 return 0;
4611 }
4612
4613 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4614 are memory and EXP is not readonly. */
4615 return ! (rtx_equal_p (x, exp_rtl)
4616 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4617 && ! TREE_READONLY (exp)));
4618 }
4619
4620 /* If we reach here, it is safe. */
4621 return 1;
4622}
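/* safe_from_p is what lets expansion reuse a caller-supplied TARGET.
   A sketch of the usual pattern, with EXP and MODE as in
   expand_expr:  */

	if (target == 0 || ! safe_from_p (target, exp))
	  /* EXP might read TARGET while we are storing into it, so
	     build the value in a fresh pseudo instead.  */
	  target = gen_reg_rtx (mode);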
4623
4624/* Subroutine of expand_expr: return nonzero iff EXP is an
4625 expression whose type is statically determinable. */
4626
4627static int
4628fixed_type_p (exp)
4629 tree exp;
4630{
4631 if (TREE_CODE (exp) == PARM_DECL
4632 || TREE_CODE (exp) == VAR_DECL
4633 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4634 || TREE_CODE (exp) == COMPONENT_REF
4635 || TREE_CODE (exp) == ARRAY_REF)
4636 return 1;
4637 return 0;
4638}
4639\f
4640/* expand_expr: generate code for computing expression EXP.
4641 An rtx for the computed value is returned. The value is never null.
4642 In the case of a void EXP, const0_rtx is returned.
4643
4644 The value may be stored in TARGET if TARGET is nonzero.
4645 TARGET is just a suggestion; callers must assume that
4646 the rtx returned may not be the same as TARGET.
4647
4648 If TARGET is CONST0_RTX, it means that the value will be ignored.
4649
4650 If TMODE is not VOIDmode, it suggests generating the
4651 result in mode TMODE. But this is done only when convenient.
4652	 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4653 TMODE is just a suggestion; callers must assume that
4654 the rtx returned may not have mode TMODE.
4655
d6a5ac33
RK
4656 Note that TARGET may have neither TMODE nor MODE. In that case, it
4657 probably will not be used.
bbf6f052
RK
4658
4659 If MODIFIER is EXPAND_SUM then when EXP is an addition
4660 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4661 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4662 products as above, or REG or MEM, or constant.
4663 Ordinarily in such cases we would output mul or add instructions
4664 and then return a pseudo reg containing the sum.
4665
4666 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4667 it also marks a label as absolutely required (it can't be dead).
26fcb35a 4668 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
4669 This is used for outputting expressions used in initializers.
4670
4671 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4672 with a constant address even if that address is not normally legitimate.
4673 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
bbf6f052
RK
4674
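/* The commonest call pattern, used throughout this file: expand an
   operand with no particular target, mode, or modifier preference.  */

	op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);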
4675rtx
4676expand_expr (exp, target, tmode, modifier)
4677 register tree exp;
4678 rtx target;
4679 enum machine_mode tmode;
4680 enum expand_modifier modifier;
4681{
b50d17a1
RK
4682 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4683 This is static so it will be accessible to our recursive callees. */
4684 static tree placeholder_list = 0;
bbf6f052
RK
4685 register rtx op0, op1, temp;
4686 tree type = TREE_TYPE (exp);
4687 int unsignedp = TREE_UNSIGNED (type);
4688 register enum machine_mode mode = TYPE_MODE (type);
4689 register enum tree_code code = TREE_CODE (exp);
4690 optab this_optab;
4691 /* Use subtarget as the target for operand 0 of a binary operation. */
4692 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4693 rtx original_target = target;
ca695ac9 4694 /* Maybe defer this until sure not doing bytecode? */
dd27116b
RK
4695 int ignore = (target == const0_rtx
4696 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
4697 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4698 || code == COND_EXPR)
dd27116b 4699 && TREE_CODE (type) == VOID_TYPE));
bbf6f052
RK
4700 tree context;
4701
ca695ac9 4702
1d556704 4703 if (output_bytecode && modifier != EXPAND_INITIALIZER)
ca695ac9
JB
4704 {
4705 bc_expand_expr (exp);
4706 return NULL;
4707 }
4708
bbf6f052
RK
4709 /* Don't use hard regs as subtargets, because the combiner
4710 can only handle pseudo regs. */
4711 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4712 subtarget = 0;
4713 /* Avoid subtargets inside loops,
4714 since they hide some invariant expressions. */
4715 if (preserve_subexpressions_p ())
4716 subtarget = 0;
4717
dd27116b
RK
4718 /* If we are going to ignore this result, we need only do something
4719 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
4720 is, short-circuit the most common cases here. Note that we must
4721 not call expand_expr with anything but const0_rtx in case this
4722 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 4723
dd27116b
RK
4724 if (ignore)
4725 {
4726 if (! TREE_SIDE_EFFECTS (exp))
4727 return const0_rtx;
4728
4729 /* Ensure we reference a volatile object even if value is ignored. */
4730 if (TREE_THIS_VOLATILE (exp)
4731 && TREE_CODE (exp) != FUNCTION_DECL
4732 && mode != VOIDmode && mode != BLKmode)
4733 {
4734 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4735 if (GET_CODE (temp) == MEM)
4736 temp = copy_to_reg (temp);
4737 return const0_rtx;
4738 }
4739
4740 if (TREE_CODE_CLASS (code) == '1')
4741 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4742 VOIDmode, modifier);
4743 else if (TREE_CODE_CLASS (code) == '2'
4744 || TREE_CODE_CLASS (code) == '<')
4745 {
4746 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4747 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4748 return const0_rtx;
4749 }
4750 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4751 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4752 /* If the second operand has no side effects, just evaluate
0f41302f 4753 the first. */
dd27116b
RK
4754 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4755 VOIDmode, modifier);
dd27116b 4756
90764a87 4757 target = 0;
dd27116b 4758 }
bbf6f052 4759
e44842fe
RK
4760 /* If will do cse, generate all results into pseudo registers
4761 since 1) that allows cse to find more things
4762 and 2) otherwise cse could produce an insn the machine
4763 cannot support. */
4764
bbf6f052
RK
4765 if (! cse_not_expected && mode != BLKmode && target
4766 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4767 target = subtarget;
4768
bbf6f052
RK
4769 switch (code)
4770 {
4771 case LABEL_DECL:
b552441b
RS
4772 {
4773 tree function = decl_function_context (exp);
4774 /* Handle using a label in a containing function. */
4775 if (function != current_function_decl && function != 0)
4776 {
4777 struct function *p = find_function_data (function);
4778 /* Allocate in the memory associated with the function
4779 that the label is in. */
4780 push_obstacks (p->function_obstack,
4781 p->function_maybepermanent_obstack);
4782
4783 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4784 label_rtx (exp), p->forced_labels);
4785 pop_obstacks ();
4786 }
4787 else if (modifier == EXPAND_INITIALIZER)
4788 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4789 label_rtx (exp), forced_labels);
26fcb35a 4790 temp = gen_rtx (MEM, FUNCTION_MODE,
b552441b 4791 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
26fcb35a
RS
4792 if (function != current_function_decl && function != 0)
4793 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4794 return temp;
b552441b 4795 }
bbf6f052
RK
4796
4797 case PARM_DECL:
4798 if (DECL_RTL (exp) == 0)
4799 {
4800 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 4801 return CONST0_RTX (mode);
bbf6f052
RK
4802 }
4803
0f41302f 4804 /* ... fall through ... */
d6a5ac33 4805
bbf6f052 4806 case VAR_DECL:
2dca20cd
RS
4807 /* If a static var's type was incomplete when the decl was written,
4808 but the type is complete now, lay out the decl now. */
4809 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4810 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4811 {
4812 push_obstacks_nochange ();
4813 end_temporary_allocation ();
4814 layout_decl (exp, 0);
4815 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4816 pop_obstacks ();
4817 }
d6a5ac33 4818
0f41302f 4819 /* ... fall through ... */
d6a5ac33 4820
2dca20cd 4821 case FUNCTION_DECL:
bbf6f052
RK
4822 case RESULT_DECL:
4823 if (DECL_RTL (exp) == 0)
4824 abort ();
d6a5ac33 4825
e44842fe
RK
4826	 /* Ensure the variable is marked as used even if it doesn't go through
4827	 a parser.  If it hasn't been used yet, write out an external
4828 definition. */
4829 if (! TREE_USED (exp))
4830 {
4831 assemble_external (exp);
4832 TREE_USED (exp) = 1;
4833 }
4834
dc6d66b3
RK
4835 /* Show we haven't gotten RTL for this yet. */
4836 temp = 0;
4837
bbf6f052
RK
4838 /* Handle variables inherited from containing functions. */
4839 context = decl_function_context (exp);
4840
4841 /* We treat inline_function_decl as an alias for the current function
4842 because that is the inline function whose vars, types, etc.
4843 are being merged into the current function.
4844 See expand_inline_function. */
d6a5ac33 4845
bbf6f052
RK
4846 if (context != 0 && context != current_function_decl
4847 && context != inline_function_decl
4848 /* If var is static, we don't need a static chain to access it. */
4849 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4850 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4851 {
4852 rtx addr;
4853
4854 /* Mark as non-local and addressable. */
81feeecb 4855 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
4856 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4857 abort ();
bbf6f052
RK
4858 mark_addressable (exp);
4859 if (GET_CODE (DECL_RTL (exp)) != MEM)
4860 abort ();
4861 addr = XEXP (DECL_RTL (exp), 0);
4862 if (GET_CODE (addr) == MEM)
d6a5ac33
RK
4863 addr = gen_rtx (MEM, Pmode,
4864 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
4865 else
4866 addr = fix_lexical_addr (addr, exp);
dc6d66b3 4867 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 4868 }
4af3895e 4869
bbf6f052
RK
4870 /* This is the case of an array whose size is to be determined
4871 from its initializer, while the initializer is still being parsed.
4872 See expand_decl. */
d6a5ac33 4873
dc6d66b3
RK
4874 else if (GET_CODE (DECL_RTL (exp)) == MEM
4875 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4876 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 4877 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
4878
4879 /* If DECL_RTL is memory, we are in the normal case and either
4880 the address is not valid or it is not a register and -fforce-addr
4881 is specified, get the address into a register. */
4882
dc6d66b3
RK
4883 else if (GET_CODE (DECL_RTL (exp)) == MEM
4884 && modifier != EXPAND_CONST_ADDRESS
4885 && modifier != EXPAND_SUM
4886 && modifier != EXPAND_INITIALIZER
4887 && (! memory_address_p (DECL_MODE (exp),
4888 XEXP (DECL_RTL (exp), 0))
4889 || (flag_force_addr
4890 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4891 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 4892 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 4893
dc6d66b3
RK
4894 /* If we got something, return it. But first, set the alignment
4895	 in case the address is a register.  */
4896 if (temp != 0)
4897 {
4898 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4899 mark_reg_pointer (XEXP (temp, 0),
4900 DECL_ALIGN (exp) / BITS_PER_UNIT);
4901
4902 return temp;
4903 }
4904
1499e0a8
RK
4905 /* If the mode of DECL_RTL does not match that of the decl, it
4906 must be a promoted value. We return a SUBREG of the wanted mode,
4907 but mark it so that we know that it was already extended. */
4908
4909 if (GET_CODE (DECL_RTL (exp)) == REG
4910 && GET_MODE (DECL_RTL (exp)) != mode)
4911 {
1499e0a8
RK
4912 /* Get the signedness used for this variable. Ensure we get the
4913 same mode we got when the variable was declared. */
78911e8b
RK
4914 if (GET_MODE (DECL_RTL (exp))
4915 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
4916 abort ();
4917
4918 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4919 SUBREG_PROMOTED_VAR_P (temp) = 1;
4920 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4921 return temp;
4922 }
4923
bbf6f052
RK
4924 return DECL_RTL (exp);
4925
4926 case INTEGER_CST:
4927 return immed_double_const (TREE_INT_CST_LOW (exp),
4928 TREE_INT_CST_HIGH (exp),
4929 mode);
4930
4931 case CONST_DECL:
4932 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4933
4934 case REAL_CST:
4935 /* If optimized, generate immediate CONST_DOUBLE
4936 which will be turned into memory by reload if necessary.
4937
4938 We used to force a register so that loop.c could see it. But
4939 this does not allow gen_* patterns to perform optimizations with
4940 the constants. It also produces two insns in cases like "x = 1.0;".
4941 On most machines, floating-point constants are not permitted in
4942 many insns, so we'd end up copying it to a register in any case.
4943
4944 Now, we do the copying in expand_binop, if appropriate. */
4945 return immed_real_const (exp);
4946
4947 case COMPLEX_CST:
4948 case STRING_CST:
4949 if (! TREE_CST_RTL (exp))
4950 output_constant_def (exp);
4951
4952 /* TREE_CST_RTL probably contains a constant address.
4953 On RISC machines where a constant address isn't valid,
4954 make some insns to get that address into a register. */
4955 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4956 && modifier != EXPAND_CONST_ADDRESS
4957 && modifier != EXPAND_INITIALIZER
4958 && modifier != EXPAND_SUM
d6a5ac33
RK
4959 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4960 || (flag_force_addr
4961 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
4962 return change_address (TREE_CST_RTL (exp), VOIDmode,
4963 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4964 return TREE_CST_RTL (exp);
4965
4966 case SAVE_EXPR:
4967 context = decl_function_context (exp);
d6a5ac33 4968
bbf6f052
RK
4969 /* We treat inline_function_decl as an alias for the current function
4970 because that is the inline function whose vars, types, etc.
4971 are being merged into the current function.
4972 See expand_inline_function. */
4973 if (context == current_function_decl || context == inline_function_decl)
4974 context = 0;
4975
4976 /* If this is non-local, handle it. */
4977 if (context)
4978 {
4979 temp = SAVE_EXPR_RTL (exp);
4980 if (temp && GET_CODE (temp) == REG)
4981 {
4982 put_var_into_stack (exp);
4983 temp = SAVE_EXPR_RTL (exp);
4984 }
4985 if (temp == 0 || GET_CODE (temp) != MEM)
4986 abort ();
4987 return change_address (temp, mode,
4988 fix_lexical_addr (XEXP (temp, 0), exp));
4989 }
4990 if (SAVE_EXPR_RTL (exp) == 0)
4991 {
06089a8b
RK
4992 if (mode == VOIDmode)
4993 temp = const0_rtx;
4994 else
4995 temp = assign_temp (type, 0, 0, 0);
1499e0a8 4996
bbf6f052 4997 SAVE_EXPR_RTL (exp) = temp;
bbf6f052
RK
4998 if (!optimize && GET_CODE (temp) == REG)
4999 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5000 save_expr_regs);
ff78f773
RK
5001
5002 /* If the mode of TEMP does not match that of the expression, it
5003 must be a promoted value. We pass store_expr a SUBREG of the
5004 wanted mode but mark it so that we know that it was already
5005 extended. Note that `unsignedp' was modified above in
5006 this case. */
5007
5008 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5009 {
5010 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5011 SUBREG_PROMOTED_VAR_P (temp) = 1;
5012 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5013 }
5014
4c7a0be9
JW
5015 if (temp == const0_rtx)
5016 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5017 else
5018 store_expr (TREE_OPERAND (exp, 0), temp, 0);
bbf6f052 5019 }
1499e0a8
RK
5020
5021 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5022 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 5023 but mark it so that we know that it was already extended. */
1499e0a8
RK
5024
5025 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5026 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5027 {
e70d22c8
RK
5028 /* Compute the signedness and make the proper SUBREG. */
5029 promote_mode (type, mode, &unsignedp, 0);
5030 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
5031 SUBREG_PROMOTED_VAR_P (temp) = 1;
5032 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5033 return temp;
5034 }
5035
bbf6f052
RK
5036 return SAVE_EXPR_RTL (exp);
5037
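      /* The promoted-SUBREG convention used just above (and for
	 DECL_RTL earlier): when a MODE-wide value lives in a wider
	 register because of mode promotion, the expansion returns

	     temp = gen_rtx (SUBREG, mode, promoted_reg, 0);
	     SUBREG_PROMOTED_VAR_P (temp) = 1;
	     SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;

	 (PROMOTED_REG standing for the wider register) so later code
	 knows the upper bits already hold a valid extension and can
	 skip a redundant extend insn.  */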
679163cf
MS
5038 case UNSAVE_EXPR:
5039 {
5040 rtx temp;
5041 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5042 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5043 return temp;
5044 }
5045
b50d17a1
RK
5046 case PLACEHOLDER_EXPR:
5047 /* If there is an object on the head of the placeholder list,
5048	 see if some object in its references is of type TYPE.  For
5049 further information, see tree.def. */
5050 if (placeholder_list)
5051 {
5052 tree object;
5053 	  tree old_list = placeholder_list;
5054
5055 for (object = TREE_PURPOSE (placeholder_list);
5056 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5057 != TYPE_MAIN_VARIANT (type))
5058 	     && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
5059 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
5060 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
5061 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
5062 object = TREE_OPERAND (object, 0))
5063 ;
5064
5065 if (object != 0
5066 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5067 == TYPE_MAIN_VARIANT (type)))
5068 {
5069 /* Expand this object skipping the list entries before
5070 it was found in case it is also a PLACEHOLDER_EXPR.
5071 In that case, we want to translate it using subsequent
5072 entries. */
5073 placeholder_list = TREE_CHAIN (placeholder_list);
5074 temp = expand_expr (object, original_target, tmode, modifier);
5075 placeholder_list = old_list;
5076 return temp;
5077 }
5078 }
5079
5080 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5081 abort ();
5082
5083 case WITH_RECORD_EXPR:
5084 /* Put the object on the placeholder list, expand our first operand,
5085 and pop the list. */
5086 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5087 placeholder_list);
5088 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5089 tmode, modifier);
5090 placeholder_list = TREE_CHAIN (placeholder_list);
5091 return target;
5092
5093     case EXIT_EXPR:
5094 expand_exit_loop_if_false (NULL_PTR,
5095 invert_truthvalue (TREE_OPERAND (exp, 0)));
5096 return const0_rtx;
5097
5098 case LOOP_EXPR:
5099       push_temp_slots ();
5100 expand_start_loop (1);
5101 expand_expr_stmt (TREE_OPERAND (exp, 0));
5102 expand_end_loop ();
5103       pop_temp_slots ();
5104
5105 return const0_rtx;
5106
5107 case BIND_EXPR:
5108 {
5109 tree vars = TREE_OPERAND (exp, 0);
5110 int vars_need_expansion = 0;
5111
5112 /* Need to open a binding contour here because
5113 	   if there are any cleanups they must be contained here.  */
5114 expand_start_bindings (0);
5115
5116 /* Mark the corresponding BLOCK for output in its proper place. */
5117 if (TREE_OPERAND (exp, 2) != 0
5118 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5119 insert_block (TREE_OPERAND (exp, 2));
5120
5121 /* If VARS have not yet been expanded, expand them now. */
5122 while (vars)
5123 {
5124 if (DECL_RTL (vars) == 0)
5125 {
5126 vars_need_expansion = 1;
5127 expand_decl (vars);
5128 }
5129 expand_decl_init (vars);
5130 vars = TREE_CHAIN (vars);
5131 }
5132
5133 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5134
5135 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5136
5137 return temp;
5138 }
5139
5140 case RTL_EXPR:
5141 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5142 abort ();
5143 emit_insns (RTL_EXPR_SEQUENCE (exp));
5144 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5145       preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5146       free_temps_for_rtl_expr (exp);
5147 return RTL_EXPR_RTL (exp);
5148
5149 case CONSTRUCTOR:
5150 /* If we don't need the result, just ensure we evaluate any
5151 subexpressions. */
5152 if (ignore)
5153 {
5154 tree elt;
5155 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5156 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5157 return const0_rtx;
5158 }
5159
5160 /* All elts simple constants => refer to a constant in memory. But
5161 if this is a non-BLKmode mode, let it store a field at a time
5162 since that should make a CONST_INT or CONST_DOUBLE when we
5163 	 fold.  Likewise, if we have a target we can use, it is best to
5164 store directly into the target unless the type is large enough
5165 that memcpy will be used. If we are making an initializer and
5166 	 all operands are constant, put it in memory as well.  */
5167       else if ((TREE_STATIC (exp)
5168 && ((mode == BLKmode
5169 && ! (target != 0 && safe_from_p (target, exp)))
5170 || TREE_ADDRESSABLE (exp)
5171 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5172 && (move_by_pieces_ninsns
5173 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5174 TYPE_ALIGN (type) / BITS_PER_UNIT)
5175 > MOVE_RATIO)
5176 && ! mostly_zeros_p (exp))))
5177 	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5178 {
5179 rtx constructor = output_constant_def (exp);
5180 if (modifier != EXPAND_CONST_ADDRESS
5181 && modifier != EXPAND_INITIALIZER
5182 && modifier != EXPAND_SUM
5183 && (! memory_address_p (GET_MODE (constructor),
5184 XEXP (constructor, 0))
5185 || (flag_force_addr
5186 && GET_CODE (XEXP (constructor, 0)) != REG)))
5187 constructor = change_address (constructor, VOIDmode,
5188 XEXP (constructor, 0));
5189 return constructor;
5190 }
5191
5192 else
5193 {
5194 if (target == 0 || ! safe_from_p (target, exp))
5195 {
5196 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5197 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5198 else
5199 target = assign_temp (type, 0, 1, 1);
5200 }
5201
5202 if (TREE_READONLY (exp))
5203 {
5204 if (GET_CODE (target) == MEM)
5205 target = change_address (target, GET_MODE (target),
5206 XEXP (target, 0));
5207 RTX_UNCHANGING_P (target) = 1;
5208 }
5209
5210 	  store_constructor (exp, target, 0);
5211 return target;
5212 }
5213
5214 case INDIRECT_REF:
5215 {
5216 tree exp1 = TREE_OPERAND (exp, 0);
5217 tree exp2;
5218
5219 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5220 op0 = memory_address (mode, op0);
5221
5222 temp = gen_rtx (MEM, mode, op0);
5223 /* If address was computed by addition,
5224 mark this as an element of an aggregate. */
5225 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5226 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5227 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5228 	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5229 || (TREE_CODE (exp1) == ADDR_EXPR
5230 && (exp2 = TREE_OPERAND (exp1, 0))
5231 	     && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5232 	  MEM_IN_STRUCT_P (temp) = 1;
5233 	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5234
5235 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5236 here, because, in C and C++, the fact that a location is accessed
5237 through a pointer to const does not mean that the value there can
5238 never change. Languages where it can never change should
5239 also set TREE_STATIC. */
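	/* E.g. (illustrative): given `int x; const int *p = &x;', a store
	   through `&x' changes the value subsequently read through `p',
	   so `*p' must not be marked unchanging.  */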
5240 	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5241 return temp;
5242 }
5243
5244 case ARRAY_REF:
5245 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5246 abort ();
5247
5248       {
5249 tree array = TREE_OPERAND (exp, 0);
5250 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5251 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5252 tree index = TREE_OPERAND (exp, 1);
5253 tree index_type = TREE_TYPE (index);
5254 	int i;
5255
5256 if (TREE_CODE (low_bound) != INTEGER_CST
5257 && contains_placeholder_p (low_bound))
5258 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5259
5260 /* Optimize the special-case of a zero lower bound.
5261
5262 We convert the low_bound to sizetype to avoid some problems
5263 with constant folding. (E.g. suppose the lower bound is 1,
5264 and its mode is QI. Without the conversion, (ARRAY
5265 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5266 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5267
5268 But sizetype isn't quite right either (especially if
5269 the lowbound is negative). FIXME */
5270
5271 	if (! integer_zerop (low_bound))
5272 index = fold (build (MINUS_EXPR, index_type, index,
5273 convert (sizetype, low_bound)));
5274
5275 if ((TREE_CODE (index) != INTEGER_CST
5276 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5277 	  && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5278 	{
5279 /* Nonconstant array index or nonconstant element size, and
5280 not an array in an unaligned (packed) structure field.
5281 Generate the tree for *(&array+index) and expand that,
5282 except do it in a language-independent way
5283 and don't complain about non-lvalue arrays.
5284 `mark_addressable' should already have been called
5285 for any array for which this case will be reached. */
5286
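	  /* Illustrative sketch (hypothetical source): for `a[i]' with
	     element type T, the trees built below amount to
	     *(T *) ((char *) &a + i * sizeof (T)), which is then
	     expanded normally.  */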
5287 /* Don't forget the const or volatile flag from the array
5288 	     element.  */
5289 tree variant_type = build_type_variant (type,
5290 TREE_READONLY (exp),
5291 TREE_THIS_VOLATILE (exp));
5292 tree array_adr = build1 (ADDR_EXPR,
5293 build_pointer_type (variant_type), array);
5294 tree elt;
5295 	  tree size = size_in_bytes (type);
5296
5297 /* Convert the integer argument to a type the same size as sizetype
5298 so the multiply won't overflow spuriously. */
5299 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5300 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5301 index);
5302
5303 if (TREE_CODE (size) != INTEGER_CST
5304 && contains_placeholder_p (size))
5305 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5306
5307 /* Don't think the address has side effects
5308 just because the array does.
5309 (In some cases the address might have side effects,
5310 and we fail to record that fact here. However, it should not
5311 matter, since expand_expr should not care.) */
5312 TREE_SIDE_EFFECTS (array_adr) = 0;
5313
5314 elt
5315 = build1
5316 (INDIRECT_REF, type,
5317 fold (build (PLUS_EXPR,
5318 TYPE_POINTER_TO (variant_type),
5319 array_adr,
5320 fold
5321 (build1
5322 (NOP_EXPR,
5323 TYPE_POINTER_TO (variant_type),
5324 fold (build (MULT_EXPR, TREE_TYPE (index),
5325 index,
5326 convert (TREE_TYPE (index),
5327 					 size))))))));
5328
5329 /* Volatility, etc., of new expression is same as old
5330 expression. */
5331 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5332 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5333 TREE_READONLY (elt) = TREE_READONLY (exp);
5334
5335 return expand_expr (elt, target, tmode, modifier);
5336 }
5337
5338 /* Fold an expression like: "foo"[2].
5339 This is not done in fold so it won't happen inside &.
5340 Don't fold if this is for wide characters since it's too
5341 difficult to do correctly and this is a very rare case. */
5342
5343 if (TREE_CODE (array) == STRING_CST
5344 && TREE_CODE (index) == INTEGER_CST
5345 && !TREE_INT_CST_HIGH (index)
5346 	  && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5347 && GET_MODE_CLASS (mode) == MODE_INT
5348 && GET_MODE_SIZE (mode) == 1)
5349 	return GEN_INT (TREE_STRING_POINTER (array)[i]);
5350
5351 /* If this is a constant index into a constant array,
5352 just get the value from the array. Handle both the cases when
5353 we have an explicit constructor and when our operand is a variable
5354 that was declared const. */
5355
5356 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5357 {
5358 if (TREE_CODE (index) == INTEGER_CST
5359 && TREE_INT_CST_HIGH (index) == 0)
5360 {
5361 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5362
5363 i = TREE_INT_CST_LOW (index);
5364 while (elem && i--)
5365 elem = TREE_CHAIN (elem);
5366 if (elem)
5367 return expand_expr (fold (TREE_VALUE (elem)), target,
5368 tmode, modifier);
5369 }
5370 }
5371
5372 else if (optimize >= 1
5373 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5374 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5375 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5376 {
5377 if (TREE_CODE (index) == INTEGER_CST
5378 && TREE_INT_CST_HIGH (index) == 0)
5379 {
5380 tree init = DECL_INITIAL (array);
5381
5382 i = TREE_INT_CST_LOW (index);
5383 if (TREE_CODE (init) == CONSTRUCTOR)
5384 {
5385 tree elem = CONSTRUCTOR_ELTS (init);
5386
5387 while (elem
5388 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5389 elem = TREE_CHAIN (elem);
5390 if (elem)
5391 return expand_expr (fold (TREE_VALUE (elem)), target,
5392 tmode, modifier);
5393 }
5394 else if (TREE_CODE (init) == STRING_CST
5395 && i < TREE_STRING_LENGTH (init))
5396 	      return GEN_INT (TREE_STRING_POINTER (init)[i]);
5397 }
5398 }
5399 }
5400
5401 /* Treat array-ref with constant index as a component-ref. */
5402
5403 case COMPONENT_REF:
5404 case BIT_FIELD_REF:
5405       /* If the operand is a CONSTRUCTOR, we can just extract the
5406 appropriate field if it is present. Don't do this if we have
5407 already written the data since we want to refer to that copy
5408 and varasm.c assumes that's what we'll do. */
5409       if (code != ARRAY_REF
5410 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5411 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5412 {
5413 tree elt;
5414
5415 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5416 elt = TREE_CHAIN (elt))
5417 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5418 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5419 }
5420
5421 {
5422 enum machine_mode mode1;
5423 int bitsize;
5424 int bitpos;
5425 	tree offset;
5426 	int volatilep = 0;
5427 	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5428 					&mode1, &unsignedp, &volatilep);
5429 	int alignment;
5430
5431 /* If we got back the original object, something is wrong. Perhaps
5432 we are evaluating an expression too early. In any event, don't
5433 infinitely recurse. */
5434 if (tem == exp)
5435 abort ();
5436
5437 	/* If TEM's type is a union of variable size, pass TARGET to the inner
5438 computation, since it will need a temporary and TARGET is known
5439 	   to suffice.  This occurs in unchecked conversion in Ada.  */
5440
5441 op0 = expand_expr (tem,
5442 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5443 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5444 != INTEGER_CST)
5445 ? target : NULL_RTX),
5446 VOIDmode,
5447 modifier == EXPAND_INITIALIZER ? modifier : 0);
5448
5449       /* If this is a constant, put it into a register if it is a
5450 	 legitimate constant, and into memory if it isn't.  */
5451 if (CONSTANT_P (op0))
5452 {
5453 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5454 	  if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5455 op0 = force_reg (mode, op0);
5456 else
5457 op0 = validize_mem (force_const_mem (mode, op0));
5458 }
5459
5460 	alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
5461 if (offset != 0)
5462 {
5463 	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5464
5465 if (GET_CODE (op0) != MEM)
5466 abort ();
5467 op0 = change_address (op0, VOIDmode,
5468 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5469 force_reg (ptr_mode, offset_rtx)));
5470 /* If we have a variable offset, the known alignment
5471 is only that of the innermost structure containing the field.
5472 (Actually, we could sometimes do better by using the
5473 size of an element of the innermost array, but no need.) */
5474 if (TREE_CODE (exp) == COMPONENT_REF
5475 || TREE_CODE (exp) == BIT_FIELD_REF)
5476 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5477 / BITS_PER_UNIT);
5478 }
5479
5480 /* Don't forget about volatility even if this is a bitfield. */
5481 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5482 {
5483 op0 = copy_rtx (op0);
5484 MEM_VOLATILE_P (op0) = 1;
5485 }
5486
5487 /* In cases where an aligned union has an unaligned object
5488 as a field, we might be extracting a BLKmode value from
5489 an integer-mode (e.g., SImode) object. Handle this case
5490 by doing the extract into an object as wide as the field
5491 (which we know to be the width of a basic mode), then
5492 storing into memory, and changing the mode to BLKmode.
5493 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5494 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5495       if (mode1 == VOIDmode
5496 	  || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5497 	  || (modifier != EXPAND_CONST_ADDRESS
5498 && modifier != EXPAND_INITIALIZER
5499 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
5500 /* If the field isn't aligned enough to fetch as a memref,
5501 fetch it as a bit field. */
5502 || (SLOW_UNALIGNED_ACCESS
5503 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5504 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5505 	{
5506 enum machine_mode ext_mode = mode;
5507
5508 if (ext_mode == BLKmode)
5509 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5510
5511 if (ext_mode == BLKmode)
5512 {
5513 /* In this case, BITPOS must start at a byte boundary and
5514 TARGET, if specified, must be a MEM. */
5515 if (GET_CODE (op0) != MEM
5516 || (target != 0 && GET_CODE (target) != MEM)
5517 || bitpos % BITS_PER_UNIT != 0)
5518 abort ();
5519
5520 op0 = change_address (op0, VOIDmode,
5521 plus_constant (XEXP (op0, 0),
5522 bitpos / BITS_PER_UNIT));
5523 if (target == 0)
5524 target = assign_temp (type, 0, 1, 1);
5525
5526 emit_block_move (target, op0,
5527 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5528 / BITS_PER_UNIT),
5529 1);
5530
5531 return target;
5532 }
5533
5534 op0 = validize_mem (op0);
5535
5536 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5537 mark_reg_pointer (XEXP (op0, 0), alignment);
5538
5539 op0 = extract_bit_field (op0, bitsize, bitpos,
5540 				   unsignedp, target, ext_mode, ext_mode,
5541 				   alignment,
5542 int_size_in_bytes (TREE_TYPE (tem)));
5543 if (mode == BLKmode)
5544 {
5545 rtx new = assign_stack_temp (ext_mode,
5546 bitsize / BITS_PER_UNIT, 0);
5547
5548 emit_move_insn (new, op0);
5549 op0 = copy_rtx (new);
5550 PUT_MODE (op0, BLKmode);
5551 	      MEM_IN_STRUCT_P (op0) = 1;
5552 }
5553
5554 return op0;
5555 }
5556
5557 /* If the result is BLKmode, use that to access the object
5558 now as well. */
5559 if (mode == BLKmode)
5560 mode1 = BLKmode;
5561
5562 /* Get a reference to just this component. */
5563 if (modifier == EXPAND_CONST_ADDRESS
5564 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5565 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5566 (bitpos / BITS_PER_UNIT)));
5567 else
5568 op0 = change_address (op0, mode1,
5569 plus_constant (XEXP (op0, 0),
5570 (bitpos / BITS_PER_UNIT)));
5571 if (GET_CODE (XEXP (op0, 0)) == REG)
5572 mark_reg_pointer (XEXP (op0, 0), alignment);
5573
5574 MEM_IN_STRUCT_P (op0) = 1;
5575 MEM_VOLATILE_P (op0) |= volatilep;
5576 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5577 return op0;
5578 if (target == 0)
5579 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5580 convert_move (target, op0, unsignedp);
5581 return target;
5582 }
5583
5584 /* Intended for a reference to a buffer of a file-object in Pascal.
5585 But it's not certain that a special tree code will really be
5586 necessary for these. INDIRECT_REF might work for them. */
5587 case BUFFER_REF:
5588 abort ();
5589
5590     case IN_EXPR:
5591       {
5592 /* Pascal set IN expression.
5593
5594 Algorithm:
5595 rlo = set_low - (set_low%bits_per_word);
5596 the_word = set [ (index - rlo)/bits_per_word ];
5597 bit_index = index % bits_per_word;
5598 bitmask = 1 << bit_index;
5599 return !!(the_word & bitmask); */
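	/* Worked example (hypothetical values, with byte-sized pieces as
	   the code below actually uses): for a set whose low bound is 0
	   and index 11, rlo == 0, the_word == set[11/8] == set[1],
	   bit_index == 11%8 == 3, bitmask == 1<<3; the IN test examines
	   bit 3 of byte 1.  */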
5600
5601 tree set = TREE_OPERAND (exp, 0);
5602 tree index = TREE_OPERAND (exp, 1);
5603 	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5604 	tree set_type = TREE_TYPE (set);
5605 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5606 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5607 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5608 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5609 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5610 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5611 rtx setaddr = XEXP (setval, 0);
5612 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5613 rtx rlow;
5614 rtx diff, quo, rem, addr, bit, result;
5615
5616 preexpand_calls (exp);
5617
5618 /* If domain is empty, answer is no. Likewise if index is constant
5619 and out of bounds. */
5620 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5621 && TREE_CODE (set_low_bound) == INTEGER_CST
5622 && tree_int_cst_lt (set_high_bound, set_low_bound)
5623 || (TREE_CODE (index) == INTEGER_CST
5624 && TREE_CODE (set_low_bound) == INTEGER_CST
5625 && tree_int_cst_lt (index, set_low_bound))
5626 || (TREE_CODE (set_high_bound) == INTEGER_CST
5627 && TREE_CODE (index) == INTEGER_CST
5628 && tree_int_cst_lt (set_high_bound, index))))
5629 return const0_rtx;
5630
5631 if (target == 0)
5632 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5633
5634 /* If we get here, we have to generate the code for both cases
5635 (in range and out of range). */
5636
5637 op0 = gen_label_rtx ();
5638 op1 = gen_label_rtx ();
5639
5640 if (! (GET_CODE (index_val) == CONST_INT
5641 && GET_CODE (lo_r) == CONST_INT))
5642 {
5643 	    emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5644 			   GET_MODE (index_val), iunsignedp, 0);
5645 emit_jump_insn (gen_blt (op1));
5646 }
5647
5648 if (! (GET_CODE (index_val) == CONST_INT
5649 && GET_CODE (hi_r) == CONST_INT))
5650 {
5651 	    emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5652 			   GET_MODE (index_val), iunsignedp, 0);
5653 emit_jump_insn (gen_bgt (op1));
5654 }
5655
5656 /* Calculate the element number of bit zero in the first word
5657 of the set. */
5658 if (GET_CODE (lo_r) == CONST_INT)
5659 rlow = GEN_INT (INTVAL (lo_r)
5660 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5661 	else
5662 rlow = expand_binop (index_mode, and_optab, lo_r,
5663 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5664 			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5665
5666 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5667 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5668
5669 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5670 			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5671 	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5672 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5673
5674 	addr = memory_address (byte_mode,
5675 expand_binop (index_mode, add_optab, diff,
5676 setaddr, NULL_RTX, iunsignedp,
5677 					     OPTAB_LIB_WIDEN));
5678
5679 	/* Extract the bit we want to examine.  */
5680 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5681 gen_rtx (MEM, byte_mode, addr),
5682 make_tree (TREE_TYPE (index), rem),
5683 NULL_RTX, 1);
5684 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5685 GET_MODE (target) == byte_mode ? target : 0,
5686 			       1, OPTAB_LIB_WIDEN);
5687
5688 if (result != target)
5689 convert_move (target, result, 1);
5690
5691 /* Output the code to handle the out-of-range case. */
5692 emit_jump (op0);
5693 emit_label (op1);
5694 emit_move_insn (target, const0_rtx);
5695 emit_label (op0);
5696 return target;
5697 }
5698
5699 case WITH_CLEANUP_EXPR:
5700 if (RTL_EXPR_RTL (exp) == 0)
5701 {
5702 RTL_EXPR_RTL (exp)
5703 	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5704 cleanups_this_call
5705 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5706 /* That's it for this cleanup. */
5707 TREE_OPERAND (exp, 2) = 0;
5708 	  (*interim_eh_hook) (NULL_TREE);
5709 }
5710 return RTL_EXPR_RTL (exp);
5711
5712 case CLEANUP_POINT_EXPR:
5713 {
5714 	extern int temp_slot_level;
5715 	tree old_cleanups = cleanups_this_call;
5716 int old_temp_level = target_temp_slot_level;
5717 push_temp_slots ();
5718 target_temp_slot_level = temp_slot_level;
5719 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5720 /* If we're going to use this value, load it up now. */
5721 if (! ignore)
5722 op0 = force_not_mem (op0);
5723 	expand_cleanups_to (old_cleanups);
5724 preserve_temp_slots (op0);
5725 free_temp_slots ();
5726 pop_temp_slots ();
5727 target_temp_slot_level = old_temp_level;
5728 }
5729 return op0;
5730
5731 case CALL_EXPR:
5732 /* Check for a built-in function. */
5733 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5734 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5735 == FUNCTION_DECL)
5736 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5737 return expand_builtin (exp, target, subtarget, tmode, ignore);
5738
5739 /* If this call was expanded already by preexpand_calls,
5740 just return the result we got. */
5741 if (CALL_EXPR_RTL (exp) != 0)
5742 return CALL_EXPR_RTL (exp);
5743
5744       return expand_call (exp, target, ignore);
5745
5746 case NON_LVALUE_EXPR:
5747 case NOP_EXPR:
5748 case CONVERT_EXPR:
5749 case REFERENCE_EXPR:
5750 if (TREE_CODE (type) == UNION_TYPE)
5751 {
5752 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5753 if (target == 0)
5754 {
5755 if (mode != BLKmode)
5756 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5757 else
5758 target = assign_temp (type, 0, 1, 1);
5759 }
5760
5761 if (GET_CODE (target) == MEM)
5762 /* Store data into beginning of memory target. */
5763 store_expr (TREE_OPERAND (exp, 0),
5764 change_address (target, TYPE_MODE (valtype), 0), 0);
5765
5766 else if (GET_CODE (target) == REG)
5767 /* Store this field into a union of the proper type. */
5768 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5769 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5770 VOIDmode, 0, 1,
5771 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5772 else
5773 abort ();
5774
5775 /* Return the entire union. */
5776 return target;
5777 }
5778
5779 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5780 {
5781 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5782 modifier);
5783
5784 /* If the signedness of the conversion differs and OP0 is
5785 a promoted SUBREG, clear that indication since we now
5786 have to do the proper extension. */
5787 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5788 && GET_CODE (op0) == SUBREG)
5789 SUBREG_PROMOTED_VAR_P (op0) = 0;
5790
5791 return op0;
5792 }
5793
5794       op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5795 if (GET_MODE (op0) == mode)
5796 return op0;
5797
5798 /* If OP0 is a constant, just convert it into the proper mode. */
5799 if (CONSTANT_P (op0))
5800 return
5801 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5802 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5803
5804 if (modifier == EXPAND_INITIALIZER)
5805 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5806
5807       if (target == 0)
5808 return
5809 convert_to_mode (mode, op0,
5810 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5811       else
5812 convert_move (target, op0,
5813 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5814 return target;
5815
5816 case PLUS_EXPR:
5817 /* We come here from MINUS_EXPR when the second operand is a
5818 constant. */
5819 plus_expr:
5820 this_optab = add_optab;
5821
5822 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5823 something else, make sure we add the register to the constant and
5824 then to the other thing. This case can occur during strength
5825 reduction and doing it this way will produce better code if the
5826 frame pointer or argument pointer is eliminated.
5827
5828 fold-const.c will ensure that the constant is always in the inner
5829 PLUS_EXPR, so the only case we need to do anything about is if
5830 sp, ap, or fp is our second argument, in which case we must swap
5831 the innermost first argument and our second argument. */
5832
5833 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5834 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5835 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5836 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5837 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5838 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5839 {
5840 tree t = TREE_OPERAND (exp, 1);
5841
5842 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5843 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5844 }
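      /* E.g. (illustrative): given (x + 4) + fp, the swap above produces
	 (fp + 4) + x, so the frame pointer and the constant stay together
	 and can be folded by plus_constant when fp is eliminated.  */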
5845
5846       /* If the result is to be ptr_mode and we are adding an integer to
5847 something, we might be forming a constant. So try to use
5848 plus_constant. If it produces a sum and we can't accept it,
5849 use force_operand. This allows P = &ARR[const] to generate
5850 efficient code on machines where a SYMBOL_REF is not a valid
5851 address.
5852
5853 If this is an EXPAND_SUM call, always return the sum. */
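      /* For instance (hypothetical declarations): given `static int arr[10];'
	 and the address `&arr[3]' with 4-byte ints, plus_constant yields
	 (plus (symbol_ref arr) (const_int 12)), which an EXPAND_INITIALIZER
	 caller can accept even where that is not a valid machine address.  */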
5854       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5855 	  || mode == ptr_mode)
5856 	{
5857 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5858 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5859 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5860 {
5861 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5862 EXPAND_SUM);
5863 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5864 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5865 op1 = force_operand (op1, target);
5866 return op1;
5867 }
5868
5869 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5870 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5871 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5872 {
5873 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5874 EXPAND_SUM);
5875 if (! CONSTANT_P (op0))
5876 {
5877 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5878 VOIDmode, modifier);
5879 /* Don't go to both_summands if modifier
5880 says it's not right to return a PLUS. */
5881 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5882 goto binop2;
5883 goto both_summands;
5884 }
5885 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5886 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5887 op0 = force_operand (op0, target);
5888 return op0;
5889 }
5890 }
5891
5892 /* No sense saving up arithmetic to be done
5893 if it's all in the wrong mode to form part of an address.
5894 And force_operand won't know whether to sign-extend or
5895 zero-extend. */
5896 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5897 	  || mode != ptr_mode)
5898 	goto binop;
5899
5900 preexpand_calls (exp);
5901 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5902 subtarget = 0;
5903
5904 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5905       op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5906
5907     both_summands:
5908 /* Make sure any term that's a sum with a constant comes last. */
5909 if (GET_CODE (op0) == PLUS
5910 && CONSTANT_P (XEXP (op0, 1)))
5911 {
5912 temp = op0;
5913 op0 = op1;
5914 op1 = temp;
5915 }
5916 /* If adding to a sum including a constant,
5917 associate it to put the constant outside. */
5918 if (GET_CODE (op1) == PLUS
5919 && CONSTANT_P (XEXP (op1, 1)))
5920 {
5921 rtx constant_term = const0_rtx;
5922
5923 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5924 if (temp != 0)
5925 op0 = temp;
5926 /* Ensure that MULT comes first if there is one. */
5927 else if (GET_CODE (op0) == MULT)
5928 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5929 else
5930 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5931
5932 /* Let's also eliminate constants from op0 if possible. */
5933 op0 = eliminate_constant_term (op0, &constant_term);
5934
5935 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5936 their sum should be a constant. Form it into OP1, since the
5937 result we want will then be OP0 + OP1. */
5938
5939 temp = simplify_binary_operation (PLUS, mode, constant_term,
5940 XEXP (op1, 1));
5941 if (temp != 0)
5942 op1 = temp;
5943 else
5944 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5945 }
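      /* E.g. (illustrative): if OP0 is (plus X 3) and OP1 is (plus Y 4),
	 the associations above leave OP0 == (plus X Y) and OP1 == 7, so
	 the result is (plus (plus X Y) (const_int 7)) with the constant
	 outermost.  */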
5946
5947 /* Put a constant term last and put a multiplication first. */
5948 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5949 temp = op1, op1 = op0, op0 = temp;
5950
5951 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5952 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5953
5954 case MINUS_EXPR:
5955 /* For initializers, we are allowed to return a MINUS of two
5956 symbolic constants. Here we handle all cases when both operands
5957 are constant. */
5958 /* Handle difference of two symbolic constants,
5959 for the sake of an initializer. */
5960 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5961 && really_constant_p (TREE_OPERAND (exp, 0))
5962 && really_constant_p (TREE_OPERAND (exp, 1)))
5963 {
5964 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5965 VOIDmode, modifier);
5966 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5967 VOIDmode, modifier);
5968
5969 /* If the last operand is a CONST_INT, use plus_constant of
5970 the negated constant. Else make the MINUS. */
5971 if (GET_CODE (op1) == CONST_INT)
5972 return plus_constant (op0, - INTVAL (op1));
5973 else
5974 return gen_rtx (MINUS, mode, op0, op1);
5975 }
5976 /* Convert A - const to A + (-const). */
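      /* E.g. (illustrative): `x - 5' is rewritten as `x + (-5)' and reuses
	 the PLUS_EXPR code above; for an unsigned type, where negating the
	 constant would overflow, the rewrite is done in the corresponding
	 signed type instead.  */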
5977 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5978 {
5979 tree negated = fold (build1 (NEGATE_EXPR, type,
5980 TREE_OPERAND (exp, 1)));
5981
5982 /* Deal with the case where we can't negate the constant
5983 in TYPE. */
5984 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5985 {
5986 tree newtype = signed_type (type);
5987 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5988 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5989 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5990
5991 if (! TREE_OVERFLOW (newneg))
5992 return expand_expr (convert (type,
5993 build (PLUS_EXPR, newtype,
5994 newop0, newneg)),
5995 target, tmode, modifier);
5996 }
5997 else
5998 {
5999 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6000 goto plus_expr;
6001 }
6002 }
6003 this_optab = sub_optab;
6004 goto binop;
6005
6006 case MULT_EXPR:
6007 preexpand_calls (exp);
6008 /* If first operand is constant, swap them.
6009 Thus the following special case checks need only
6010 check the second operand. */
6011 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6012 {
6013 register tree t1 = TREE_OPERAND (exp, 0);
6014 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6015 TREE_OPERAND (exp, 1) = t1;
6016 }
6017
6018 /* Attempt to return something suitable for generating an
6019 indexed address, for machines that support that. */
6020
6021       if (modifier == EXPAND_SUM && mode == ptr_mode
6022 	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6023 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6024 {
6025 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6026
6027 /* Apply distributive law if OP0 is x+c. */
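	  /* E.g. (illustrative): with OP0 == (plus X (const_int 4)) and a
	     constant multiplier of 3, this returns
	     (plus (mult X 3) (const_int 12)).  */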
6028 if (GET_CODE (op0) == PLUS
6029 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6030 return gen_rtx (PLUS, mode,
6031 gen_rtx (MULT, mode, XEXP (op0, 0),
6032 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6033 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6034 * INTVAL (XEXP (op0, 1))));
6035
6036 if (GET_CODE (op0) != REG)
6037 	    op0 = force_operand (op0, NULL_RTX);
6038 if (GET_CODE (op0) != REG)
6039 op0 = copy_to_mode_reg (mode, op0);
6040
6041 return gen_rtx (MULT, mode, op0,
6042 			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6043 }
6044
6045 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6046 subtarget = 0;
6047
6048 /* Check for multiplying things that have been extended
6049 from a narrower type. If this machine supports multiplying
6050 in that narrower type with a result in the desired type,
6051 do it that way, and avoid the explicit type-conversion. */
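      /* Sketch of the idea (hypothetical target): for `(int) h1 * (int) h2'
	 with HImode operands h1 and h2, a machine with an HImode-by-HImode
	 multiply yielding SImode lets us emit one widening multiply instead
	 of two extensions followed by a full SImode multiply.  */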
6052 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6053 && TREE_CODE (type) == INTEGER_TYPE
6054 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6055 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6056 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6057 && int_fits_type_p (TREE_OPERAND (exp, 1),
6058 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6059 /* Don't use a widening multiply if a shift will do. */
6060 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6061 		       > HOST_BITS_PER_WIDE_INT)
6062 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6063 ||
6064 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6065 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6066 ==
6067 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6068 /* If both operands are extended, they must either both
6069 be zero-extended or both be sign-extended. */
6070 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6071 ==
6072 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6073 {
6074 enum machine_mode innermode
6075 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6076 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6077 ? smul_widen_optab : umul_widen_optab);
6078 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6079 ? umul_widen_optab : smul_widen_optab);
6080 	  if (mode == GET_MODE_WIDER_MODE (innermode))
6081 	    {
6082 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6083 {
6084 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6085 NULL_RTX, VOIDmode, 0);
6086 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6087 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6088 VOIDmode, 0);
6089 else
6090 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6091 NULL_RTX, VOIDmode, 0);
6092 goto binop2;
6093 }
6094 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6095 && innermode == word_mode)
6096 {
6097 rtx htem;
6098 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6099 NULL_RTX, VOIDmode, 0);
6100 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6101 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6102 VOIDmode, 0);
6103 else
6104 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6105 NULL_RTX, VOIDmode, 0);
6106 temp = expand_binop (mode, other_optab, op0, op1, target,
6107 unsignedp, OPTAB_LIB_WIDEN);
6108 htem = expand_mult_highpart_adjust (innermode,
6109 gen_highpart (innermode, temp),
6110 op0, op1,
6111 gen_highpart (innermode, temp),
6112 unsignedp);
6113 emit_move_insn (gen_highpart (innermode, temp), htem);
6114 return temp;
6115 }
6116 }
6117 }
6118 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6119       op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6120 return expand_mult (mode, op0, op1, target, unsignedp);
6121
6122 case TRUNC_DIV_EXPR:
6123 case FLOOR_DIV_EXPR:
6124 case CEIL_DIV_EXPR:
6125 case ROUND_DIV_EXPR:
6126 case EXACT_DIV_EXPR:
6127 preexpand_calls (exp);
6128 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6129 subtarget = 0;
6130 /* Possible optimization: compute the dividend with EXPAND_SUM
6131 then if the divisor is constant can optimize the case
6132 where some terms of the dividend have coeffs divisible by it. */
6133 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6134       op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6135 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6136
6137 case RDIV_EXPR:
6138 this_optab = flodiv_optab;
6139 goto binop;
6140
6141 case TRUNC_MOD_EXPR:
6142 case FLOOR_MOD_EXPR:
6143 case CEIL_MOD_EXPR:
6144 case ROUND_MOD_EXPR:
6145 preexpand_calls (exp);
6146 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6147 subtarget = 0;
6148 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6149       op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6150 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6151
6152 case FIX_ROUND_EXPR:
6153 case FIX_FLOOR_EXPR:
6154 case FIX_CEIL_EXPR:
6155 abort (); /* Not used for C. */
6156
6157 case FIX_TRUNC_EXPR:
6158       op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6159 if (target == 0)
6160 target = gen_reg_rtx (mode);
6161 expand_fix (target, op0, unsignedp);
6162 return target;
6163
6164 case FLOAT_EXPR:
6165       op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6166 if (target == 0)
6167 target = gen_reg_rtx (mode);
6168 /* expand_float can't figure out what to do if FROM has VOIDmode.
6169 So give it the correct mode. With -O, cse will optimize this. */
6170 if (GET_MODE (op0) == VOIDmode)
6171 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6172 op0);
6173 expand_float (target, op0,
6174 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6175 return target;
6176
6177 case NEGATE_EXPR:
6178       op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6179 temp = expand_unop (mode, neg_optab, op0, target, 0);
6180 if (temp == 0)
6181 abort ();
6182 return temp;
6183
6184 case ABS_EXPR:
6185 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6186
6187       /* Handle complex values specially.  */
6188 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6189 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6190 return expand_complex_abs (mode, op0, target, unsignedp);
6191
6192 /* Unsigned abs is simply the operand. Testing here means we don't
6193 risk generating incorrect code below. */
6194 if (TREE_UNSIGNED (type))
6195 return op0;
6196
6197 return expand_abs (mode, op0, target, unsignedp,
6198 safe_from_p (target, TREE_OPERAND (exp, 0)));
6199
6200 case MAX_EXPR:
6201 case MIN_EXPR:
6202 target = original_target;
6203 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6204 	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6205 	  || GET_MODE (target) != mode
6206 || (GET_CODE (target) == REG
6207 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6208 target = gen_reg_rtx (mode);
6209       op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6210 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6211
6212 /* First try to do it with a special MIN or MAX instruction.
6213 If that does not win, use a conditional jump to select the proper
6214 value. */
6215 this_optab = (TREE_UNSIGNED (type)
6216 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6217 : (code == MIN_EXPR ? smin_optab : smax_optab));
6218
6219 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6220 OPTAB_WIDEN);
6221 if (temp != 0)
6222 return temp;
6223
6224 /* At this point, a MEM target is no longer useful; we will get better
6225 code without it. */
6226
6227 if (GET_CODE (target) == MEM)
6228 target = gen_reg_rtx (mode);
6229
6230 if (target != op0)
6231 emit_move_insn (target, op0);
6232
6233       op0 = gen_label_rtx ();
6234
6235 /* If this mode is an integer too wide to compare properly,
6236 compare word by word. Rely on cse to optimize constant cases. */
6237       if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6238 	{
6239 	  if (code == MAX_EXPR)
6240 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6241 target, op1, NULL_RTX, op0);
6242 	  else
6243 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6244 op1, target, NULL_RTX, op0);
6245 	  emit_move_insn (target, op1);
6246 	}
6247 else
6248 {
6249 if (code == MAX_EXPR)
6250 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6251 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6252 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6253 else
6254 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6255 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6256 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6257 	  if (temp == const0_rtx)
6258 	    emit_move_insn (target, op1);
6259 else if (temp != const_true_rtx)
6260 {
6261 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6262 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6263 else
6264 abort ();
6265 	      emit_move_insn (target, op1);
6266 }
6267 }
6268 emit_label (op0);
6269 return target;
6270
6271 case BIT_NOT_EXPR:
6272 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6273 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6274 if (temp == 0)
6275 abort ();
6276 return temp;
6277
6278 case FFS_EXPR:
6279 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6280 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6281 if (temp == 0)
6282 abort ();
6283 return temp;
6284
6285 /* ??? Can optimize bitwise operations with one arg constant.
6286 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6287 and (a bitwise1 b) bitwise2 b (etc)
6288 but that is probably not worth while. */
6289
6290 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6291 boolean values when we want in all cases to compute both of them. In
6292 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6293 as actual zero-or-1 values and then bitwise anding. In cases where
6294 there cannot be any side effects, better code would be made by
6295 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6296 how to recognize those cases. */
6297
6298 case TRUTH_AND_EXPR:
6299 case BIT_AND_EXPR:
6300 this_optab = and_optab;
6301 goto binop;
6302
6303 case TRUTH_OR_EXPR:
6304 case BIT_IOR_EXPR:
6305 this_optab = ior_optab;
6306 goto binop;
6307
6308     case TRUTH_XOR_EXPR:
6309 case BIT_XOR_EXPR:
6310 this_optab = xor_optab;
6311 goto binop;
6312
6313 case LSHIFT_EXPR:
6314 case RSHIFT_EXPR:
6315 case LROTATE_EXPR:
6316 case RROTATE_EXPR:
6317 preexpand_calls (exp);
6318 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6319 subtarget = 0;
6320 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6321 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6322 unsignedp);
6323
6324 /* Could determine the answer when only additive constants differ. Also,
6325 the addition of one can be handled by changing the condition. */
6326 case LT_EXPR:
6327 case LE_EXPR:
6328 case GT_EXPR:
6329 case GE_EXPR:
6330 case EQ_EXPR:
6331 case NE_EXPR:
6332 preexpand_calls (exp);
6333 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6334 if (temp != 0)
6335 return temp;
6336
6337       /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
6338 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6339 && original_target
6340 && GET_CODE (original_target) == REG
6341 && (GET_MODE (original_target)
6342 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6343 {
6344 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6345 VOIDmode, 0);
6346
6347 if (temp != original_target)
6348 temp = copy_to_reg (temp);
6349
6350 	  op1 = gen_label_rtx ();
6351 	  emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6352 GET_MODE (temp), unsignedp, 0);
6353 emit_jump_insn (gen_beq (op1));
6354 emit_move_insn (temp, const1_rtx);
6355 emit_label (op1);
6356 return temp;
6357 }
6358
6359 /* If no set-flag instruction, must generate a conditional
6360 store into a temporary variable. Drop through
6361 and handle this like && and ||. */
6362
6363 case TRUTH_ANDIF_EXPR:
6364 case TRUTH_ORIF_EXPR:
6365 if (! ignore
6366 && (target == 0 || ! safe_from_p (target, exp)
6367 /* Make sure we don't have a hard reg (such as function's return
6368 value) live across basic blocks, if not optimizing. */
6369 || (!optimize && GET_CODE (target) == REG
6370 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6371 	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6372
6373 if (target)
6374 emit_clr_insn (target);
6375
6376 op1 = gen_label_rtx ();
6377 jumpifnot (exp, op1);
6378
6379 if (target)
6380 emit_0_to_1_insn (target);
6381
6382       emit_label (op1);
6383       return ignore ? const0_rtx : target;
6384
6385 case TRUTH_NOT_EXPR:
6386 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6387 /* The parser is careful to generate TRUTH_NOT_EXPR
6388 only with operands that are always zero or one. */
6389       temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6390 target, 1, OPTAB_LIB_WIDEN);
6391 if (temp == 0)
6392 abort ();
6393 return temp;
6394
6395 case COMPOUND_EXPR:
6396 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6397 emit_queue ();
6398 return expand_expr (TREE_OPERAND (exp, 1),
6399 (ignore ? const0_rtx : target),
6400 VOIDmode, 0);
6401
6402 case COND_EXPR:
6403 {
6404 rtx flag = NULL_RTX;
6405 tree left_cleanups = NULL_TREE;
6406 tree right_cleanups = NULL_TREE;
6407
6408 /* Used to save a pointer to the place to put the setting of
6409 the flag that indicates if this side of the conditional was
6410 	   taken.  We backpatch the code if we find out later that we
6411 	   have any conditional cleanups that need to be performed.  */
6412 rtx dest_right_flag = NULL_RTX;
6413 rtx dest_left_flag = NULL_RTX;
6414
6415 /* Note that COND_EXPRs whose type is a structure or union
6416 are required to be constructed to contain assignments of
6417 a temporary variable, so that we can evaluate them here
6418 for side effect only. If type is void, we must do likewise. */
6419
6420 /* If an arm of the branch requires a cleanup,
6421 only that cleanup is performed. */
6422
6423 tree singleton = 0;
6424 tree binary_op = 0, unary_op = 0;
6425 tree old_cleanups = cleanups_this_call;
6426
6427 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6428 convert it to our mode, if necessary. */
6429 if (integer_onep (TREE_OPERAND (exp, 1))
6430 && integer_zerop (TREE_OPERAND (exp, 2))
6431 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6432 {
6433 if (ignore)
6434 {
6435 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6436 modifier);
6437 return const0_rtx;
6438 }
6439
6440 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6441 if (GET_MODE (op0) == mode)
6442 return op0;
6443
6444 if (target == 0)
6445 target = gen_reg_rtx (mode);
6446 convert_move (target, op0, unsignedp);
6447 return target;
6448 }
6449
6450 /* If we are not to produce a result, we have no target. Otherwise,
6451 if a target was specified use it; it will not be used as an
6452 intermediate target unless it is safe. If no target, use a
6453 temporary. */
6454
6455 	if (ignore)
6456 temp = 0;
6457 else if (original_target
6458 		 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
6459 && GET_MODE (original_target) == mode
6460 && ! (GET_CODE (original_target) == MEM
6461 && MEM_VOLATILE_P (original_target)))
6462 	  temp = original_target;
6463 	else
6464 	  temp = assign_temp (type, 0, 0, 1);
6465
6466 /* Check for X ? A + B : A. If we have this, we can copy
6467 A to the output and conditionally add B. Similarly for unary
6468 operations. Don't do this if X has side-effects because
6469 those side effects might affect A or B and the "?" operation is
6470 a sequence point in ANSI. (We test for side effects later.) */
6471
6472 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6473 && operand_equal_p (TREE_OPERAND (exp, 2),
6474 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6475 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6476 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6477 && operand_equal_p (TREE_OPERAND (exp, 1),
6478 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6479 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6480 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6481 && operand_equal_p (TREE_OPERAND (exp, 2),
6482 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6483 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6484 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6485 && operand_equal_p (TREE_OPERAND (exp, 1),
6486 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6487 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6488
6489 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6490 operation, do this as A + (X != 0). Similarly for other simple
6491 binary operators. */
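	/* E.g. (illustrative): `x ? y + 1 : y' can be computed as
	   `y + (x != 0)', avoiding a branch when do_store_flag can
	   produce the comparison result directly.  */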
6492 	if (temp && singleton && binary_op
6493 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6494 && (TREE_CODE (binary_op) == PLUS_EXPR
6495 || TREE_CODE (binary_op) == MINUS_EXPR
6496 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6497 		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
6498 && integer_onep (TREE_OPERAND (binary_op, 1))
6499 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6500 {
6501 rtx result;
6502 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6503 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6504 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6505 			  : xor_optab);
6506
6507 /* If we had X ? A : A + 1, do this as A + (X == 0).
6508
6509 We have to invert the truth value here and then put it
6510 back later if do_store_flag fails. We cannot simply copy
6511 TREE_OPERAND (exp, 0) to another variable and modify that
6512 because invert_truthvalue can modify the tree pointed to
6513 by its argument. */
6514 if (singleton == TREE_OPERAND (exp, 1))
6515 TREE_OPERAND (exp, 0)
6516 = invert_truthvalue (TREE_OPERAND (exp, 0));
6517
6518 result = do_store_flag (TREE_OPERAND (exp, 0),
6519 (safe_from_p (temp, singleton)
6520 ? temp : NULL_RTX),
6521 mode, BRANCH_COST <= 1);
6522
6523 if (result)
6524 {
6525 		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6526 return expand_binop (mode, boptab, op1, result, temp,
6527 unsignedp, OPTAB_LIB_WIDEN);
6528 }
6529 else if (singleton == TREE_OPERAND (exp, 1))
6530 TREE_OPERAND (exp, 0)
6531 = invert_truthvalue (TREE_OPERAND (exp, 0));
6532 }
6533
6534 	do_pending_stack_adjust ();
6535 NO_DEFER_POP;
6536 op0 = gen_label_rtx ();
6537
5dab5552 6538 flag = gen_reg_rtx (word_mode);
bbf6f052
RK
6539 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6540 {
6541 if (temp != 0)
6542 {
6543 /* If the target conflicts with the other operand of the
6544 binary op, we can't use it. Also, we can't use the target
6545 if it is a hard register, because evaluating the condition
6546 might clobber it. */
6547 if ((binary_op
6548 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6549 || (GET_CODE (temp) == REG
6550 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6551 temp = gen_reg_rtx (mode);
6552 store_expr (singleton, temp, 0);
6553 }
6554 else
906c4e36 6555 expand_expr (singleton,
2937cf87 6556 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5dab5552 6557 dest_left_flag = get_last_insn ();
bbf6f052
RK
6558 if (singleton == TREE_OPERAND (exp, 1))
6559 jumpif (TREE_OPERAND (exp, 0), op0);
6560 else
6561 jumpifnot (TREE_OPERAND (exp, 0), op0);
6562
0f41302f 6563 /* Allows cleanups up to here. */
5dab5552 6564 old_cleanups = cleanups_this_call;
bbf6f052
RK
6565 if (binary_op && temp == 0)
6566 /* Just touch the other operand. */
6567 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 6568 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6569 else if (binary_op)
6570 store_expr (build (TREE_CODE (binary_op), type,
6571 make_tree (type, temp),
6572 TREE_OPERAND (binary_op, 1)),
6573 temp, 0);
6574 else
6575 store_expr (build1 (TREE_CODE (unary_op), type,
6576 make_tree (type, temp)),
6577 temp, 0);
6578 op1 = op0;
5dab5552 6579 dest_right_flag = get_last_insn ();
bbf6f052
RK
6580 }
6581#if 0
6582 /* This is now done in jump.c and is better done there because it
6583 produces shorter register lifetimes. */
6584
6585 /* Check for both possibilities either constants or variables
6586 in registers (but not the same as the target!). If so, can
6587 save branches by assigning one, branching, and assigning the
6588 other. */
6589 else if (temp && GET_MODE (temp) != BLKmode
6590 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6591 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6592 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6593 && DECL_RTL (TREE_OPERAND (exp, 1))
6594 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6595 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6596 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6597 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6598 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6599 && DECL_RTL (TREE_OPERAND (exp, 2))
6600 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6601 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6602 {
6603 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6604 temp = gen_reg_rtx (mode);
6605 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5dab5552 6606 dest_left_flag = get_last_insn ();
bbf6f052 6607 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6608
0f41302f 6609 /* Allows cleanups up to here. */
5dab5552 6610 old_cleanups = cleanups_this_call;
bbf6f052
RK
6611 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6612 op1 = op0;
5dab5552 6613 dest_right_flag = get_last_insn ();
bbf6f052
RK
6614 }
6615#endif
6616 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6617 comparison operator. If we have one of these cases, set the
6618 output to A, branch on A (cse will merge these two references),
6619 then set the output to FOO. */
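	/* Illustration (a sketch, not part of the original source): for

		r = (a != 0) ? a : foo;

	   the code below stores A in the target, branches on the same
	   A (cse merges the two references of A), and stores FOO only
	   on the fall-through path.  */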
6620 else if (temp
6621 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6622 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6623 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6624 TREE_OPERAND (exp, 1), 0)
6625 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6626 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6627 {
6628 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6629 temp = gen_reg_rtx (mode);
6630 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5dab5552 6631 dest_left_flag = get_last_insn ();
bbf6f052 6632 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 6633
0f41302f 6634 /* Allows cleanups up to here. */
5dab5552 6635 old_cleanups = cleanups_this_call;
bbf6f052
RK
6636 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6637 op1 = op0;
5dab5552 6638 dest_right_flag = get_last_insn ();
bbf6f052
RK
6639 }
6640 else if (temp
6641 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6642 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6643 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6644 TREE_OPERAND (exp, 2), 0)
6645 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6646 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6647 {
6648 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6649 temp = gen_reg_rtx (mode);
6650 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5dab5552 6651 dest_left_flag = get_last_insn ();
bbf6f052 6652 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6653
0f41302f 6654 /* Allows cleanups up to here. */
5dab5552 6655 old_cleanups = cleanups_this_call;
bbf6f052
RK
6656 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6657 op1 = op0;
5dab5552 6658 dest_right_flag = get_last_insn ();
bbf6f052
RK
6659 }
6660 else
6661 {
6662 op1 = gen_label_rtx ();
6663 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6664
0f41302f 6665 /* Allows cleanups up to here. */
5dab5552 6666 old_cleanups = cleanups_this_call;
bbf6f052
RK
6667 if (temp != 0)
6668 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6669 else
906c4e36
RK
6670 expand_expr (TREE_OPERAND (exp, 1),
6671 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5dab5552
MS
6672 dest_left_flag = get_last_insn ();
6673
0f41302f 6674 /* Handle conditional cleanups, if any. */
5dab5552 6675 left_cleanups = defer_cleanups_to (old_cleanups);
bbf6f052
RK
6676
6677 emit_queue ();
6678 emit_jump_insn (gen_jump (op1));
6679 emit_barrier ();
6680 emit_label (op0);
6681 if (temp != 0)
6682 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6683 else
906c4e36
RK
6684 expand_expr (TREE_OPERAND (exp, 2),
6685 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5dab5552 6686 dest_right_flag = get_last_insn ();
bbf6f052
RK
6687 }
6688
0f41302f 6689 /* Handle conditional cleanups, if any. */
5dab5552 6690 right_cleanups = defer_cleanups_to (old_cleanups);
bbf6f052
RK
6691
6692 emit_queue ();
6693 emit_label (op1);
6694 OK_DEFER_POP;
5dab5552 6695
0f41302f 6696 /* Add back in any conditional cleanups. */
5dab5552
MS
6697 if (left_cleanups || right_cleanups)
6698 {
6699 tree new_cleanups;
6700 tree cond;
6701 rtx last;
6702
6703 /* Now that we know that a flag is needed, go back and add in the
0f41302f 6704 setting of the flag. */
5dab5552 6705
0f41302f 6706 /* Do the left side flag. */
5dab5552 6707 last = get_last_insn ();
0f41302f 6708 /* Flag left cleanups as needed. */
5dab5552
MS
6709 emit_move_insn (flag, const1_rtx);
6710 /* ??? deprecated, use sequences instead. */
6711 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6712
0f41302f 6713 /* Do the right side flag. */
5dab5552 6714 last = get_last_insn ();
0f41302f 6715 /* Flag right cleanups as needed. */
5dab5552
MS
6716 emit_move_insn (flag, const0_rtx);
6717 /* ??? deprecated, use sequences instead. */
6718 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6719
9ba73d38
MS
6720 /* All cleanups must be on the function_obstack. */
6721 push_obstacks_nochange ();
6722 resume_temporary_allocation ();
6723
0f41302f 6724 /* Convert FLAG, which is an rtx, into a tree. */
5dab5552
MS
6725 cond = make_node (RTL_EXPR);
6726 TREE_TYPE (cond) = integer_type_node;
6727 RTL_EXPR_RTL (cond) = flag;
6728 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
01842234 6729 cond = save_expr (cond);
5dab5552
MS
6730
6731 if (! left_cleanups)
6732 left_cleanups = integer_zero_node;
6733 if (! right_cleanups)
6734 right_cleanups = integer_zero_node;
fd67d2b6
JM
6735 new_cleanups = build (COND_EXPR, void_type_node,
6736 truthvalue_conversion (cond),
5dab5552
MS
6737 left_cleanups, right_cleanups);
6738 new_cleanups = fold (new_cleanups);
6739
9ba73d38
MS
6740 pop_obstacks ();
6741
5dab5552
MS
6742 /* Now add in the conditionalized cleanups. */
6743 cleanups_this_call
6744 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
61d6b1cc 6745 (*interim_eh_hook) (NULL_TREE);
5dab5552 6746 }
bbf6f052
RK
6747 return temp;
6748 }
6749
6750 case TARGET_EXPR:
6751 {
6752 /* Something needs to be initialized, but we didn't know
6753 where that thing was when building the tree. For example,
6754 it could be the return value of a function, or a parameter
6755 to a function which is laid out on the stack, or a temporary
6756 variable which must be passed by reference.
6757
6758 We guarantee that the expression will either be constructed
6759 or copied into our original target. */
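	/* Illustration (a hypothetical C++-flavored example, not from
	   the original source): in

		struct S f ();
		g (f ());

	   the result of f must be constructed somewhere before g can
	   be handed a reference to it; the TARGET_EXPR's slot names
	   that location.  */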
6760
6761 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 6762 tree cleanups = NULL_TREE;
5c062816 6763 tree exp1;
61d6b1cc 6764 rtx temp;
bbf6f052
RK
6765
6766 if (TREE_CODE (slot) != VAR_DECL)
6767 abort ();
6768
9c51f375
RK
6769 if (! ignore)
6770 target = original_target;
6771
bbf6f052
RK
6772 if (target == 0)
6773 {
6774 if (DECL_RTL (slot) != 0)
ac993f4f
MS
6775 {
6776 target = DECL_RTL (slot);
5c062816 6777 /* If we have already expanded the slot, don't do
ac993f4f 6778 it again. (mrs) */
5c062816
MS
6779 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6780 return target;
ac993f4f 6781 }
bbf6f052
RK
6782 else
6783 {
06089a8b 6784 target = assign_temp (type, 2, 1, 1);
bbf6f052
RK
6785 /* All temp slots at this level must not conflict. */
6786 preserve_temp_slots (target);
6787 DECL_RTL (slot) = target;
bbf6f052 6788
e287fd6e
RK
6789 /* Since SLOT is not known to the called function
6790 to belong to its stack frame, we must build an explicit
6791 cleanup. This case occurs when we must build up a reference
6792 to pass as an argument. In this case,
6793 it is very likely that such a reference need not be
6794 built here. */
6795
6796 if (TREE_OPERAND (exp, 2) == 0)
6797 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 6798 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 6799 }
bbf6f052
RK
6800 }
6801 else
6802 {
6803 /* This case does occur, when expanding a parameter which
6804 needs to be constructed on the stack. The target
6805 is the actual stack address that we want to initialize.
6806 The function we call will perform the cleanup in this case. */
6807
8c042b47
RS
6808 /* If we have already assigned it space, use that space,
6809 not the target that we were passed, as our target
6810 parameter is only a hint. */
6811 if (DECL_RTL (slot) != 0)
6812 {
6813 target = DECL_RTL (slot);
6814 /* If we have already expanded the slot, don't do
6815 it again. (mrs) */
6816 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6817 return target;
6818 }
6819
bbf6f052
RK
6820 DECL_RTL (slot) = target;
6821 }
6822
4847c938 6823 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
6824 /* Mark it as expanded. */
6825 TREE_OPERAND (exp, 1) = NULL_TREE;
6826
41531e5b 6827 store_expr (exp1, target, 0);
61d6b1cc 6828
2a888d4c
MS
6829 if (cleanups)
6830 {
6831 cleanups_this_call = tree_cons (NULL_TREE,
6832 cleanups,
6833 cleanups_this_call);
6834 (*interim_eh_hook) (NULL_TREE);
6835 }
61d6b1cc 6836
41531e5b 6837 return target;
bbf6f052
RK
6838 }
6839
6840 case INIT_EXPR:
6841 {
6842 tree lhs = TREE_OPERAND (exp, 0);
6843 tree rhs = TREE_OPERAND (exp, 1);
6844 tree noncopied_parts = 0;
6845 tree lhs_type = TREE_TYPE (lhs);
6846
6847 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6848 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6849 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6850 TYPE_NONCOPIED_PARTS (lhs_type));
6851 while (noncopied_parts != 0)
6852 {
6853 expand_assignment (TREE_VALUE (noncopied_parts),
6854 TREE_PURPOSE (noncopied_parts), 0, 0);
6855 noncopied_parts = TREE_CHAIN (noncopied_parts);
6856 }
6857 return temp;
6858 }
6859
6860 case MODIFY_EXPR:
6861 {
6862 /* If lhs is complex, expand calls in rhs before computing it.
6863 That's so we don't compute a pointer and save it over a call.
6864 If lhs is simple, compute it first so we can give it as a
6865 target if the rhs is just a call. This avoids an extra temp and copy
6866 and prevents a partial subsumption, which makes bad code.
6867 Actually we could treat component_ref's of vars like vars. */
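	/* Illustration (a sketch, not from the original source): for
	   `x = f ()' the simple lhs X is computed first and offered to
	   the call as its target, so the value of F can land directly
	   in X; for `*p[i] = f ()' the call in the rhs is expanded
	   first, so the pointer computed for the lhs need not be kept
	   live across the call.  */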
6868
6869 tree lhs = TREE_OPERAND (exp, 0);
6870 tree rhs = TREE_OPERAND (exp, 1);
6871 tree noncopied_parts = 0;
6872 tree lhs_type = TREE_TYPE (lhs);
6873
6874 temp = 0;
6875
6876 if (TREE_CODE (lhs) != VAR_DECL
6877 && TREE_CODE (lhs) != RESULT_DECL
6878 && TREE_CODE (lhs) != PARM_DECL)
6879 preexpand_calls (exp);
6880
6881 /* Check for |= or &= of a bitfield of size one into another bitfield
6882 of size 1. In this case, (unless we need the result of the
6883 assignment) we can do this more efficiently with a
6884 test followed by an assignment, if necessary.
6885
6886 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6887 things change so we do, this code should be enhanced to
6888 support it. */
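	/* Illustration (a sketch, not from the original source): with

		struct { unsigned a : 1, b : 1; } s;
		s.a |= s.b;

	   and the result unused, the code below emits the equivalent of

		if (s.b) s.a = 1;

	   (and `if (! s.b) s.a = 0;' for &=), avoiding a
	   read-modify-write of the destination bitfield.  */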
6889 if (ignore
6890 && TREE_CODE (lhs) == COMPONENT_REF
6891 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6892 || TREE_CODE (rhs) == BIT_AND_EXPR)
6893 && TREE_OPERAND (rhs, 0) == lhs
6894 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6895 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6896 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6897 {
6898 rtx label = gen_label_rtx ();
6899
6900 do_jump (TREE_OPERAND (rhs, 1),
6901 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6902 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6903 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6904 (TREE_CODE (rhs) == BIT_IOR_EXPR
6905 ? integer_one_node
6906 : integer_zero_node)),
6907 0, 0);
e7c33f54 6908 do_pending_stack_adjust ();
bbf6f052
RK
6909 emit_label (label);
6910 return const0_rtx;
6911 }
6912
6913 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6914 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6915 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6916 TYPE_NONCOPIED_PARTS (lhs_type));
6917
6918 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6919 while (noncopied_parts != 0)
6920 {
6921 expand_assignment (TREE_PURPOSE (noncopied_parts),
6922 TREE_VALUE (noncopied_parts), 0, 0);
6923 noncopied_parts = TREE_CHAIN (noncopied_parts);
6924 }
6925 return temp;
6926 }
6927
6928 case PREINCREMENT_EXPR:
6929 case PREDECREMENT_EXPR:
7b8b9722 6930 return expand_increment (exp, 0, ignore);
bbf6f052
RK
6931
6932 case POSTINCREMENT_EXPR:
6933 case POSTDECREMENT_EXPR:
6934 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 6935 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
6936
6937 case ADDR_EXPR:
987c71d9 6938 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 6939 be a MEM corresponding to a stack slot. */
987c71d9
RK
6940 temp = 0;
6941
bbf6f052
RK
6942 /* Are we taking the address of a nested function? */
6943 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9
JM
6944 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
6945 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
bbf6f052
RK
6946 {
6947 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6948 op0 = force_operand (op0, target);
6949 }
682ba3a6
RK
6950 /* If we are taking the address of something erroneous, just
6951 return a zero. */
6952 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6953 return const0_rtx;
bbf6f052
RK
6954 else
6955 {
e287fd6e
RK
6956 /* We make sure to pass const0_rtx down if we came in with
6957 ignore set, to avoid doing the cleanups twice. */
6958 op0 = expand_expr (TREE_OPERAND (exp, 0),
6959 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
6960 (modifier == EXPAND_INITIALIZER
6961 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 6962
119af78a
RK
6963 /* If we are going to ignore the result, OP0 will have been set
6964 to const0_rtx, so just return it. Don't get confused and
6965 think we are taking the address of the constant. */
6966 if (ignore)
6967 return op0;
6968
3539e816
MS
6969 op0 = protect_from_queue (op0, 0);
6970
896102d0
RK
6971 /* We would like the object in memory. If it is a constant,
6972 we can have it be statically allocated into memory. For
682ba3a6 6973 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
896102d0
RK
6974 memory and store the value into it. */
6975
6976 if (CONSTANT_P (op0))
6977 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6978 op0);
987c71d9 6979 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
6980 {
6981 mark_temp_addr_taken (op0);
6982 temp = XEXP (op0, 0);
6983 }
896102d0 6984
682ba3a6
RK
6985 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6986 || GET_CODE (op0) == CONCAT)
896102d0
RK
6987 {
6988 /* If this object is in a register, it must not
0f41302f 6989 be BLKmode. */
896102d0 6990 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 6991 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 6992
7a0b7b9a 6993 mark_temp_addr_taken (memloc);
896102d0
RK
6994 emit_move_insn (memloc, op0);
6995 op0 = memloc;
6996 }
6997
bbf6f052
RK
6998 if (GET_CODE (op0) != MEM)
6999 abort ();
7000
7001 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
7002 {
7003 temp = XEXP (op0, 0);
7004#ifdef POINTERS_EXTEND_UNSIGNED
7005 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7006 && mode == ptr_mode)
9fcfcce7 7007 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
7008#endif
7009 return temp;
7010 }
987c71d9 7011
bbf6f052
RK
7012 op0 = force_operand (XEXP (op0, 0), target);
7013 }
987c71d9 7014
bbf6f052 7015 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
7016 op0 = force_reg (Pmode, op0);
7017
dc6d66b3
RK
7018 if (GET_CODE (op0) == REG
7019 && ! REG_USERVAR_P (op0))
7020 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
7021
7022 /* If we might have had a temp slot, add an equivalent address
7023 for it. */
7024 if (temp != 0)
7025 update_temp_slot_address (temp, op0);
7026
88f63c77
RK
7027#ifdef POINTERS_EXTEND_UNSIGNED
7028 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7029 && mode == ptr_mode)
9fcfcce7 7030 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
7031#endif
7032
bbf6f052
RK
7033 return op0;
7034
7035 case ENTRY_VALUE_EXPR:
7036 abort ();
7037
7308a047
RS
7038 /* COMPLEX type for Extended Pascal & Fortran */
7039 case COMPLEX_EXPR:
7040 {
7041 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 7042 rtx insns;
7308a047
RS
7043
7044 /* Get the rtx code of the operands. */
7045 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7046 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7047
7048 if (! target)
7049 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7050
6551fa4d 7051 start_sequence ();
7308a047
RS
7052
7053 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
7054 emit_move_insn (gen_realpart (mode, target), op0);
7055 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 7056
6551fa4d
JW
7057 insns = get_insns ();
7058 end_sequence ();
7059
7308a047 7060 /* Complex construction should appear as a single unit. */
6551fa4d
JW
7061 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7062 each with a separate pseudo as destination.
7063 It's not correct for flow to treat them as a unit. */
6d6e61ce 7064 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7065 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7066 else
7067 emit_insns (insns);
7308a047
RS
7068
7069 return target;
7070 }
7071
7072 case REALPART_EXPR:
2d7050fd
RS
7073 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7074 return gen_realpart (mode, op0);
7308a047
RS
7075
7076 case IMAGPART_EXPR:
2d7050fd
RS
7077 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7078 return gen_imagpart (mode, op0);
7308a047
RS
7079
7080 case CONJ_EXPR:
7081 {
62acb978 7082 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 7083 rtx imag_t;
6551fa4d 7084 rtx insns;
7308a047
RS
7085
7086 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7087
7088 if (! target)
d6a5ac33 7089 target = gen_reg_rtx (mode);
7308a047 7090
6551fa4d 7091 start_sequence ();
7308a047
RS
7092
7093 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
7094 emit_move_insn (gen_realpart (partmode, target),
7095 gen_realpart (partmode, op0));
7308a047 7096
62acb978
RK
7097 imag_t = gen_imagpart (partmode, target);
7098 temp = expand_unop (partmode, neg_optab,
7099 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
7100 if (temp != imag_t)
7101 emit_move_insn (imag_t, temp);
7102
6551fa4d
JW
7103 insns = get_insns ();
7104 end_sequence ();
7105
d6a5ac33
RK
7106 /* Conjugate should appear as a single unit.
7107 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
7108 each with a separate pseudo as destination.
7109 It's not correct for flow to treat them as a unit. */
6d6e61ce 7110 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7111 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7112 else
7113 emit_insns (insns);
7308a047
RS
7114
7115 return target;
7116 }
7117
bbf6f052 7118 case ERROR_MARK:
66538193
RS
7119 op0 = CONST0_RTX (tmode);
7120 if (op0 != 0)
7121 return op0;
bbf6f052
RK
7122 return const0_rtx;
7123
7124 default:
90764a87 7125 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
7126 }
7127
7128 /* Here to do an ordinary binary operator, generating an instruction
7129 from the optab already placed in `this_optab'. */
7130 binop:
7131 preexpand_calls (exp);
7132 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7133 subtarget = 0;
7134 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7135 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7136 binop2:
7137 temp = expand_binop (mode, this_optab, op0, op1, target,
7138 unsignedp, OPTAB_LIB_WIDEN);
7139 if (temp == 0)
7140 abort ();
7141 return temp;
7142}
bbf6f052 7143
bbf6f052 7144
0f41302f
MS
7145/* Emit bytecode to evaluate the given expression EXP to the stack. */
7146
ca695ac9
JB
7147void
7148bc_expand_expr (exp)
7149 tree exp;
bbf6f052 7150{
ca695ac9
JB
7151 enum tree_code code;
7152 tree type, arg0;
7153 rtx r;
7154 struct binary_operator *binoptab;
7155 struct unary_operator *unoptab;
7156 struct increment_operator *incroptab;
7157 struct bc_label *lab, *lab1;
7158 enum bytecode_opcode opcode;
7159
7160
7161 code = TREE_CODE (exp);
7162
7163 switch (code)
bbf6f052 7164 {
ca695ac9
JB
7165 case PARM_DECL:
7166
7167 if (DECL_RTL (exp) == 0)
bbf6f052 7168 {
ca695ac9
JB
7169 error_with_decl (exp, "prior parameter's size depends on `%s'");
7170 return;
bbf6f052 7171 }
ca695ac9
JB
7172
7173 bc_load_parmaddr (DECL_RTL (exp));
7174 bc_load_memory (TREE_TYPE (exp), exp);
7175
7176 return;
7177
7178 case VAR_DECL:
7179
7180 if (DECL_RTL (exp) == 0)
7181 abort ();
7182
7183#if 0
e7a42772 7184 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
7185 bc_load_externaddr (DECL_RTL (exp));
7186 else
7187 bc_load_localaddr (DECL_RTL (exp));
7188#endif
7189 if (TREE_PUBLIC (exp))
e7a42772
JB
7190 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7191 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
ca695ac9
JB
7192 else
7193 bc_load_localaddr (DECL_RTL (exp));
7194
7195 bc_load_memory (TREE_TYPE (exp), exp);
7196 return;
7197
7198 case INTEGER_CST:
7199
7200#ifdef DEBUG_PRINT_CODE
7201 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7202#endif
6bd6178d 7203 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
ca695ac9 7204 ? SImode
6bd6178d 7205 : TYPE_MODE (TREE_TYPE (exp)))],
ca695ac9
JB
7206 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7207 return;
7208
7209 case REAL_CST:
7210
c02bd5d9 7211#if 0
ca695ac9
JB
7212#ifdef DEBUG_PRINT_CODE
7213 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7214#endif
c02bd5d9 7215 /* FIX THIS: find a better way to pass real_cst's. -bson */
ca695ac9
JB
7216 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7217 (double) TREE_REAL_CST (exp));
c02bd5d9
JB
7218#else
7219 abort ();
7220#endif
7221
ca695ac9
JB
7222 return;
7223
7224 case CALL_EXPR:
7225
7226 /* We build a call description vector describing the type of
7227 the return value and of the arguments; this call vector,
7228 together with a pointer to a location for the return value
7229 and the base of the argument list, is passed to the low
7230 level machine dependent call subroutine, which is responsible
7231 for putting the arguments wherever real functions expect
7232 them, as well as getting the return value back. */
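      /* Sketch of the layout built below (the type encodings come from
	 bc_runtime_type_code):

		calldesc[0]	   number of arguments
		calldesc[1], [2]   return type code, return size
		calldesc[3], [4]   first arg type code, first arg size
		...		   likewise for each remaining argument.  */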
7233 {
7234 tree calldesc = 0, arg;
7235 int nargs = 0, i;
7236 rtx retval;
7237
7238 /* Push the evaluated args on the evaluation stack in reverse
7239 order. Also make an entry for each arg in the calldesc
7240 vector while we're at it. */
7241
7242 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7243
7244 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7245 {
7246 ++nargs;
7247 bc_expand_expr (TREE_VALUE (arg));
7248
7249 calldesc = tree_cons ((tree) 0,
7250 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7251 calldesc);
7252 calldesc = tree_cons ((tree) 0,
7253 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7254 calldesc);
7255 }
7256
7257 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7258
7259 /* Allocate a location for the return value and push its
7260 address on the evaluation stack. Also make an entry
0f41302f 7261 at the front of the calldesc for the return value type. */
ca695ac9
JB
7262
7263 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7264 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7265 bc_load_localaddr (retval);
7266
7267 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7268 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7269
7270 /* Prepend the argument count. */
7271 calldesc = tree_cons ((tree) 0,
7272 build_int_2 (nargs, 0),
7273 calldesc);
7274
7275 /* Push the address of the call description vector on the stack. */
7276 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7277 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7278 build_index_type (build_int_2 (nargs * 2, 0)));
7279 r = output_constant_def (calldesc);
7280 bc_load_externaddr (r);
7281
0f41302f 7282 /* Push the address of the function to be called. */
ca695ac9
JB
7283 bc_expand_expr (TREE_OPERAND (exp, 0));
7284
7285 /* Call the function, popping its address and the calldesc vector
7286 address off the evaluation stack in the process. */
7287 bc_emit_instruction (call);
7288
7289 /* Pop the arguments off the stack. */
7290 bc_adjust_stack (nargs);
7291
7292 /* Load the return value onto the stack. */
7293 bc_load_localaddr (retval);
7294 bc_load_memory (type, TREE_OPERAND (exp, 0));
7295 }
7296 return;
7297
7298 case SAVE_EXPR:
7299
7300 if (!SAVE_EXPR_RTL (exp))
bbf6f052 7301 {
ca695ac9
JB
7302 /* First time around: copy to local variable */
7303 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7304 TYPE_ALIGN (TREE_TYPE(exp)));
7305 bc_expand_expr (TREE_OPERAND (exp, 0));
6d6e61ce 7306 bc_emit_instruction (duplicate);
ca695ac9
JB
7307
7308 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7309 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 7310 }
ca695ac9 7311 else
bbf6f052 7312 {
ca695ac9
JB
7313 /* Consecutive reference: use saved copy */
7314 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7315 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 7316 }
ca695ac9
JB
7317 return;
7318
7319#if 0
7320 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7321 how are they handled instead? */
7322 case LET_STMT:
7323
7324 TREE_USED (exp) = 1;
7325 bc_expand_expr (STMT_BODY (exp));
7326 return;
7327#endif
7328
7329 case NOP_EXPR:
7330 case CONVERT_EXPR:
7331
7332 bc_expand_expr (TREE_OPERAND (exp, 0));
7333 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7334 return;
7335
7336 case MODIFY_EXPR:
7337
c02bd5d9 7338 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
ca695ac9
JB
7339 return;
7340
7341 case ADDR_EXPR:
7342
7343 bc_expand_address (TREE_OPERAND (exp, 0));
7344 return;
7345
7346 case INDIRECT_REF:
7347
7348 bc_expand_expr (TREE_OPERAND (exp, 0));
7349 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7350 return;
7351
7352 case ARRAY_REF:
7353
7354 bc_expand_expr (bc_canonicalize_array_ref (exp));
7355 return;
7356
7357 case COMPONENT_REF:
7358
7359 bc_expand_component_address (exp);
7360
7361 /* If we have a bitfield, generate a proper load */
7362 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7363 return;
7364
7365 case COMPOUND_EXPR:
7366
7367 bc_expand_expr (TREE_OPERAND (exp, 0));
7368 bc_emit_instruction (drop);
7369 bc_expand_expr (TREE_OPERAND (exp, 1));
7370 return;
7371
7372 case COND_EXPR:
7373
7374 bc_expand_expr (TREE_OPERAND (exp, 0));
7375 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7376 lab = bc_get_bytecode_label ();
c02bd5d9 7377 bc_emit_bytecode (xjumpifnot);
ca695ac9
JB
7378 bc_emit_bytecode_labelref (lab);
7379
7380#ifdef DEBUG_PRINT_CODE
7381 fputc ('\n', stderr);
7382#endif
7383 bc_expand_expr (TREE_OPERAND (exp, 1));
7384 lab1 = bc_get_bytecode_label ();
7385 bc_emit_bytecode (jump);
7386 bc_emit_bytecode_labelref (lab1);
7387
7388#ifdef DEBUG_PRINT_CODE
7389 fputc ('\n', stderr);
7390#endif
7391
7392 bc_emit_bytecode_labeldef (lab);
7393 bc_expand_expr (TREE_OPERAND (exp, 2));
7394 bc_emit_bytecode_labeldef (lab1);
7395 return;
7396
7397 case TRUTH_ANDIF_EXPR:
7398
c02bd5d9 7399 opcode = xjumpifnot;
ca695ac9
JB
7400 goto andorif;
7401
7402 case TRUTH_ORIF_EXPR:
7403
c02bd5d9 7404 opcode = xjumpif;
ca695ac9
JB
7405 goto andorif;
7406
7407 case PLUS_EXPR:
7408
7409 binoptab = optab_plus_expr;
7410 goto binop;
7411
7412 case MINUS_EXPR:
7413
7414 binoptab = optab_minus_expr;
7415 goto binop;
7416
7417 case MULT_EXPR:
7418
7419 binoptab = optab_mult_expr;
7420 goto binop;
7421
7422 case TRUNC_DIV_EXPR:
7423 case FLOOR_DIV_EXPR:
7424 case CEIL_DIV_EXPR:
7425 case ROUND_DIV_EXPR:
7426 case EXACT_DIV_EXPR:
7427
7428 binoptab = optab_trunc_div_expr;
7429 goto binop;
7430
7431 case TRUNC_MOD_EXPR:
7432 case FLOOR_MOD_EXPR:
7433 case CEIL_MOD_EXPR:
7434 case ROUND_MOD_EXPR:
7435
7436 binoptab = optab_trunc_mod_expr;
7437 goto binop;
7438
7439 case FIX_ROUND_EXPR:
7440 case FIX_FLOOR_EXPR:
7441 case FIX_CEIL_EXPR:
7442 abort (); /* Not used for C. */
7443
7444 case FIX_TRUNC_EXPR:
7445 case FLOAT_EXPR:
7446 case MAX_EXPR:
7447 case MIN_EXPR:
7448 case FFS_EXPR:
7449 case LROTATE_EXPR:
7450 case RROTATE_EXPR:
7451 abort (); /* FIXME */
7452
7453 case RDIV_EXPR:
7454
7455 binoptab = optab_rdiv_expr;
7456 goto binop;
7457
7458 case BIT_AND_EXPR:
7459
7460 binoptab = optab_bit_and_expr;
7461 goto binop;
7462
7463 case BIT_IOR_EXPR:
7464
7465 binoptab = optab_bit_ior_expr;
7466 goto binop;
7467
7468 case BIT_XOR_EXPR:
7469
7470 binoptab = optab_bit_xor_expr;
7471 goto binop;
7472
7473 case LSHIFT_EXPR:
7474
7475 binoptab = optab_lshift_expr;
7476 goto binop;
7477
7478 case RSHIFT_EXPR:
7479
7480 binoptab = optab_rshift_expr;
7481 goto binop;
7482
7483 case TRUTH_AND_EXPR:
7484
7485 binoptab = optab_truth_and_expr;
7486 goto binop;
7487
7488 case TRUTH_OR_EXPR:
7489
7490 binoptab = optab_truth_or_expr;
7491 goto binop;
7492
7493 case LT_EXPR:
7494
7495 binoptab = optab_lt_expr;
7496 goto binop;
7497
7498 case LE_EXPR:
7499
7500 binoptab = optab_le_expr;
7501 goto binop;
7502
7503 case GE_EXPR:
7504
7505 binoptab = optab_ge_expr;
7506 goto binop;
7507
7508 case GT_EXPR:
7509
7510 binoptab = optab_gt_expr;
7511 goto binop;
7512
7513 case EQ_EXPR:
7514
7515 binoptab = optab_eq_expr;
7516 goto binop;
7517
7518 case NE_EXPR:
7519
7520 binoptab = optab_ne_expr;
7521 goto binop;
7522
7523 case NEGATE_EXPR:
7524
7525 unoptab = optab_negate_expr;
7526 goto unop;
7527
7528 case BIT_NOT_EXPR:
7529
7530 unoptab = optab_bit_not_expr;
7531 goto unop;
7532
7533 case TRUTH_NOT_EXPR:
7534
7535 unoptab = optab_truth_not_expr;
7536 goto unop;
7537
7538 case PREDECREMENT_EXPR:
7539
7540 incroptab = optab_predecrement_expr;
7541 goto increment;
7542
7543 case PREINCREMENT_EXPR:
7544
7545 incroptab = optab_preincrement_expr;
7546 goto increment;
7547
7548 case POSTDECREMENT_EXPR:
7549
7550 incroptab = optab_postdecrement_expr;
7551 goto increment;
7552
7553 case POSTINCREMENT_EXPR:
7554
7555 incroptab = optab_postincrement_expr;
7556 goto increment;
7557
7558 case CONSTRUCTOR:
7559
7560 bc_expand_constructor (exp);
7561 return;
7562
7563 case ERROR_MARK:
7564 case RTL_EXPR:
7565
7566 return;
7567
7568 case BIND_EXPR:
7569 {
7570 tree vars = TREE_OPERAND (exp, 0);
7571 int vars_need_expansion = 0;
7572
7573 /* Need to open a binding contour here because
7574 if there are any cleanups they must be contained here. */
7575 expand_start_bindings (0);
7576
7577 /* Mark the corresponding BLOCK for output. */
7578 if (TREE_OPERAND (exp, 2) != 0)
7579 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7580
7581 /* If VARS have not yet been expanded, expand them now. */
7582 while (vars)
7583 {
7584 if (DECL_RTL (vars) == 0)
7585 {
7586 vars_need_expansion = 1;
9bac07c3 7587 expand_decl (vars);
ca695ac9 7588 }
9bac07c3 7589 expand_decl_init (vars);
ca695ac9
JB
7590 vars = TREE_CHAIN (vars);
7591 }
7592
7593 bc_expand_expr (TREE_OPERAND (exp, 1));
7594
7595 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7596
7597 return;
7598 }
7599 }
7600
7601 abort ();
7602
7603 binop:
7604
7605 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7606 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7607 return;
7608
7609
7610 unop:
7611
7612 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7613 return;
7614
7615
7616 andorif:
7617
7618 bc_expand_expr (TREE_OPERAND (exp, 0));
7619 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7620 lab = bc_get_bytecode_label ();
7621
6d6e61ce 7622 bc_emit_instruction (duplicate);
ca695ac9
JB
7623 bc_emit_bytecode (opcode);
7624 bc_emit_bytecode_labelref (lab);
7625
7626#ifdef DEBUG_PRINT_CODE
7627 fputc ('\n', stderr);
7628#endif
7629
7630 bc_emit_instruction (drop);
7631
7632 bc_expand_expr (TREE_OPERAND (exp, 1));
7633 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7634 bc_emit_bytecode_labeldef (lab);
7635 return;
7636
7637
7638 increment:
7639
7640 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7641
7642 /* Push the quantum. */
7643 bc_expand_expr (TREE_OPERAND (exp, 1));
7644
7645 /* Convert it to the lvalue's type. */
7646 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7647
7648 /* Push the address of the lvalue */
c02bd5d9 7649 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
ca695ac9
JB
7650
7651 /* Perform actual increment */
c02bd5d9 7652 bc_expand_increment (incroptab, type);
ca695ac9
JB
7653 return;
7654}
7655\f
7656/* Return the alignment in bits of EXP, a pointer valued expression.
7657 But don't return more than MAX_ALIGN no matter what.
7658 The alignment returned is, by default, the alignment of the thing that
7659 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7660
7661 Otherwise, look at the expression to see if we can do better, i.e., if the
7662 expression is actually pointing at an object whose alignment is tighter. */
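/* Worked example (not from the original source): with a 64-bit
   aligned `double d', the expression `(char *) &d + 2' reaches the
   PLUS_EXPR case; the 2-byte (16-bit) offset halves MAX_ALIGN until
   16 & (MAX_ALIGN - 1) is zero, so the ADDR_EXPR's 64-bit alignment
   is clipped to 16 bits on return.  */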
7663
7664static int
7665get_pointer_alignment (exp, max_align)
7666 tree exp;
7667 unsigned max_align;
7668{
7669 unsigned align, inner;
7670
7671 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7672 return 0;
7673
7674 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7675 align = MIN (align, max_align);
7676
7677 while (1)
7678 {
7679 switch (TREE_CODE (exp))
7680 {
7681 case NOP_EXPR:
7682 case CONVERT_EXPR:
7683 case NON_LVALUE_EXPR:
7684 exp = TREE_OPERAND (exp, 0);
7685 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7686 return align;
7687 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8dc2fbcf 7688 align = MIN (inner, max_align);
ca695ac9
JB
7689 break;
7690
7691 case PLUS_EXPR:
7692 /* If sum of pointer + int, restrict our maximum alignment to that
7693 imposed by the integer. If not, we can't do any better than
7694 ALIGN. */
7695 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7696 return align;
7697
7698 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7699 & (max_align - 1))
7700 != 0)
7701 max_align >>= 1;
7702
7703 exp = TREE_OPERAND (exp, 0);
7704 break;
7705
7706 case ADDR_EXPR:
7707 /* See what we are pointing at and look at its alignment. */
7708 exp = TREE_OPERAND (exp, 0);
7709 if (TREE_CODE (exp) == FUNCTION_DECL)
8dc2fbcf 7710 align = FUNCTION_BOUNDARY;
ca695ac9 7711 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8dc2fbcf 7712 align = DECL_ALIGN (exp);
ca695ac9
JB
7713#ifdef CONSTANT_ALIGNMENT
7714 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7715 align = CONSTANT_ALIGNMENT (exp, align);
7716#endif
7717 return MIN (align, max_align);
7718
7719 default:
7720 return align;
7721 }
7722 }
7723}
7724\f
7725/* Return the tree node and offset if a given argument corresponds to
7726 a string constant. */
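/* Illustration (not from the original source): for the tree built
   from `"hello" + 2', a PLUS_EXPR of an ADDR_EXPR of a STRING_CST
   and an offset, this returns the STRING_CST and sets *PTR_OFFSET
   to the tree for 2; for a bare `"hello"' the offset returned is
   integer_zero_node.  */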
7727
7728static tree
7729string_constant (arg, ptr_offset)
7730 tree arg;
7731 tree *ptr_offset;
7732{
7733 STRIP_NOPS (arg);
7734
7735 if (TREE_CODE (arg) == ADDR_EXPR
7736 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7737 {
7738 *ptr_offset = integer_zero_node;
7739 return TREE_OPERAND (arg, 0);
7740 }
7741 else if (TREE_CODE (arg) == PLUS_EXPR)
7742 {
7743 tree arg0 = TREE_OPERAND (arg, 0);
7744 tree arg1 = TREE_OPERAND (arg, 1);
7745
7746 STRIP_NOPS (arg0);
7747 STRIP_NOPS (arg1);
7748
7749 if (TREE_CODE (arg0) == ADDR_EXPR
7750 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7751 {
7752 *ptr_offset = arg1;
7753 return TREE_OPERAND (arg0, 0);
7754 }
7755 else if (TREE_CODE (arg1) == ADDR_EXPR
7756 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7757 {
7758 *ptr_offset = arg0;
7759 return TREE_OPERAND (arg1, 0);
7760 }
7761 }
7762
7763 return 0;
7764}
7765
7766/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7767 way, because it could contain a zero byte in the middle.
7768 TREE_STRING_LENGTH is the size of the character array, not the string.
7769
7770 Unfortunately, string_constant can't access the values of const char
7771 arrays with initializers, so neither can we do so here. */
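/* Illustration (not from the original source): for `"hello" + 1'
   this returns size_int (4); for `"foo\0bar" + i' with I not a
   constant it returns 0, since the internal nul and the unknown
   offset force a run-time strlen call.  */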
7772
7773static tree
7774c_strlen (src)
7775 tree src;
7776{
7777 tree offset_node;
7778 int offset, max;
7779 char *ptr;
7780
7781 src = string_constant (src, &offset_node);
7782 if (src == 0)
7783 return 0;
7784 max = TREE_STRING_LENGTH (src);
7785 ptr = TREE_STRING_POINTER (src);
7786 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7787 {
7788 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7789 compute the offset to the following null if we don't know where to
7790 start searching for it. */
7791 int i;
7792 for (i = 0; i < max; i++)
7793 if (ptr[i] == 0)
7794 return 0;
7795 /* We don't know the starting offset, but we do know that the string
7796 has no internal zero bytes. We can assume that the offset falls
7797 within the bounds of the string; otherwise, the programmer deserves
7798 what he gets. Subtract the offset from the length of the string,
7799 and return that. */
7800 /* This would perhaps not be valid if we were dealing with named
7801 arrays in addition to literal string constants. */
7802 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7803 }
7804
7805 /* We have a known offset into the string. Start searching there for
7806 a null character. */
7807 if (offset_node == 0)
7808 offset = 0;
7809 else
7810 {
7811 /* Did we get a long long offset? If so, punt. */
7812 if (TREE_INT_CST_HIGH (offset_node) != 0)
7813 return 0;
7814 offset = TREE_INT_CST_LOW (offset_node);
7815 }
7816 /* If the offset is known to be out of bounds, warn, and call strlen at
7817 runtime. */
7818 if (offset < 0 || offset > max)
7819 {
7820 warning ("offset outside bounds of constant string");
7821 return 0;
7822 }
7823 /* Use strlen to search for the first zero byte. Since any strings
7824 constructed with build_string will have nulls appended, we win even
7825 if we get handed something like (char[4])"abcd".
7826
7827 Since OFFSET is our starting index into the string, no further
7828 calculation is needed. */
7829 return size_int (strlen (ptr + offset));
7830}
2bbf216f
RK
7831
7832rtx
7833expand_builtin_return_addr (fndecl_code, count, tem)
7834 enum built_in_function fndecl_code;
7835 rtx tem;
7836 int count;
7837{
7838 int i;
7839
7840 /* Some machines need special handling before we can access
7841 arbitrary frames. For example, on the sparc, we must first flush
7842 all register windows to the stack. */
7843#ifdef SETUP_FRAME_ADDRESSES
7844 SETUP_FRAME_ADDRESSES ();
7845#endif
7846
7847 /* On the sparc, the return address is not in the frame, it is in a
7848 register. There is no way to access it off of the current frame
7849 pointer, but it can be accessed off the previous frame pointer by
7850 reading the value from the register window save area. */
7851#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7852 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7853 count--;
7854#endif
7855
7856 /* Scan back COUNT frames to the specified frame. */
7857 for (i = 0; i < count; i++)
7858 {
7859 /* Assume the dynamic chain pointer is in the word that the
7860 frame address points to, unless otherwise specified. */
7861#ifdef DYNAMIC_CHAIN_ADDRESS
7862 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7863#endif
7864 tem = memory_address (Pmode, tem);
7865 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7866 }
7867
7868 /* For __builtin_frame_address, return what we've got. */
7869 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7870 return tem;
7871
7872 /* For __builtin_return_address, get the return address from that
7873 frame. */
7874#ifdef RETURN_ADDR_RTX
7875 tem = RETURN_ADDR_RTX (count, tem);
7876#else
7877 tem = memory_address (Pmode,
7878 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7879 tem = gen_rtx (MEM, Pmode, tem);
7880#endif
0ebba7fc 7881 return tem;
2bbf216f 7882}
ca695ac9
JB
7883\f
7884/* Expand an expression EXP that calls a built-in function,
7885 with result going to TARGET if that's convenient
7886 (and in mode MODE if that's convenient).
7887 SUBTARGET may be used as the target for computing one of EXP's operands.
7888 IGNORE is nonzero if the value is to be ignored. */
7889
98aad286
RK
7890#define CALLED_AS_BUILT_IN(NODE) \
7891 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7892
ca695ac9
JB
7893static rtx
7894expand_builtin (exp, target, subtarget, mode, ignore)
7895 tree exp;
7896 rtx target;
7897 rtx subtarget;
7898 enum machine_mode mode;
7899 int ignore;
7900{
7901 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7902 tree arglist = TREE_OPERAND (exp, 1);
7903 rtx op0;
7904 rtx lab1, insns;
7905 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7906 optab builtin_optab;
7907
7908 switch (DECL_FUNCTION_CODE (fndecl))
7909 {
7910 case BUILT_IN_ABS:
7911 case BUILT_IN_LABS:
7912 case BUILT_IN_FABS:
7913 /* build_function_call changes these into ABS_EXPR. */
7914 abort ();
7915
7916 case BUILT_IN_SIN:
7917 case BUILT_IN_COS:
0f41302f 7918 /* Treat these like sqrt, but only if the user asks for them. */
ba558a85
RK
7919 if (! flag_fast_math)
7920 break;
ca695ac9
JB
7921 case BUILT_IN_FSQRT:
7922 /* If not optimizing, call the library function. */
7923 if (! optimize)
7924 break;
7925
7926 if (arglist == 0
7927 /* Arg could be wrong type if user redeclared this fcn wrong. */
7928 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7b073ca6 7929 break;
ca695ac9
JB
7930
7931 /* Stabilize and compute the argument. */
7932 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7933 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7934 {
7935 exp = copy_node (exp);
7936 arglist = copy_node (arglist);
7937 TREE_OPERAND (exp, 1) = arglist;
7938 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7939 }
7940 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7941
7942 /* Make a suitable register to place result in. */
7943 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7944
7945 emit_queue ();
7946 start_sequence ();
7947
7948 switch (DECL_FUNCTION_CODE (fndecl))
7949 {
7950 case BUILT_IN_SIN:
7951 builtin_optab = sin_optab; break;
7952 case BUILT_IN_COS:
7953 builtin_optab = cos_optab; break;
7954 case BUILT_IN_FSQRT:
7955 builtin_optab = sqrt_optab; break;
7956 default:
7957 abort ();
7958 }
7959
7960 /* Compute into TARGET.
7961 Set TARGET to wherever the result comes back. */
7962 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7963 builtin_optab, op0, target, 0);
7964
7965 /* If we were unable to expand via the builtin, stop the
7966 sequence (without outputting the insns) and break, causing
7967 a call to the library function. */
7968 if (target == 0)
7969 {
7970 end_sequence ();
7971 break;
7972 }
7973
7974 /* Check the results by default. But if flag_fast_math is turned on,
7975 then assume sqrt will always be called with valid arguments. */
7976
7977 if (! flag_fast_math)
7978 {
7979 /* Don't define the builtin FP instructions
7980 if your machine is not IEEE. */
7981 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7982 abort ();
7983
7984 lab1 = gen_label_rtx ();
7985
7986 /* Test the result; if it is NaN, set errno=EDOM because
7987 the argument was not in the domain. */
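	  /* Under IEEE arithmetic, TARGET == TARGET is false exactly
	     when TARGET is a NaN, so the beq below skips the errno
	     code for every in-domain result.  */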
7988 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7989 emit_jump_insn (gen_beq (lab1));
7990
4ac09687 7991#ifdef TARGET_EDOM
ca695ac9
JB
7992 {
7993#ifdef GEN_ERRNO_RTX
7994 rtx errno_rtx = GEN_ERRNO_RTX;
7995#else
7996 rtx errno_rtx
e74a2201 7997 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
ca695ac9
JB
7998#endif
7999
8000 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8001 }
8002#else
8003 /* We can't set errno=EDOM directly; let the library call do it.
0f41302f 8004 Pop the arguments right away in case the call gets deleted. */
ca695ac9
JB
8005 NO_DEFER_POP;
8006 expand_call (exp, target, 0);
8007 OK_DEFER_POP;
8008#endif
8009
8010 emit_label (lab1);
8011 }
8012
0f41302f 8013 /* Output the entire sequence. */
ca695ac9
JB
8014 insns = get_insns ();
8015 end_sequence ();
8016 emit_insns (insns);
8017
8018 return target;
8019
8020 /* __builtin_apply_args returns block of memory allocated on
8021 the stack into which is stored the arg pointer, structure
8022 value address, static chain, and all the registers that might
8023 possibly be used in performing a function call. The code is
8024 moved to the start of the function so the incoming values are
8025 saved. */
8026 case BUILT_IN_APPLY_ARGS:
8027 /* Don't do __builtin_apply_args more than once in a function.
8028 Save the result of the first call and reuse it. */
8029 if (apply_args_value != 0)
8030 return apply_args_value;
8031 {
8032 /* When this function is called, it means that registers must be
8033 saved on entry to this function. So we migrate the
8034 call to the first insn of this function. */
8035 rtx temp;
8036 rtx seq;
8037
8038 start_sequence ();
8039 temp = expand_builtin_apply_args ();
8040 seq = get_insns ();
8041 end_sequence ();
8042
8043 apply_args_value = temp;
8044
8045 /* Put the sequence after the NOTE that starts the function.
8046 If this is inside a SEQUENCE, make the outer-level insn
8047 chain current, so the code is placed at the start of the
8048 function. */
8049 push_topmost_sequence ();
8050 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8051 pop_topmost_sequence ();
8052 return temp;
8053 }
8054
8055 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8056 FUNCTION with a copy of the parameters described by
8057 ARGUMENTS, and ARGSIZE. It returns a block of memory
8058 allocated on the stack into which is stored all the registers
8059 that might possibly be used for returning the result of a
8060 function. ARGUMENTS is the value returned by
8061 __builtin_apply_args. ARGSIZE is the number of bytes of
8062 arguments that must be copied. ??? How should this value be
8063 computed? We'll also need a safe worst case value for varargs
8064 functions. */
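      /* Usage sketch of the GNU extension these three builtins
	 implement (FN is a hypothetical function pointer and 64 an
	 assumed worst-case argument size, neither taken from this
	 file):

		void *args = __builtin_apply_args ();
		void *result = __builtin_apply (fn, args, 64);
		__builtin_return (result);
       */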
8065 case BUILT_IN_APPLY:
8066 if (arglist == 0
8067 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8068 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8069 || TREE_CHAIN (arglist) == 0
8070 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8071 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8072 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8073 return const0_rtx;
8074 else
8075 {
8076 int i;
8077 tree t;
8078 rtx ops[3];
8079
8080 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8081 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8082
8083 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8084 }
8085
8086 /* __builtin_return (RESULT) causes the function to return the
8087 value described by RESULT. RESULT is address of the block of
8088 memory returned by __builtin_apply. */
8089 case BUILT_IN_RETURN:
8090 if (arglist
8091 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8092 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8093 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8094 NULL_RTX, VOIDmode, 0));
8095 return const0_rtx;
8096
8097 case BUILT_IN_SAVEREGS:
8098 /* Don't do __builtin_saveregs more than once in a function.
8099 Save the result of the first call and reuse it. */
8100 if (saveregs_value != 0)
8101 return saveregs_value;
8102 {
8103 /* When this function is called, it means that registers must be
8104 saved on entry to this function. So we migrate the
8105 call to the first insn of this function. */
8106 rtx temp;
8107 rtx seq;
ca695ac9
JB
8108
8109 /* Now really call the function. `expand_call' does not call
8110 expand_builtin, so there is no danger of infinite recursion here. */
8111 start_sequence ();
8112
8113#ifdef EXPAND_BUILTIN_SAVEREGS
8114 /* Do whatever the machine needs done in this case. */
8115 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8116#else
8117 /* The register where the function returns its value
8118 is likely to have something else in it, such as an argument.
8119 So preserve that register around the call. */
d0c76654 8120
ca695ac9
JB
8121 if (value_mode != VOIDmode)
8122 {
d0c76654
RK
8123 rtx valreg = hard_libcall_value (value_mode);
8124 rtx saved_valreg = gen_reg_rtx (value_mode);
8125
ca695ac9 8126 emit_move_insn (saved_valreg, valreg);
d0c76654
RK
8127 temp = expand_call (exp, target, ignore);
8128 emit_move_insn (valreg, saved_valreg);
ca695ac9 8129 }
d0c76654
RK
8130 else
8131 /* Generate the call, putting the value in a pseudo. */
8132 temp = expand_call (exp, target, ignore);
ca695ac9
JB
8133#endif
8134
8135 seq = get_insns ();
8136 end_sequence ();
8137
8138 saveregs_value = temp;
8139
8140 /* Put the sequence after the NOTE that starts the function.
8141 If this is inside a SEQUENCE, make the outer-level insn
8142 chain current, so the code is placed at the start of the
8143 function. */
8144 push_topmost_sequence ();
8145 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8146 pop_topmost_sequence ();
8147 return temp;
8148 }
8149
8150 /* __builtin_args_info (N) returns word N of the arg space info
8151 for the current function. The number and meanings of words
8152 is controlled by the definition of CUMULATIVE_ARGS. */
8153 case BUILT_IN_ARGS_INFO:
8154 {
8155 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8156 int i;
8157 int *word_ptr = (int *) &current_function_args_info;
8158 tree type, elts, result;
8159
8160 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8161 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8162 __FILE__, __LINE__);
8163
8164 if (arglist != 0)
8165 {
8166 tree arg = TREE_VALUE (arglist);
8167 if (TREE_CODE (arg) != INTEGER_CST)
8168 error ("argument of `__builtin_args_info' must be constant");
8169 else
8170 {
8171 int wordnum = TREE_INT_CST_LOW (arg);
8172
8173 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8174 error ("argument of `__builtin_args_info' out of range");
8175 else
8176 return GEN_INT (word_ptr[wordnum]);
8177 }
8178 }
8179 else
8180 error ("missing argument in `__builtin_args_info'");
8181
8182 return const0_rtx;
8183
8184#if 0
8185 for (i = 0; i < nwords; i++)
8186 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8187
8188 type = build_array_type (integer_type_node,
8189 build_index_type (build_int_2 (nwords, 0)));
8190 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8191 TREE_CONSTANT (result) = 1;
8192 TREE_STATIC (result) = 1;
8193 result = build (INDIRECT_REF, build_pointer_type (type), result);
8194 TREE_CONSTANT (result) = 1;
8195 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8196#endif
8197 }
8198
17bbab26 8199 /* Return the address of the first anonymous stack arg. */
ca695ac9
JB
8200 case BUILT_IN_NEXT_ARG:
8201 {
8202 tree fntype = TREE_TYPE (current_function_decl);
c4dfe0fc 8203
33162beb
DE
8204 if ((TYPE_ARG_TYPES (fntype) == 0
8205 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8206 == void_type_node))
8207 && ! current_function_varargs)
ca695ac9
JB
8208 {
8209 error ("`va_start' used in function with fixed args");
8210 return const0_rtx;
8211 }
c4dfe0fc 8212
e4493c04
RK
8213 if (arglist)
8214 {
8215 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8216 tree arg = TREE_VALUE (arglist);
8217
8218 /* Strip off all nops for the sake of the comparison. This
6692a31f
RK
8219 is not quite the same as STRIP_NOPS. It does more.
8220 We must also strip off INDIRECT_EXPR for C++ reference
8221 parameters. */
e4493c04
RK
8222 while (TREE_CODE (arg) == NOP_EXPR
8223 || TREE_CODE (arg) == CONVERT_EXPR
6692a31f
RK
8224 || TREE_CODE (arg) == NON_LVALUE_EXPR
8225 || TREE_CODE (arg) == INDIRECT_REF)
e4493c04
RK
8226 arg = TREE_OPERAND (arg, 0);
8227 if (arg != last_parm)
8228 warning ("second parameter of `va_start' not last named argument");
8229 }
5b4ff0de 8230 else if (! current_function_varargs)
e4493c04
RK
8231 /* Evidently an out of date version of <stdarg.h>; can't validate
8232 va_start's second argument, but can still work as intended. */
8233 warning ("`__builtin_next_arg' called without an argument");
ca695ac9
JB
8234 }
8235
8236 return expand_binop (Pmode, add_optab,
8237 current_function_internal_arg_pointer,
8238 current_function_arg_offset_rtx,
8239 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8240
8241 case BUILT_IN_CLASSIFY_TYPE:
8242 if (arglist != 0)
8243 {
8244 tree type = TREE_TYPE (TREE_VALUE (arglist));
8245 enum tree_code code = TREE_CODE (type);
8246 if (code == VOID_TYPE)
8247 return GEN_INT (void_type_class);
8248 if (code == INTEGER_TYPE)
8249 return GEN_INT (integer_type_class);
8250 if (code == CHAR_TYPE)
8251 return GEN_INT (char_type_class);
8252 if (code == ENUMERAL_TYPE)
8253 return GEN_INT (enumeral_type_class);
8254 if (code == BOOLEAN_TYPE)
8255 return GEN_INT (boolean_type_class);
8256 if (code == POINTER_TYPE)
8257 return GEN_INT (pointer_type_class);
8258 if (code == REFERENCE_TYPE)
8259 return GEN_INT (reference_type_class);
8260 if (code == OFFSET_TYPE)
8261 return GEN_INT (offset_type_class);
8262 if (code == REAL_TYPE)
8263 return GEN_INT (real_type_class);
8264 if (code == COMPLEX_TYPE)
8265 return GEN_INT (complex_type_class);
8266 if (code == FUNCTION_TYPE)
8267 return GEN_INT (function_type_class);
8268 if (code == METHOD_TYPE)
8269 return GEN_INT (method_type_class);
8270 if (code == RECORD_TYPE)
8271 return GEN_INT (record_type_class);
8272 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8273 return GEN_INT (union_type_class);
8274 if (code == ARRAY_TYPE)
4042d440
PB
8275 {
8276 if (TYPE_STRING_FLAG (type))
8277 return GEN_INT (string_type_class);
8278 else
8279 return GEN_INT (array_type_class);
8280 }
ca695ac9
JB
8281 if (code == SET_TYPE)
8282 return GEN_INT (set_type_class);
8283 if (code == FILE_TYPE)
8284 return GEN_INT (file_type_class);
8285 if (code == LANG_TYPE)
8286 return GEN_INT (lang_type_class);
8287 }
8288 return GEN_INT (no_type_class);
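      /* Illustration (not from the original source): after the usual
	 argument promotions, `__builtin_classify_type (1.5)' expands
	 to GEN_INT (real_type_class) and `__builtin_classify_type (0)'
	 to GEN_INT (integer_type_class); only the argument's type is
	 examined, never its value.  */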
8289
    case BUILT_IN_CONSTANT_P:
      if (arglist == 0)
        return const0_rtx;
      else
        {
          tree arg = TREE_VALUE (arglist);

          STRIP_NOPS (arg);
          return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
                  || (TREE_CODE (arg) == ADDR_EXPR
                      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
                  ? const1_rtx : const0_rtx);
        }

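      /* Editor's illustrative note, not part of the original source:
         the test above accepts constants and addresses of string
         literals, so `__builtin_constant_p (3)' and
         `__builtin_constant_p ("x")' yield 1, while
         `__builtin_constant_p (n)' for a variable N yields 0 -- even if
         later optimization could have proved N constant, because the
         answer is fixed at expansion time.  */
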
    case BUILT_IN_FRAME_ADDRESS:
      /* The argument must be a nonnegative integer constant.
         It counts the number of frames to scan up the stack.
         The value is the address of that frame.  */
    case BUILT_IN_RETURN_ADDRESS:
      /* The argument must be a nonnegative integer constant.
         It counts the number of frames to scan up the stack.
         The value is the return address saved in that frame.  */
      if (arglist == 0)
        /* Warning about missing arg was already issued.  */
        return const0_rtx;
      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
               || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
        {
          error ("invalid arg to `__builtin_return_address'");
          return const0_rtx;
        }
      else
        {
          rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
                                                TREE_INT_CST_LOW (TREE_VALUE (arglist)),
                                                hard_frame_pointer_rtx);

          /* For __builtin_frame_address, return what we've got.  */
          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
            return tem;

          if (GET_CODE (tem) != REG)
            tem = copy_to_reg (tem);
          return tem;
        }

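      /* Editor's illustrative note, not part of the original source:
         `__builtin_return_address (0)' yields the address the current
         function will return to, and `__builtin_frame_address (0)' the
         current frame's address; a count of N scans N frames up.
         Nonzero counts are only reliable when every frame in the chain
         keeps a frame pointer.  */
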
    case BUILT_IN_ALLOCA:
      if (arglist == 0
          /* Arg could be non-integer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
        break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

      /* Allocate the desired space.  */
      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);

    case BUILT_IN_FFS:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-integer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
        break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
      /* Compute ffs, into TARGET if possible.
         Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
                            ffs_optab, op0, target, 1);
      if (target == 0)
        abort ();
      return target;

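      /* Editor's illustrative note, not part of the original source:
         ffs returns one plus the index of the least significant set bit,
         or zero for a zero argument, e.g. ffs (0) == 0, ffs (1) == 1,
         ffs (8) == 4.  The ffs_optab expansion above must match those
         library semantics.  */
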
    case BUILT_IN_STRLEN:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
        break;
      else
        {
          tree src = TREE_VALUE (arglist);
          tree len = c_strlen (src);

          int align
            = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

          rtx result, src_rtx, char_rtx;
          enum machine_mode insn_mode = value_mode, char_mode;
          enum insn_code icode;

          /* If the length is known, just return it.  */
          if (len != 0)
            return expand_expr (len, target, mode, 0);

          /* If SRC is not a pointer type, don't do this operation inline.  */
          if (align == 0)
            break;

          /* Call a function if we can't compute strlen in the right mode.  */

          while (insn_mode != VOIDmode)
            {
              icode = strlen_optab->handlers[(int) insn_mode].insn_code;
              if (icode != CODE_FOR_nothing)
                break;

              insn_mode = GET_MODE_WIDER_MODE (insn_mode);
            }
          if (insn_mode == VOIDmode)
            break;

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && GET_CODE (result) == REG
                 && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          /* Make sure the operands are acceptable to the predicates.  */

          if (! (*insn_operand_predicate[(int) icode][0]) (result, insn_mode))
            result = gen_reg_rtx (insn_mode);

          src_rtx = memory_address (BLKmode,
                                    expand_expr (src, NULL_RTX, ptr_mode,
                                                 EXPAND_NORMAL));
          if (! (*insn_operand_predicate[(int) icode][1]) (src_rtx, Pmode))
            src_rtx = copy_to_mode_reg (Pmode, src_rtx);

          char_rtx = const0_rtx;
          char_mode = insn_operand_mode[(int) icode][2];
          if (! (*insn_operand_predicate[(int) icode][2]) (char_rtx, char_mode))
            char_rtx = copy_to_mode_reg (char_mode, char_rtx);

          emit_insn (GEN_FCN (icode) (result,
                                      gen_rtx (MEM, BLKmode, src_rtx),
                                      char_rtx, GEN_INT (align)));

          /* Return the value in the proper mode for this function.  */
          if (GET_MODE (result) == value_mode)
            return result;
          else if (target != 0)
            {
              convert_move (target, result, 0);
              return target;
            }
          else
            return convert_to_mode (value_mode, result, 0);
        }

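      /* Editor's illustrative note, not part of the original source:
         when c_strlen can see through the argument, e.g.
         `strlen ("hello")', LEN above is the constant 5 and no strlen
         insn or library call is emitted at all; the inline strlen-insn
         path is used only for non-constant, suitably aligned operands.  */
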
    case BUILT_IN_STRCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
        break;
      else
        {
          tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

          if (len == 0)
            break;

          len = size_binop (PLUS_EXPR, len, integer_one_node);

          chainon (arglist, build_tree_list (NULL_TREE, len));
        }

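      /* Editor's illustrative note, not part of the original source:
         when the source length is a known constant, e.g.
         `strcpy (buf, "hi")', the code above appends the length 3
         (strlen plus one, so the terminating NUL is copied too) to
         ARGLIST and falls through, expanding the call exactly like
         `memcpy (buf, "hi", 3)'.  */
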
      /* Drops in.  */
    case BUILT_IN_MEMCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
        break;
      else
        {
          tree dest = TREE_VALUE (arglist);
          tree src = TREE_VALUE (TREE_CHAIN (arglist));
          tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
          tree type;

          int src_align
            = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          int dest_align
            = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          rtx dest_rtx, dest_mem, src_mem;

          /* If either SRC or DEST is not a pointer type, don't do
             this operation in-line.  */
          if (src_align == 0 || dest_align == 0)
            {
              if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
                TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
              break;
            }

          dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
          dest_mem = gen_rtx (MEM, BLKmode,
                              memory_address (BLKmode, dest_rtx));
          /* There could be a void* cast on top of the object.  */
          while (TREE_CODE (dest) == NOP_EXPR)
            dest = TREE_OPERAND (dest, 0);
          type = TREE_TYPE (TREE_TYPE (dest));
          MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
          src_mem = gen_rtx (MEM, BLKmode,
                             memory_address (BLKmode,
                                             expand_expr (src, NULL_RTX,
                                                          ptr_mode,
                                                          EXPAND_SUM)));
          /* There could be a void* cast on top of the object.  */
          while (TREE_CODE (src) == NOP_EXPR)
            src = TREE_OPERAND (src, 0);
          type = TREE_TYPE (TREE_TYPE (src));
          MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);

          /* Copy word part most expediently.  */
          emit_block_move (dest_mem, src_mem,
                           expand_expr (len, NULL_RTX, VOIDmode, 0),
                           MIN (src_align, dest_align));
          return force_operand (dest_rtx, NULL_RTX);
        }

    case BUILT_IN_MEMSET:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
              != INTEGER_TYPE)
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || (TREE_CODE (TREE_TYPE
                         (TREE_VALUE
                          (TREE_CHAIN (TREE_CHAIN (arglist)))))
              != INTEGER_TYPE))
        break;
      else
        {
          tree dest = TREE_VALUE (arglist);
          tree val = TREE_VALUE (TREE_CHAIN (arglist));
          tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
          tree type;

          int dest_align
            = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          rtx dest_rtx, dest_mem;

          /* If DEST is not a pointer type, don't do this
             operation in-line.  */
          if (dest_align == 0)
            break;

          /* If VAL is not 0, don't do this operation in-line.  */
          if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
            break;

          dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
          dest_mem = gen_rtx (MEM, BLKmode,
                              memory_address (BLKmode, dest_rtx));
          /* There could be a void* cast on top of the object.  */
          while (TREE_CODE (dest) == NOP_EXPR)
            dest = TREE_OPERAND (dest, 0);
          type = TREE_TYPE (TREE_TYPE (dest));
          MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);

          clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
                         dest_align);

          return force_operand (dest_rtx, NULL_RTX);
        }

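      /* Editor's illustrative note, not part of the original source:
         only the zero-fill form is inlined, so `memset (p, 0, n)'
         becomes a clear_storage of N bytes, while `memset (p, 1, n)'
         falls through to the library call since VAL is nonzero.  */
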
/* These comparison functions need an instruction that returns an actual
   index.  An ordinary compare that just sets the condition codes
   is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
        break;
      else if (!HAVE_cmpstrsi)
        break;
      {
        tree arg1 = TREE_VALUE (arglist);
        tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
        tree offset;
        tree len, len2;

        len = c_strlen (arg1);
        if (len)
          len = size_binop (PLUS_EXPR, integer_one_node, len);
        len2 = c_strlen (arg2);
        if (len2)
          len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

        /* If we don't have a constant length for the first, use the length
           of the second, if we know it.  We don't require a constant for
           this case; some cost analysis could be done if both are available
           but neither is constant.  For now, assume they're equally cheap.

           If both strings have constant lengths, use the smaller.  This
           could arise if optimization results in strcmp being called with
           two fixed strings, or if the code was machine-generated.  We should
           add some code to the `memcmp' handler below to deal with such
           situations, someday.  */
        if (!len || TREE_CODE (len) != INTEGER_CST)
          {
            if (len2)
              len = len2;
            else if (len == 0)
              break;
          }
        else if (len2 && TREE_CODE (len2) == INTEGER_CST)
          {
            if (tree_int_cst_lt (len2, len))
              len = len2;
          }

        chainon (arglist, build_tree_list (NULL_TREE, len));
      }

      /* Drops in.  */
    case BUILT_IN_MEMCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
        break;
      else if (!HAVE_cmpstrsi)
        break;
      {
        tree arg1 = TREE_VALUE (arglist);
        tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
        tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
        rtx result;

        int arg1_align
          = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
        int arg2_align
          = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
        enum machine_mode insn_mode
          = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

        /* If we don't have POINTER_TYPE, call the function.  */
        if (arg1_align == 0 || arg2_align == 0)
          {
            if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
              TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
            break;
          }

        /* Make a place to write the result of the instruction.  */
        result = target;
        if (! (result != 0
               && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
               && REGNO (result) >= FIRST_PSEUDO_REGISTER))
          result = gen_reg_rtx (insn_mode);

        emit_insn (gen_cmpstrsi (result,
                                 gen_rtx (MEM, BLKmode,
                                          expand_expr (arg1, NULL_RTX,
                                                       ptr_mode,
                                                       EXPAND_NORMAL)),
                                 gen_rtx (MEM, BLKmode,
                                          expand_expr (arg2, NULL_RTX,
                                                       ptr_mode,
                                                       EXPAND_NORMAL)),
                                 expand_expr (len, NULL_RTX, VOIDmode, 0),
                                 GEN_INT (MIN (arg1_align, arg2_align))));

        /* Return the value in the proper mode for this function.  */
        mode = TYPE_MODE (TREE_TYPE (exp));
        if (GET_MODE (result) == mode)
          return result;
        else if (target != 0)
          {
            convert_move (target, result, 0);
            return target;
          }
        else
          return convert_to_mode (mode, result, 0);
      }
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif

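      /* Editor's illustrative note, not part of the original source:
         as with strcpy/memcpy above, `strcmp (s, "abc")' gets the bound
         4 (strlen plus one) appended and drops into the memcmp
         expansion, so a single cmpstrsi insn compares at most that many
         bytes.  Without a cmpstrsi pattern, both builtins simply become
         library calls.  */
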
      /* __builtin_setjmp is passed a pointer to an array of five words
         (not all will be used on all machines).  It operates similarly to
         the C library function of the same name, but is more efficient.
         Much of the code below (and for longjmp) is copied from the handling
         of non-local gotos.

         NOTE: This is intended for use by GNAT and will only work in
         the method used by it.  This code will likely NOT survive to
         the GCC 2.8.0 release.  */
    case BUILT_IN_SETJMP:
      if (arglist == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
        break;

      {
        rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
                                    VOIDmode, 0);
        rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
        enum machine_mode sa_mode = Pmode;
        rtx stack_save;
        int old_inhibit_defer_pop = inhibit_defer_pop;
        int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
                                            get_identifier ("__dummy"), 0);
        rtx next_arg_reg;
        CUMULATIVE_ARGS args_so_far;
        int i;

#ifdef POINTERS_EXTEND_UNSIGNED
        buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

        buf_addr = force_reg (Pmode, buf_addr);

        if (target == 0 || GET_CODE (target) != REG
            || REGNO (target) < FIRST_PSEUDO_REGISTER)
          target = gen_reg_rtx (value_mode);

        emit_queue ();

        CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
        current_function_calls_setjmp = 1;

        /* We store the frame pointer and the address of lab1 in the buffer
           and use the rest of it for the stack save area, which is
           machine-dependent.  */
        emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
                        virtual_stack_vars_rtx);
        emit_move_insn
          (validize_mem (gen_rtx (MEM, Pmode,
                                  plus_constant (buf_addr,
                                                 GET_MODE_SIZE (Pmode)))),
           gen_rtx (LABEL_REF, Pmode, lab1));

#ifdef HAVE_save_stack_nonlocal
        if (HAVE_save_stack_nonlocal)
          sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
#endif

        stack_save = gen_rtx (MEM, sa_mode,
                              plus_constant (buf_addr,
                                             2 * GET_MODE_SIZE (Pmode)));
        emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

#ifdef HAVE_setjmp
        if (HAVE_setjmp)
          emit_insn (gen_setjmp ());
#endif

        /* Set TARGET to zero and branch around the other case.  */
        emit_move_insn (target, const0_rtx);
        emit_jump_insn (gen_jump (lab2));
        emit_barrier ();
        emit_label (lab1);

        /* Note that setjmp clobbers FP when we get here, so we have to
           make sure it's marked as used by this function.  */
        emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));

        /* Mark the static chain as clobbered here so life information
           doesn't get messed up for it.  */
        emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));

        /* Now put in the code to restore the frame pointer, and argument
           pointer, if needed.  The code below is from expand_end_bindings
           in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
        if (! HAVE_nonlocal_goto)
#endif
          emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

        current_function_has_nonlocal_goto = 1;

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
        if (fixed_regs[ARG_POINTER_REGNUM])
          {
#ifdef ELIMINABLE_REGS
            static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

            for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
              if (elim_regs[i].from == ARG_POINTER_REGNUM
                  && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
                break;

            if (i == sizeof elim_regs / sizeof elim_regs[0])
#endif
              {
                /* Now restore our arg pointer from the address at which it
                   was saved in our stack frame.
                   If there hasn't been space allocated for it yet, make
                   some now.  */
                if (arg_pointer_save_area == 0)
                  arg_pointer_save_area
                    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
                emit_move_insn (virtual_incoming_args_rtx,
                                copy_to_reg (arg_pointer_save_area));
              }
          }
#endif

        /* The static chain pointer contains the address of the dummy
           function.  We need to call it here to handle some PIC cases of
           restoring a global pointer.  Then return 1.  */
        op0 = copy_to_mode_reg (Pmode, static_chain_rtx);

        /* We can't actually call emit_library_call here, so do everything
           it does, which isn't much for a libfunc with no args.  */
        op0 = memory_address (FUNCTION_MODE, op0);

        INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
                              gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
        next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);

#ifndef ACCUMULATE_OUTGOING_ARGS
#ifdef HAVE_call_pop
        if (HAVE_call_pop)
          emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
                                        const0_rtx, next_arg_reg,
                                        GEN_INT (return_pops)));
        else
#endif
#endif

#ifdef HAVE_call
        if (HAVE_call)
          emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
                                    const0_rtx, next_arg_reg, const0_rtx));
        else
#endif
          abort ();

        emit_move_insn (target, const1_rtx);
        emit_label (lab2);
        return target;
      }

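      /* Editor's illustrative sketch, not part of the original source,
         of the buffer layout used by __builtin_setjmp/__builtin_longjmp
         above, in Pmode-sized words:

             buf[0]      frame pointer (virtual_stack_vars_rtx)
             buf[1]      resume label (lab1)
             buf[2]...   stack save area (machine-dependent size)

         A caller therefore supplies at least five words, e.g.
         `void *buf[5];', as the comment above notes.  */
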
      /* __builtin_longjmp is passed a pointer to an array of five words
         and a value, which is a dummy.  It's similar to the C library longjmp
         function but works with __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (arglist == 0 || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
        break;

      {
        tree dummy_id = get_identifier ("__dummy");
        tree dummy_type = build_function_type (void_type_node, NULL_TREE);
        tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
#ifdef POINTERS_EXTEND_UNSIGNED
        rtx buf_addr
          = force_reg (Pmode,
                       convert_memory_address
                       (Pmode,
                        expand_expr (TREE_VALUE (arglist),
                                     NULL_RTX, VOIDmode, 0)));
#else
        rtx buf_addr
          = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
                                           NULL_RTX,
                                           VOIDmode, 0));
#endif
        rtx fp = gen_rtx (MEM, Pmode, buf_addr);
        rtx lab = gen_rtx (MEM, Pmode,
                           plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
        enum machine_mode sa_mode
#ifdef HAVE_save_stack_nonlocal
          = (HAVE_save_stack_nonlocal
             ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
             : Pmode);
#else
          = Pmode;
#endif
        rtx stack = gen_rtx (MEM, sa_mode,
                             plus_constant (buf_addr,
                                            2 * GET_MODE_SIZE (Pmode)));

        DECL_EXTERNAL (dummy_decl) = 1;
        TREE_PUBLIC (dummy_decl) = 1;
        make_decl_rtl (dummy_decl, NULL_PTR, 1);

        /* Expand the second expression just for side-effects.  */
        expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
                     const0_rtx, VOIDmode, 0);

        assemble_external (dummy_decl);

        /* Pick up FP, label, and SP from the block and jump.  This code is
           from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
        if (HAVE_nonlocal_goto)
          emit_insn (gen_nonlocal_goto (fp, lab, stack,
                                        XEXP (DECL_RTL (dummy_decl), 0)));
        else
#endif
          {
            lab = copy_to_reg (lab);
            emit_move_insn (hard_frame_pointer_rtx, fp);
            emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

            /* Put in the static chain register the address of the dummy
               function.  */
            emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
            emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
            emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
            emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
            emit_indirect_jump (lab);
          }

        return const0_rtx;
      }

    default:			/* just do library call, if unknown builtin */
      error ("built-in function `%s' not currently supported",
             IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */

  return expand_call (exp, target, ignore);
}
\f
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];

/* Return the offset of register REGNO into the block returned by
   __builtin_apply_args.  This is not declared static, since it is
   needed in objc-act.c.  */

int
apply_args_register_offset (regno)
     int regno;
{
  apply_args_size ();

  /* Arguments are always put in outgoing registers (in the argument
     block) if that makes sense.  */
#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO (regno);
#endif
  return apply_args_reg_offset[regno];
}

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (struct_value_rtx)
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode != VOIDmode;
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode)
                  && HARD_REGNO_NREGS (regno, mode) == 1)
                best_mode = mode;

            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))
                  best_mode = mode;

            mode = best_mode;
            if (mode == VOIDmode)
              abort ();

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            apply_args_reg_offset[regno] = size;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
            apply_args_reg_offset[regno] = 0;
          }
    }
  return size;
}
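
/* Editor's illustrative note, not part of the original source: the
   rounding above aligns each saved register's slot.  E.g. with
   size == 5 and a register whose mode needs align == 4,
   CEIL (5, 4) * 4 == 8, so that register is stored at offset 8 and
   size advances to 8 + GET_MODE_SIZE (mode).  */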

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_VALUE_REGNO_P (regno))
          {
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode != TImode;
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode))
                best_mode = mode;

            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))
                  best_mode = mode;

            mode = best_mode;
            if (mode == VOIDmode)
              abort ();

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
        mem = change_address (result, mode,
                              plus_constant (XEXP (result, 0), size));
        savevec[nelts++] = (savep
                            ? gen_rtx (SET, VOIDmode, mem, reg)
                            : gen_rtx (SET, VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        rtx tem;

        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));

#ifdef STACK_REGS
        /* For reg-stack.c's stack register household.
           Compare with a similar piece of code in function.c.  */

        emit_insn (gen_rtx (USE, mode, tem));
#endif

        emit_move_insn (change_address (registers, mode,
                                        plus_constant (XEXP (registers, 0),
                                                       size)),
                        tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
                  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
                                      plus_constant (XEXP (registers, 0),
                                                     size)),
                      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
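
/* Editor's illustrative note, not part of the original source: the
   block built above has the layout computed by apply_args_size:

       offset 0                    incoming arg pointer
       offset GET_MODE_SIZE (Pmode)
                                   structure value address, if any
       then, for each register REGNO with apply_args_mode[REGNO]
       != VOIDmode, its saved value at apply_args_reg_offset[REGNO].

   GNU C's `__builtin_apply_args ()' returns the address of such a
   block.  */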

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
                  gen_rtx (MEM, Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
                                incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = copy_addr_to_reg (push_block (argsize, 0, 0));
  emit_block_move (gen_rtx (MEM, BLKmode, dest),
                   gen_rtx (MEM, BLKmode, incoming_args),
                   argsize,
                   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx (MEM, BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx (REG, mode, regno);
        emit_move_insn (reg,
                        change_address (arguments, mode,
                                        plus_constant (XEXP (arguments, 0),
                                                       size)));

        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
                      change_address (arguments, Pmode,
                                      plus_constant (XEXP (arguments, 0),
                                                     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
        use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            if (valreg)
              abort (); /* HAVE_untyped_call required.  */
            valreg = gen_rtx (REG, mode, regno);
          }

      emit_call_insn (gen_call_value (valreg,
                                      gen_rtx (MEM, FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
                                      XEXP (result, 0)),
                      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
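
/* Editor's illustrative sketch, not part of the original source, of how
   the GNU C builtins expanded above are used together to forward a call:

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) f, args, 64);
       __builtin_return (result);

   The third argument (here 64) is the number of bytes of stack
   arguments to copy; as the ??? comment above notes, it is not adjusted
   automatically.  */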

/* Perform an untyped return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  apply_result_size ();
  result = gen_rtx (MEM, BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
        emit_move_insn (reg,
                        change_address (result, mode,
                                        plus_constant (XEXP (result, 0),
                                                       size)));

        push_to_sequence (call_fusage);
        emit_insn (gen_rtx (USE, VOIDmode, reg));
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
\f
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     register tree exp;
     int post, ignore;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  if (output_bytecode)
    {
      bc_expand_expr (exp);
      return NULL_RTX;
    }

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
        SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
        bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
           && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
         post-incrementing, get a copy of the old value.  Otherwise,
         just mark that we cannot increment in place.  */
      if (post)
        op0 = copy_to_reg (op0);
      else
        bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
                 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode)
          && (*insn_operand_predicate[icode][2]) (op1, mode))
        single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
         Problems with multiple evaluation of INCREMENTED are prevented
         because either (1) it is a component_ref or preincrement,
         in which case it was stabilized above, or (2) it is an array_ref
         with constant index in an array in a register, which is
         safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
                             || TREE_CODE (exp) == PREDECREMENT_EXPR)
                            ? MINUS_EXPR : PLUS_EXPR),
                           TREE_TYPE (exp),
                           incremented,
                           TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
             || TREE_CODE (incremented) == CONVERT_EXPR)
        {
          newexp = convert (TREE_TYPE (incremented), newexp);
          incremented = TREE_OPERAND (incremented, 0);
        }

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
         If there is an insn to add or subtract in this mode, queue it.
         Queueing the increment insn avoids the register shuffling
         that often results if we must increment now and first save
         the old value for subsequent use.  */

#if 0 /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode))
        {
          if (! (*insn_operand_predicate[icode][2]) (op1, mode))
            op1 = force_reg (mode, op1);

          return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
        }
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
                      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
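
/* Editor's illustrative note, not part of the original source: for
   `y = x++;' the code above takes the post branch, returns a copy of
   X's old value, and queues the add to avoid the register shuffling
   of incrementing immediately; for `y = ++x;' it increments in place
   (with a single insn when the predicates allow) and returns the
   incremented lvalue.  */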
\f
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0
          /* Do nothing if the call returns a variable-sized object.  */
          || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          /* Do nothing to built-in functions.  */
          || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
              && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                  == FUNCTION_DECL)
              && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
        return;

      CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case COMPOUND_EXPR:
    case COND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
         the adjust will be done for it (since it makes jumps).
         Do it now, so that if this is inside an argument
         of a function, we don't get the stack adjustment
         after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case BLOCK:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return;
    }

  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
        type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
        if (type == 'e' || type == '<' || type == '1' || type == '2'
            || type == 'r')
          preexpand_calls (TREE_OPERAND (exp, i));
      }
}
\f
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from a function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
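
/* Editor's illustrative note, not part of the original source:
   pending_stack_adjust batches the pops for calls that have already
   returned.  If two calls each pushed 8 bytes of arguments and neither
   was popped at its call site, the deferred total of 16 is popped here
   by a single adjust_stack instead of two separate 8-byte
   adjustments.  */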

/* Defer the expansion of all cleanups up to OLD_CLEANUPS.
   Returns the cleanups to be performed.  */

static tree
defer_cleanups_to (old_cleanups)
     tree old_cleanups;
{
  tree new_cleanups = NULL_TREE;
  tree cleanups = cleanups_this_call;
  tree last = NULL_TREE;

  while (cleanups_this_call != old_cleanups)
    {
      (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
      last = cleanups_this_call;
      cleanups_this_call = TREE_CHAIN (cleanups_this_call);
    }

  if (last)
    {
      /* Remove the list from the chain of cleanups.  */
      TREE_CHAIN (last) = NULL_TREE;

      /* Reverse them so that we can build them in the right order.  */
      cleanups = nreverse (cleanups);

      /* All cleanups must be on the function_obstack.  */
      push_obstacks_nochange ();
      resume_temporary_allocation ();

      while (cleanups)
        {
          if (new_cleanups)
            new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
                                  TREE_VALUE (cleanups), new_cleanups);
          else
            new_cleanups = TREE_VALUE (cleanups);

          cleanups = TREE_CHAIN (cleanups);
        }

      pop_obstacks ();
    }

  return new_cleanups;
}

/* Expand all cleanups up to OLD_CLEANUPS.
   Needed here, and also for language-dependent calls.  */

void
expand_cleanups_to (old_cleanups)
     tree old_cleanups;
{
  while (cleanups_this_call != old_cleanups)
    {
      (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
      expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
      cleanups_this_call = TREE_CHAIN (cleanups_this_call);
    }
}
\f
9799/* Expand conditional expressions. */
0006469d 9800
ca695ac9
JB
9801/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9802 LABEL is an rtx of code CODE_LABEL, in this function and all the
9803 functions here. */
0006469d 9804
ca695ac9
JB
9805void
9806jumpifnot (exp, label)
9807 tree exp;
9808 rtx label;
9809{
9810 do_jump (exp, label, NULL_RTX);
9811}
0006469d 9812
ca695ac9 9813/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
0006469d 9814
ca695ac9
JB
9815void
9816jumpif (exp, label)
9817 tree exp;
9818 rtx label;
9819{
9820 do_jump (exp, NULL_RTX, label);
9821}
0006469d 9822
ca695ac9
JB
9823/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9824 the result is zero, or IF_TRUE_LABEL if the result is one.
9825 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9826 meaning fall through in that case.
0006469d 9827
ca695ac9
JB
9828 do_jump always does any pending stack adjust except when it does not
9829 actually perform a jump. An example where there is no jump
9830 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
0006469d 9831
ca695ac9
JB
9832 This function is responsible for optimizing cases such as
9833 &&, || and comparison operators in EXP. */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;
  enum machine_mode mode;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
                                   TREE_OPERAND (exp, 0),
                                   TREE_OPERAND (exp, 1)),
                            NE, NE);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
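      /* E.g., `(x & 0x80) != 0' with a 32-bit int X can be tested as a
         QImode (8-bit) comparison, since the mask fits in 8 bits; there
         testing bit 7 is a sign-bit test, which many machines handle
         cheaply.  */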

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

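    /* For the two short-circuit cases below, cleanups queued while
       expanding the second operand must run only if that operand was
       actually evaluated.  A run-time flag in a word_mode register
       records which path was taken, and the deferred cleanups are
       wrapped in a COND_EXPR keyed on that flag.  */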
    case TRUTH_ANDIF_EXPR:
      {
        rtx seq1, seq2;
        tree cleanups, old_cleanups;

        if (if_false_label == 0)
          if_false_label = drop_through_label = gen_label_rtx ();
        start_sequence ();
        do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
        seq1 = get_insns ();
        end_sequence ();

        old_cleanups = cleanups_this_call;
        start_sequence ();
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        seq2 = get_insns ();
        end_sequence ();

        cleanups = defer_cleanups_to (old_cleanups);
        if (cleanups)
          {
            rtx flag = gen_reg_rtx (word_mode);
            tree new_cleanups;
            tree cond;

            /* Flag cleanups as not needed.  */
            emit_move_insn (flag, const0_rtx);
            emit_insns (seq1);

            /* Flag cleanups as needed.  */
            emit_move_insn (flag, const1_rtx);
            emit_insns (seq2);

            /* All cleanups must be on the function_obstack.  */
            push_obstacks_nochange ();
            resume_temporary_allocation ();

            /* Convert FLAG, which is an rtx, into a tree.  */
            cond = make_node (RTL_EXPR);
            TREE_TYPE (cond) = integer_type_node;
            RTL_EXPR_RTL (cond) = flag;
            RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
            cond = save_expr (cond);

            new_cleanups = build (COND_EXPR, void_type_node,
                                  truthvalue_conversion (cond),
                                  cleanups, integer_zero_node);
            new_cleanups = fold (new_cleanups);

            pop_obstacks ();

            /* Now add in the conditionalized cleanups.  */
            cleanups_this_call
              = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
            (*interim_eh_hook) (NULL_TREE);
          }
        else
          {
            emit_insns (seq1);
            emit_insns (seq2);
          }
      }
      break;

    case TRUTH_ORIF_EXPR:
      {
        rtx seq1, seq2;
        tree cleanups, old_cleanups;

        if (if_true_label == 0)
          if_true_label = drop_through_label = gen_label_rtx ();
        start_sequence ();
        do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
        seq1 = get_insns ();
        end_sequence ();

        old_cleanups = cleanups_this_call;
        start_sequence ();
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        seq2 = get_insns ();
        end_sequence ();

        cleanups = defer_cleanups_to (old_cleanups);
        if (cleanups)
          {
            rtx flag = gen_reg_rtx (word_mode);
            tree new_cleanups;
            tree cond;

            /* Flag cleanups as not needed.  */
            emit_move_insn (flag, const0_rtx);
            emit_insns (seq1);

            /* Flag cleanups as needed.  */
            emit_move_insn (flag, const1_rtx);
            emit_insns (seq2);

            /* All cleanups must be on the function_obstack.  */
            push_obstacks_nochange ();
            resume_temporary_allocation ();

            /* Convert FLAG, which is an rtx, into a tree.  */
            cond = make_node (RTL_EXPR);
            TREE_TYPE (cond) = integer_type_node;
            RTL_EXPR_RTL (cond) = flag;
            RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
            cond = save_expr (cond);

            new_cleanups = build (COND_EXPR, void_type_node,
                                  truthvalue_conversion (cond),
                                  cleanups, integer_zero_node);
            new_cleanups = fold (new_cleanups);

            pop_obstacks ();

            /* Now add in the conditionalized cleanups.  */
            cleanups_this_call
              = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
            (*interim_eh_hook) (NULL_TREE);
          }
        else
          {
            emit_insns (seq1);
            emit_insns (seq2);
          }
      }
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
        int bitsize, bitpos, unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep);

        type = type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          register rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);
          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
        }
      break;

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
                 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          do_jump
            (fold
             (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                     fold (build (EQ_EXPR, TREE_TYPE (exp),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))),
                     fold (build (EQ_EXPR, TREE_TYPE (exp),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))))),
             if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          comparison = compare (exp, EQ, EQ);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
                 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          do_jump
            (fold
             (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                     fold (build (NE_EXPR, TREE_TYPE (exp),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))),
                     fold (build (NE_EXPR, TREE_TYPE (exp),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))))),
             if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          comparison = compare (exp, NE, NE);
        break;
      }

    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        comparison = compare (exp, GE, GEU);
      break;

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
        comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && !can_compare_p (GET_MODE (temp)))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                       GET_MODE (temp), NULL_RTX, 0);
      else
        abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
        emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */
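
/* E.g., with 32-bit words, comparing two 64-bit values takes up to two
   word comparisons: first the high-order words, with GT (or GTU for
   unsigned operands), and then, only if the high-order words are equal,
   the low-order words, which are always compared with GTU.  */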

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
        insn = get_insns ();
      else if (INSN_DELETED_P (prev))
        abort ();
      else
        insn = NEXT_INSN (prev);

      for (; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
\f
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */
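
/* E.g., a comparison written with the constant first, such as
   (const1_rtx LT reg), is rewritten by the operand swap below as
   (reg GT const1_rtx), so later code can assume that a constant
   operand, if any, is the second one.  */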

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
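
  /* E.g., `(x & 8) != 0' becomes `(x >> 3) & 1', and `(x & 8) == 0'
     becomes `((x >> 3) ^ 1) & 1'; when the tested bit is the sign bit
     of its type, the trailing AND is omitted entirely.  */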

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      HOST_WIDE_INT tem;
      int bitnum;
      int ops_unsignedp;

      tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
                                 NULL_RTX, VOIDmode, 0));
      /* In this case, immed_double_const will sign extend the value to make
         it look the same on the host and target.  We must remove the
         sign-extension before calling exact_log2, since exact_log2 will
         fail for negative values.  */
      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
          && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
        /* We don't use the obvious constant shift to generate the mask,
           because that generates compiler warnings when BITS_PER_WORD is
           greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
           code is unreachable in that case.  */
        tem = tem & GET_MODE_MASK (word_mode);
      bitnum = exact_log2 (tem);

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
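
  /* E.g., for case values 3 through 7, RANGE is 4 and INDEX has already
     had 3 subtracted: an original value of 2 yields (unsigned) -1, which
     the single GTU test catches just as it catches values above 7.  */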

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
                   gen_rtx (MULT, Pmode, index,
                            GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */

/* Emit a suitable bytecode to load a value from memory, assuming a pointer
   to that value is on the top of the stack.  The resulting type is TYPE, and
   the source declaration is DECL.  */

void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  /* Bit fields are special.  We only know about signed and
     unsigned ints, and enums.  The latter are treated as
     signed integers.  */

  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
        || TREE_CODE (type) == INTEGER_TYPE)
      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
    else
      abort ();
  else
    /* See corresponding comment in bc_store_memory().  */
    if (TYPE_MODE (type) == BLKmode
        || TYPE_MODE (type) == VOIDmode)
      return;
    else
      opcode = mode_to_load_map [(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Store the contents of the second stack slot to the address in the
   top stack slot.  DECL is the declaration of the destination and is used
   to determine whether we're dealing with a bitfield.  */

void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  if (DECL_BIT_FIELD (decl))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE
          || TREE_CODE (type) == INTEGER_TYPE)
        opcode = sstoreBI;
      else
        abort ();
    }
  else
    if (TYPE_MODE (type) == BLKmode)
      {
        /* Copy structure.  This expands to a block copy instruction, storeBLK.
           In addition to the arguments expected by the other store instructions,
           it also expects a type size (SImode) on top of the stack, which is the
           structure size in size units (usually bytes).  The first two arguments
           are already on the stack; so we just put the size on level 1.  For some
           other languages, the size may be variable; this is why we don't encode
           it as a storeBLK literal, but rather treat it as a full-fledged expression.  */

        bc_expand_expr (TYPE_SIZE (type));
        opcode = storeBLK;
      }
    else
      opcode = mode_to_store_map [(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Allocate local stack space sufficient to hold a value of the given
   SIZE at alignment boundary ALIGNMENT bits.  ALIGNMENT must be an
   integral power of 2.  A special case is locals of type VOID, which
   have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
   remapped into the corresponding attribute of SI.  */
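
/* E.g., a 3-byte local at 32-bit alignment first rounds LOCAL_VARS_SIZE
   up to a multiple of 4 bytes and then reserves 3 bytes: an offset of 5
   becomes 8, and the next local starts no earlier than offset 11.  */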

rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;

  if (size < 0)
    abort ();

  /* Normalize size and alignment.  */
  if (!size)
    size = UNITS_PER_WORD;

  if (alignment < BITS_PER_UNIT)
    byte_alignment = 1 << (INT_ALIGN - 1);
  else
    /* Align.  */
    byte_alignment = alignment / BITS_PER_UNIT;

  if (local_vars_size & (byte_alignment - 1))
    local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}

/* Allocate variable-sized local array.  Variable-sized arrays are
   actually pointers to the address in memory where they are stored.  */

rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));

  /* Align pointer.  */
  if (local_vars_size & ptralign)
    local_vars_size += ptralign - (local_vars_size & ptralign);

  /* Note down local space needed: pointer to block; also return
     dummy rtx.  */

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
  return retval;
}

/* Push the machine address for the given external variable offset.  */

void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
                         BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Like above, but expects an IDENTIFIER.  */

void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Push the machine address for the given local variable offset.  */

void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}

/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */

void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
                              / BITS_PER_UNIT));
}

/* Convert a[i] into *(a + i).  */
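
/* E.g., on a machine with 4-byte ints, `a[i]' with `int a[10]' becomes
   `*(&a + i * 4)', after widening I to the precision of a pointer so
   that the multiplication cannot overflow spuriously.  */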

tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
  tree type = TREE_TYPE (exp);
  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
                           TREE_OPERAND (exp, 0));
  tree index = TREE_OPERAND (exp, 1);

  /* Convert the integer argument to a type the same size as a pointer
     so the multiply won't overflow spuriously.  */

  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
    index = convert (type_for_size (POINTER_SIZE, 0), index);

  /* The array address isn't volatile even if the array is.
     (Of course this isn't terribly relevant since the bytecode
     translator treats nearly everything as volatile anyway.)  */
  TREE_THIS_VOLATILE (array_adr) = 0;

  return build1 (INDIRECT_REF, type,
                 fold (build (PLUS_EXPR,
                              TYPE_POINTER_TO (type),
                              array_adr,
                              fold (build (MULT_EXPR,
                                           TYPE_POINTER_TO (type),
                                           index,
                                           size_in_bytes (type))))));
}

/* Load the address of the component referenced by the given
   COMPONENT_REF expression.

   Returns innermost lvalue.  */

tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem, chain;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;

  tem = TREE_OPERAND (exp, 1);
  mode = DECL_MODE (tem);

  /* Compute cumulative bit offset for nested component refs
     and array refs, and find the ultimate containing object.  */

  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
        bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else
        if (TREE_CODE (tem) == ARRAY_REF
            && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
            && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
          bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
                     * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
                     /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
        else
          break;
    }

  bc_expand_expr (tem);

  /* For bitfields also push their offset and size.  */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
  else if ((SIval = bitpos / BITS_PER_UNIT))
    bc_emit_instruction (addconstPSI, SIval);

  return (TREE_OPERAND (exp, 1));
}

/* Emit code to push two SI constants.  */

void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}
bbf6f052 11351
bbf6f052 11352
ca695ac9
JB
11353/* Emit byte code to push the address of the given lvalue expression to
11354 the stack. If it's a bit field, we also push offset and size info.
bbf6f052 11355
ca695ac9 11356 Returns innermost component, which allows us to determine not only
0f41302f 11357 its type, but also whether it's a bitfield. */
ca695ac9
JB
11358
11359tree
11360bc_expand_address (exp)
bbf6f052 11361 tree exp;
bbf6f052 11362{
ca695ac9
JB
11363 /* Safeguard */
11364 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11365 return (exp);
bbf6f052 11366
e7c33f54 11367
ca695ac9
JB
11368 switch (TREE_CODE (exp))
11369 {
11370 case ARRAY_REF:
e7c33f54 11371
ca695ac9 11372 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
e7c33f54 11373
ca695ac9 11374 case COMPONENT_REF:
bbf6f052 11375
ca695ac9 11376 return (bc_expand_component_address (exp));
bbf6f052 11377
ca695ac9 11378 case INDIRECT_REF:
bbf6f052 11379
ca695ac9
JB
11380 bc_expand_expr (TREE_OPERAND (exp, 0));
11381
11382 /* For variable-sized types: retrieve pointer. Sometimes the
11383 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
0f41302f 11384 also make sure we have an operand, just in case... */
ca695ac9
JB
11385
11386 if (TREE_OPERAND (exp, 0)
11387 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11388 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11389 bc_emit_instruction (loadP);
11390
11391 /* If packed, also return offset and size */
11392 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11393
11394 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11395 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11396
11397 return (TREE_OPERAND (exp, 0));
11398
11399 case FUNCTION_DECL:
11400
e7a42772
JB
11401 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11402 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
bbf6f052 11403 break;
ca695ac9
JB
11404
11405 case PARM_DECL:
11406
11407 bc_load_parmaddr (DECL_RTL (exp));
11408
11409 /* For variable-sized types: retrieve pointer */
11410 if (TYPE_SIZE (TREE_TYPE (exp))
11411 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11412 bc_emit_instruction (loadP);
11413
11414 /* If packed, also return offset and size */
11415 if (DECL_BIT_FIELD (exp))
11416 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11417 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11418
bbf6f052 11419 break;
ca695ac9
JB
11420
11421 case RESULT_DECL:
11422
11423 bc_emit_instruction (returnP);
bbf6f052 11424 break;
ca695ac9
JB
11425
11426 case VAR_DECL:
11427
11428#if 0
e7a42772 11429 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
11430 bc_load_externaddr (DECL_RTL (exp));
11431#endif
11432
11433 if (DECL_EXTERNAL (exp))
e7a42772 11434 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
eb862a37 11435 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
bbf6f052 11436 else
ca695ac9
JB
11437 bc_load_localaddr (DECL_RTL (exp));
11438
11439	      /* For variable-sized types: retrieve the pointer.  */
11440 if (TYPE_SIZE (TREE_TYPE (exp))
11441 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11442 bc_emit_instruction (loadP);
11443
11444	      /* If packed, also return offset and size.  */
11445 if (DECL_BIT_FIELD (exp))
11446 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11447 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11448
bbf6f052 11449 break;
ca695ac9
JB
11450
11451 case STRING_CST:
11452 {
11453 rtx r;
11454
11455 bc_emit_bytecode (constP);
11456 r = output_constant_def (exp);
e7a42772 11457 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
ca695ac9
JB
11458
11459#ifdef DEBUG_PRINT_CODE
11460 fputc ('\n', stderr);
11461#endif
11462 }
bbf6f052 11463 break;
ca695ac9 11464
bbf6f052 11465 default:
bbf6f052 11466
ca695ac9
JB
11467      abort ();
11468 break;
bbf6f052
RK
11469 }
11470
0f41302f 11471 /* Most lvalues don't have components. */
ca695ac9
JB
11472 return (exp);
11473}
bbf6f052 11474
ca695ac9
JB
11475
11476/* Return a type code to be used by the runtime support in handling
11477   parameter passing.  The type code consists of the machine mode
11478   plus the minimal alignment shifted left 8 bits.  */
11479
11480tree
11481bc_runtime_type_code (type)
11482 tree type;
11483{
11484 int val;
11485
11486 switch (TREE_CODE (type))
bbf6f052 11487 {
ca695ac9
JB
11488 case VOID_TYPE:
11489 case INTEGER_TYPE:
11490 case REAL_TYPE:
11491 case COMPLEX_TYPE:
11492 case ENUMERAL_TYPE:
11493 case POINTER_TYPE:
11494 case RECORD_TYPE:
11495
6bd6178d 11496      val = (int) TYPE_MODE (type) | (TYPE_ALIGN (type) << 8);
ca695ac9
JB
11497 break;
11498
11499 case ERROR_MARK:
11500
11501 val = 0;
11502 break;
11503
11504 default:
af508edd 11505
ca695ac9
JB
11506 abort ();
11507 }
11508 return build_int_2 (val, 0);
11509}
af508edd 11510
af508edd 11511
ca695ac9 11512/* Generate a constructor label.  */
0f41302f 11513
ca695ac9
JB
11514char *
11515bc_gen_constr_label ()
11516{
11517 static int label_counter;
11518 static char label[20];
bbf6f052 11519
ca695ac9 11520 sprintf (label, "*LR%d", label_counter++);
bbf6f052 11521
ca695ac9
JB
11522 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11523}
bbf6f052 11524
bbf6f052 11525
ca695ac9
JB
11526/* Evaluate constructor CONSTR, leaving a pointer to it on the stack.  We
11527 expand the constructor data as static data, and push a pointer to it.
11528 The pointer is put in the pointer table and is retrieved by a constP
11529 bytecode instruction. We then loop and store each constructor member in
11530 the corresponding component. Finally, we return the original pointer on
0f41302f 11531 the stack. */
af508edd 11532
ca695ac9
JB
11533void
11534bc_expand_constructor (constr)
11535 tree constr;
11536{
11537 char *l;
11538 HOST_WIDE_INT ptroffs;
11539 rtx constr_rtx;
bbf6f052 11540
ca695ac9
JB
11541
11542	  /* Literal constructors are assembled directly as constant data,
11543	     whereas non-literal constructors have space reserved in the data
0f41302f 11544	 segment and are then evaluated and stored element by element.  */
ca695ac9
JB
11545
11546	  /* Allocate space in the proper segment and push a pointer
11547	     to the space on the stack.  */
bbf6f052 11548
ca695ac9 11549 l = bc_gen_constr_label ();
bbf6f052 11550
ca695ac9 11551 if (TREE_CONSTANT (constr))
bbf6f052 11552 {
ca695ac9
JB
11553 text_section ();
11554
11555 bc_emit_const_labeldef (l);
11556 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
bbf6f052 11557 }
ca695ac9
JB
11558 else
11559 {
11560 data_section ();
bbf6f052 11561
ca695ac9
JB
11562 bc_emit_data_labeldef (l);
11563 bc_output_data_constructor (constr);
11564 }
bbf6f052 11565
ca695ac9
JB
11566
11567 /* Add reference to pointer table and recall pointer to stack;
11568 this code is common for both types of constructors: literals
0f41302f 11569 and non-literals. */
bbf6f052 11570
de7d9320
JB
11571 ptroffs = bc_define_pointer (l);
11572 bc_emit_instruction (constP, ptroffs);
d39985fa 11573
0f41302f 11574 /* This is all that has to be done if it's a literal. */
ca695ac9
JB
11575 if (TREE_CONSTANT (constr))
11576 return;
bbf6f052 11577
ca695ac9
JB
11578
11579 /* At this point, we have the pointer to the structure on top of the stack.
0f41302f 11580 Generate sequences of store_memory calls for the constructor. */
ca695ac9
JB
11581
11582	  /* Constructor type is a structure.  */
11583 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
e7c33f54 11584 {
ca695ac9
JB
11585 register tree elt;
11586
11587 /* If the constructor has fewer fields than the structure,
11588 clear the whole structure first. */
11589
11590 if (list_length (CONSTRUCTOR_ELTS (constr))
11591 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11592 {
6d6e61ce 11593 bc_emit_instruction (duplicate);
ca695ac9
JB
11594 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11595 bc_emit_instruction (clearBLK);
11596 }
11597
11598 /* Store each element of the constructor into the corresponding
11599 field of TARGET. */
11600
11601 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11602 {
11603 register tree field = TREE_PURPOSE (elt);
11604 register enum machine_mode mode;
11605 int bitsize;
11606 int bitpos;
11607 int unsignedp;
11608
11609 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11610 mode = DECL_MODE (field);
11611 unsignedp = TREE_UNSIGNED (field);
11612
11613 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11614
11615 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11616 /* The alignment of TARGET is
11617 at least what its type requires. */
11618 VOIDmode, 0,
11619 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11620 int_size_in_bytes (TREE_TYPE (constr)));
11621 }
e7c33f54 11622 }
ca695ac9
JB
11623 else
11624
11625	  /* Constructor type is an array.  */
11626 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11627 {
11628 register tree elt;
11629 register int i;
11630 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11631 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11632 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11633 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11634
11635	      /* If the constructor has fewer elements than the array,
11636		 clear the whole array first.  */
11637
11638 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11639 {
6d6e61ce 11640 bc_emit_instruction (duplicate);
ca695ac9
JB
11641 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11642 bc_emit_instruction (clearBLK);
11643 }
11644
11645
11646 /* Store each element of the constructor into the corresponding
0f41302f 11647 element of TARGET, determined by counting the elements. */
ca695ac9
JB
11648
11649 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11650 elt;
11651 elt = TREE_CHAIN (elt), i++)
11652 {
11653 register enum machine_mode mode;
11654 int bitsize;
11655 int bitpos;
11656 int unsignedp;
11657
11658 mode = TYPE_MODE (elttype);
11659 bitsize = GET_MODE_BITSIZE (mode);
11660 unsignedp = TREE_UNSIGNED (elttype);
11661
11662 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11663 /* * TYPE_SIZE_UNIT (elttype) */ );
11664
11665 bc_store_field (elt, bitsize, bitpos, mode,
11666 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11667 /* The alignment of TARGET is
11668 at least what its type requires. */
11669 VOIDmode, 0,
11670 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11671 int_size_in_bytes (TREE_TYPE (constr)));
11672 }
11673
11674 }
11675}
bbf6f052 11676
bbf6f052 11677
ca695ac9
JB
11678/* Store the value of EXP (an expression tree) into member FIELD of
11679 structure at address on stack, which has type TYPE, mode MODE and
11680 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11681 structure.
bbf6f052 11682
ca695ac9
JB
11683 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11684 TOTAL_SIZE is its size in bytes, or -1 if variable. */
bbf6f052 11685
ca695ac9
JB
11686void
11687bc_store_field (field, bitsize, bitpos, mode, exp, type,
11688 value_mode, unsignedp, align, total_size)
11689 int bitsize, bitpos;
11690 enum machine_mode mode;
11691 tree field, exp, type;
11692 enum machine_mode value_mode;
11693 int unsignedp;
11694 int align;
11695 int total_size;
11696{
bbf6f052 11697
ca695ac9
JB
11698	  /* Expand the value expression, then copy the target pointer on top of it.  */
11699 bc_expand_expr (exp);
11700 bc_emit_instruction (over);
bbf6f052 11701
bbf6f052 11702
ca695ac9
JB
11703 /* If the component is a bit field, we cannot use addressing to access
11704 it. Use bit-field techniques to store in it. */
bbf6f052 11705
ca695ac9
JB
11706 if (DECL_BIT_FIELD (field))
11707 {
11708 bc_store_bit_field (bitpos, bitsize, unsignedp);
11709 return;
11710 }
11711 else
11712 /* Not bit field */
11713 {
11714 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11715
11716	      /* Advance the pointer to the desired member.  */
11717 if (offset)
11718 bc_emit_instruction (addconstPSI, offset);
11719
11720 /* Store */
11721 bc_store_memory (type, field);
11722 }
11723}
bbf6f052 11724
ca695ac9
JB
11725
11726/* Store SI/SU in a bit field.  */
0f41302f 11727
bbf6f052 11728void
ca695ac9
JB
11729bc_store_bit_field (offset, size, unsignedp)
11730 int offset, size, unsignedp;
bbf6f052 11731{
ca695ac9
JB
11732	  /* Push the bit field's offset and size.  */
11733 bc_push_offset_and_size (offset, size);
bbf6f052 11734
ca695ac9
JB
11735 /* Store */
11736 bc_emit_instruction (sstoreBI);
11737}
e87b4f3f 11738
88d3b7f0 11739
ca695ac9 11740/* Load SI/SU from a bit field.  */
0f41302f 11741
ca695ac9
JB
11742void
11743bc_load_bit_field (offset, size, unsignedp)
11744 int offset, size, unsignedp;
11745{
11746	  /* Push the bit field's offset and size.  */
11747 bc_push_offset_and_size (offset, size);
88d3b7f0 11748
ca695ac9
JB
11749	  /* Load: sign-extend if signed, else zero-extend.  */
11750 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11751}
709f5be1 11752
bbf6f052 11753
ca695ac9
JB
11754/* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11755 (adjust stack pointer upwards), negative means add that number of
11756 levels (adjust the stack pointer downwards). Only positive values
0f41302f 11757 normally make sense. */
bbf6f052 11758
ca695ac9
JB
11759void
11760bc_adjust_stack (nlevels)
11761 int nlevels;
11762{
11763 switch (nlevels)
11764 {
11765 case 0:
11766 break;
11767
11768 case 2:
11769	      bc_emit_instruction (drop);
11770	      /* Fall through to drop a second level.  */
11771 case 1:
11772 bc_emit_instruction (drop);
11773 break;
11774
11775 default:
11776
11777 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11778 stack_depth -= nlevels;
11779 }
11780
a68c7608
RS
11781#if defined (VALIDATE_STACK_FOR_BC)
11782 VALIDATE_STACK_FOR_BC ();
bbf6f052
RK
11783#endif
11784}