/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"

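/* CEIL (x, y) divides X by Y, rounding any remainder upward;
   e.g. CEIL (7, 4) is 2.  It is used below to count how many
   words are needed to hold an object of a given size.  */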
#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

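/* In both structures, TO_ADDR is the (possibly auto-incremented) address
   for the destination, EXPLICIT_INC_TO is -1, 0 or +1 according to whether
   the address is adjusted by an explicit add before each move, not at all,
   or after each move, and REVERSE nonzero means the block is processed
   from the highest address downward.  */
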
/* Used to generate bytecodes:  keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern void (*interim_eh_hook) PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

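/* For example, with the default MOVE_RATIO of 15, emit_block_move below
   expands a word-aligned 32-byte copy inline (8 SImode moves on a 32-bit
   target, and 8 < 15); only larger or worse-aligned copies fall back to
   a movstr pattern or a library call.  */
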
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */
void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
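
/* Each DEF_MODEMAP entry in modemap.def expands to the three assignments
   in the macro above; a hypothetical entry such as

	DEF_MODEMAP (SImode, beSI, beuSI, constSI, loadSI, storeSI)

   would record constSI, loadSI and storeSI as the const, load and store
   bytecodes for SImode.  The real entries live in modemap.def.  */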

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
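
/* The direct_load and direct_store flags computed above are consulted
   when accessing fields: if, say, no insn can load an HImode value
   straight from memory into any hard register, direct_load[(int) HImode]
   stays zero and HImode fields are not accessed in HImode directly (see
   the uses in convert_move and convert_modes below).  */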

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
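
/* Typical use, sketched from the comments above: when expanding `a = b++',
   the expander queues the increment of `b' with enqueue_insn, passes the
   resulting QUEUED rtx through protect_from_queue to obtain the
   pre-increment value to store into `a', and later flushes the queued
   increment with emit_queue.  */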

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
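
/* A hypothetical call, for illustration: with a QImode constant,

	rtx x = convert_modes (SImode, QImode, GEN_INT (-1), 1);

   takes the CONST_INT branch above and yields (const_int 255), since the
   eight low-order bits are zero-extended; with UNSIGNEDP of 0 the result
   would remain (const_int -1).  */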

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
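
/* For illustration: a 7-byte copy between word-aligned operands on a
   typical 32-bit target with MOVE_MAX of 4 is emitted widest mode first:
   one SImode move, then one HImode move, then one QImode move.  When
   auto-increment addressing is in use, the address update is a side
   effect of each move; otherwise explicit add insns or displacements are
   used, as set up above.  */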

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
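
/* E.g. move_by_pieces_ninsns (7, 4) on a 32-bit target with MOVE_MAX of 4
   counts 7/4 + 3/2 + 1/1 = 3 insns.  emit_block_move compares this count
   against MOVE_RATIO to decide whether to expand a copy inline.  */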

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }
}
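
/* Hypothetical use, for illustration: to copy a 16-byte, word-aligned
   BLKmode aggregate from SRC to DST (both MEM rtx's), a caller writes

	emit_block_move (dst, src, GEN_INT (16), 4);

   With the default MOVE_RATIO of 15 this is expanded by move_by_pieces
   as four SImode moves on a 32-bit target; a non-constant size instead
   goes through a movstr pattern or the memcpy/bcopy library call.  */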
1718\f
1719/* Copy all or part of a value X into registers starting at REGNO.
1720 The number of registers to be filled is NREGS. */
1721
1722void
1723move_block_to_reg (regno, x, nregs, mode)
1724 int regno;
1725 rtx x;
1726 int nregs;
1727 enum machine_mode mode;
1728{
1729 int i;
1730 rtx pat, last;
1731
72bb9717
RK
1732 if (nregs == 0)
1733 return;
1734
bbf6f052
RK
1735 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1736 x = validize_mem (force_const_mem (mode, x));
1737
1738 /* See if the machine can do this with a load multiple insn. */
1739#ifdef HAVE_load_multiple
c3a02afe 1740 if (HAVE_load_multiple)
bbf6f052 1741 {
c3a02afe
RK
1742 last = get_last_insn ();
1743 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1744 GEN_INT (nregs));
1745 if (pat)
1746 {
1747 emit_insn (pat);
1748 return;
1749 }
1750 else
1751 delete_insns_since (last);
bbf6f052 1752 }
bbf6f052
RK
1753#endif
1754
1755 for (i = 0; i < nregs; i++)
1756 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1757 operand_subword_force (x, i, mode));
1758}
1759
1760/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1761 The number of registers to be filled is NREGS. SIZE indicates the number
1762 of bytes in the object X. */
1763
bbf6f052
RK
1764
1765void
0040593d 1766move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1767 int regno;
1768 rtx x;
1769 int nregs;
0040593d 1770 int size;
bbf6f052
RK
1771{
1772 int i;
1773 rtx pat, last;
1774
0040593d
JW
1775 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1776 to the left before storing to memory. */
1777 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1778 {
1779 rtx tem = operand_subword (x, 0, 1, BLKmode);
1780 rtx shift;
1781
1782 if (tem == 0)
1783 abort ();
1784
1785 shift = expand_shift (LSHIFT_EXPR, word_mode,
1786 gen_rtx (REG, word_mode, regno),
1787 build_int_2 ((UNITS_PER_WORD - size)
1788 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1789 emit_move_insn (tem, shift);
1790 return;
1791 }
1792
bbf6f052
RK
1793 /* See if the machine can do this with a store multiple insn. */
1794#ifdef HAVE_store_multiple
c3a02afe 1795 if (HAVE_store_multiple)
bbf6f052 1796 {
c3a02afe
RK
1797 last = get_last_insn ();
1798 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1799 GEN_INT (nregs));
1800 if (pat)
1801 {
1802 emit_insn (pat);
1803 return;
1804 }
1805 else
1806 delete_insns_since (last);
bbf6f052 1807 }
bbf6f052
RK
1808#endif
1809
1810 for (i = 0; i < nregs; i++)
1811 {
1812 rtx tem = operand_subword (x, i, 1, BLKmode);
1813
1814 if (tem == 0)
1815 abort ();
1816
1817 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1818 }
1819}
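/* Worked example of the big-endian adjustment above, as a standalone
   C sketch (a 4-byte word size is an assumption; not gcc internals).
   A SIZE-byte value sitting in the low bits of a register must be
   shifted left by (UNITS_PER_WORD - SIZE) * BITS_PER_UNIT bits so
   that a big-endian word store puts its bytes at the low addresses. */

#include <stdint.h>
#include <string.h>

static void
store_small_block_big_endian (unsigned char *mem, uint32_t reg, int size)
{
  /* Left-justify the SIZE significant bytes of REG.  */
  uint32_t shifted = reg << ((4 - size) * 8);
  unsigned char bytes[4];

  /* Emulate the big-endian word store: most significant byte first.  */
  bytes[0] = shifted >> 24;
  bytes[1] = shifted >> 16;
  bytes[2] = shifted >> 8;
  bytes[3] = shifted;
  memcpy (mem, bytes, size);
}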
1820
1821/* Add a USE expression for REG to the (possibly empty) list pointed
1822 to by CALL_FUSAGE. REG must denote a hard register. */
1823
1824void
1825use_reg (call_fusage, reg)
1826 rtx *call_fusage, reg;
1827{
1828 if (GET_CODE (reg) != REG
1829 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1830 abort ();
1831
1832 *call_fusage
1833 = gen_rtx (EXPR_LIST, VOIDmode,
1834 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1835}
1836
1837/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1838 starting at REGNO. All of these registers must be hard registers. */
1839
1840void
1841use_regs (call_fusage, regno, nregs)
1842 rtx *call_fusage;
1843 int regno;
1844 int nregs;
1845{
1846 int i;
1847
1848 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1849 abort ();
1850
1851 for (i = 0; i < nregs; i++)
1852 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1853}
1854\f
1855/* Generate several move instructions to clear LEN bytes of block TO.
1856 (A MEM rtx with BLKmode). The caller must pass TO through
1857 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
1858 we can assume. */
1859
1860static void
1861clear_by_pieces (to, len, align)
1862 rtx to;
1863 int len, align;
1864{
1865 struct clear_by_pieces data;
1866 rtx to_addr = XEXP (to, 0);
1867 int max_size = MOVE_MAX + 1;
1868
1869 data.offset = 0;
1870 data.to_addr = to_addr;
1871 data.to = to;
1872 data.autinc_to
1873 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1874 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1875
1876 data.explicit_inc_to = 0;
1877 data.reverse
1878 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1879 if (data.reverse) data.offset = len;
1880 data.len = len;
1881
1882 data.to_struct = MEM_IN_STRUCT_P (to);
1883
1884 /* If copying requires more than two move insns,
1885 copy addresses to registers (to make displacements shorter)
1886 and use post-increment if available. */
1887 if (!data.autinc_to
1888 && move_by_pieces_ninsns (len, align) > 2)
1889 {
1890#ifdef HAVE_PRE_DECREMENT
1891 if (data.reverse && ! data.autinc_to)
1892 {
1893 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1894 data.autinc_to = 1;
1895 data.explicit_inc_to = -1;
1896 }
1897#endif
1898#ifdef HAVE_POST_INCREMENT
1899 if (! data.reverse && ! data.autinc_to)
1900 {
1901 data.to_addr = copy_addr_to_reg (to_addr);
1902 data.autinc_to = 1;
1903 data.explicit_inc_to = 1;
1904 }
1905#endif
1906 if (!data.autinc_to && CONSTANT_P (to_addr))
1907 data.to_addr = copy_addr_to_reg (to_addr);
1908 }
1909
1910 if (! SLOW_UNALIGNED_ACCESS
1911 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1912 align = MOVE_MAX;
1913
1914 /* First move what we can in the largest integer mode, then go to
1915 successively smaller modes. */
1916
1917 while (max_size > 1)
1918 {
1919 enum machine_mode mode = VOIDmode, tmode;
1920 enum insn_code icode;
1921
1922 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1923 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1924 if (GET_MODE_SIZE (tmode) < max_size)
1925 mode = tmode;
1926
1927 if (mode == VOIDmode)
1928 break;
1929
1930 icode = mov_optab->handlers[(int) mode].insn_code;
1931 if (icode != CODE_FOR_nothing
1932 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1933 GET_MODE_SIZE (mode)))
1934 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
1935
1936 max_size = GET_MODE_SIZE (mode);
1937 }
1938
1939 /* The code above should have handled everything. */
1940 if (data.len != 0)
1941 abort ();
1942}
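/* The widest-mode-first strategy above, restated as a standalone C
   sketch (assumed power-of-two store sizes of 8/4/2/1 bytes; memcpy
   stands in for the mode-sized move insns).  Not gcc internals. */

#include <stdint.h>
#include <stddef.h>
#include <string.h>

static void
clear_by_pieces_sketch (unsigned char *p, size_t len)
{
  uint64_t z8 = 0;
  uint32_t z4 = 0;
  uint16_t z2 = 0;

  /* Clear what we can in the largest size, then go to successively
     smaller sizes, just as the mode loop above does.  */
  while (len >= 8) { memcpy (p, &z8, 8); p += 8; len -= 8; }
  while (len >= 4) { memcpy (p, &z4, 4); p += 4; len -= 4; }
  while (len >= 2) { memcpy (p, &z2, 2); p += 2; len -= 2; }
  while (len >= 1) { *p++ = 0; len -= 1; }
}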
1943
1944/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
1945 with move instructions for mode MODE. GENFUN is the gen_... function
1946 to make a move insn for that mode. DATA has all the other info. */
1947
1948static void
1949clear_by_pieces_1 (genfun, mode, data)
1950 rtx (*genfun) ();
1951 enum machine_mode mode;
1952 struct clear_by_pieces *data;
1953{
1954 register int size = GET_MODE_SIZE (mode);
1955 register rtx to1;
1956
1957 while (data->len >= size)
1958 {
1959 if (data->reverse) data->offset -= size;
1960
1961 to1 = (data->autinc_to
1962 ? gen_rtx (MEM, mode, data->to_addr)
1963 : change_address (data->to, mode,
1964 plus_constant (data->to_addr, data->offset)));
1965 MEM_IN_STRUCT_P (to1) = data->to_struct;
1966
1967#ifdef HAVE_PRE_DECREMENT
1968 if (data->explicit_inc_to < 0)
1969 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1970#endif
1971
1972 emit_insn ((*genfun) (to1, const0_rtx));
1973#ifdef HAVE_POST_INCREMENT
1974 if (data->explicit_inc_to > 0)
1975 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1976#endif
1977
1978 if (! data->reverse) data->offset += size;
1979
1980 data->len -= size;
1981 }
1982}
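/* Sketch of the explicit_inc_to bookkeeping above in plain C: with
   post-increment the address advances after each store, with
   pre-decrement it retreats before each store, so the block fills
   forward or backward respectively.  Ordinary pointer arithmetic
   stands in for the auto-increment addressing modes; illustrative
   only. */

static void
clear_with_autoinc_sketch (unsigned char *to, int len, int reverse)
{
  unsigned char *p = reverse ? to + len : to;

  while (len-- > 0)
    {
      if (reverse)
        *--p = 0;       /* PRE_DEC analogue */
      else
        *p++ = 0;       /* POST_INC analogue */
    }
}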
1983\f
1984/* Write zeros through the storage of OBJECT.
1985 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
1986 the maximum alignment we can assume it has, measured in bytes. */
1987
1988void
1989clear_storage (object, size, align)
1990 rtx object;
1991 rtx size;
1992 int align;
1993{
1994 if (GET_MODE (object) == BLKmode)
1995 {
1996 object = protect_from_queue (object, 1);
1997 size = protect_from_queue (size, 0);
1998
1999 if (GET_CODE (size) == CONST_INT
2000 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2001 clear_by_pieces (object, INTVAL (size), align);
2002
2003 else
2004 {
2005 /* Try the most limited insn first, because there's no point
2006 including more than one in the machine description unless
2007 the more limited one has some advantage. */
2008
2009 rtx opalign = GEN_INT (align);
2010 enum machine_mode mode;
2011
2012 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2013 mode = GET_MODE_WIDER_MODE (mode))
2014 {
2015 enum insn_code code = clrstr_optab[(int) mode];
2016
2017 if (code != CODE_FOR_nothing
2018 /* We don't need MODE to be narrower than
2019 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2020 the mode mask, as it is returned by the macro, it will
2021 definitely be less than the actual mode mask. */
2022 && ((GET_CODE (size) == CONST_INT
2023 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2024 <= GET_MODE_MASK (mode)))
2025 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2026 && (insn_operand_predicate[(int) code][0] == 0
2027 || (*insn_operand_predicate[(int) code][0]) (object,
2028 BLKmode))
2029 && (insn_operand_predicate[(int) code][2] == 0
2030 || (*insn_operand_predicate[(int) code][2]) (opalign,
2031 VOIDmode)))
2032 {
2033 rtx op1;
2034 rtx last = get_last_insn ();
2035 rtx pat;
2036
2037 op1 = convert_to_mode (mode, size, 1);
2038 if (insn_operand_predicate[(int) code][1] != 0
2039 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2040 mode))
2041 op1 = copy_to_mode_reg (mode, op1);
2042
2043 pat = GEN_FCN ((int) code) (object, op1, opalign);
2044 if (pat)
2045 {
2046 emit_insn (pat);
2047 return;
2048 }
2049 else
2050 delete_insns_since (last);
2051 }
2052 }
2053
2054
2055#ifdef TARGET_MEM_FUNCTIONS
2056 emit_library_call (memset_libfunc, 0,
2057 VOIDmode, 3,
2058 XEXP (object, 0), Pmode,
2059 const0_rtx, TYPE_MODE (integer_type_node),
2060 convert_to_mode (TYPE_MODE (sizetype),
2061 size, TREE_UNSIGNED (sizetype)),
2062 TYPE_MODE (sizetype));
2063#else
2064 emit_library_call (bzero_libfunc, 0,
2065 VOIDmode, 2,
2066 XEXP (object, 0), Pmode,
2067 convert_to_mode (TYPE_MODE (integer_type_node),
2068 size,
2069 TREE_UNSIGNED (integer_type_node)),
2070 TYPE_MODE (integer_type_node));
2071#endif
2072 }
2073 }
2074 else
2075 emit_move_insn (object, const0_rtx);
2076}
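/* The dispatch above, reduced to its shape in plain C: a small block
   is cleared inline piece by piece, anything larger goes through the
   library call.  The 16-byte threshold is an assumption standing in
   for the MOVE_RATIO test; not gcc internals. */

#include <string.h>
#include <stddef.h>

static void
clear_storage_sketch (void *object, size_t size)
{
  if (size <= 16)
    {
      unsigned char *p = object;

      while (size-- > 0)
        *p++ = 0;               /* clear_by_pieces analogue */
    }
  else
    memset (object, 0, size);   /* memset_libfunc analogue */
}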
2077
2078/* Generate code to copy Y into X.
2079 Both Y and X must have the same mode, except that
2080 Y can be a constant with VOIDmode.
2081 This mode cannot be BLKmode; use emit_block_move for that.
2082
2083 Return the last instruction emitted. */
2084
2085rtx
2086emit_move_insn (x, y)
2087 rtx x, y;
2088{
2089 enum machine_mode mode = GET_MODE (x);
2090
2091 x = protect_from_queue (x, 1);
2092 y = protect_from_queue (y, 0);
2093
2094 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2095 abort ();
2096
2097 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2098 y = force_const_mem (mode, y);
2099
2100 /* If X or Y are memory references, verify that their addresses are valid
2101 for the machine. */
2102 if (GET_CODE (x) == MEM
2103 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2104 && ! push_operand (x, GET_MODE (x)))
2105 || (flag_force_addr
2106 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2107 x = change_address (x, VOIDmode, XEXP (x, 0));
2108
2109 if (GET_CODE (y) == MEM
2110 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2111 || (flag_force_addr
2112 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2113 y = change_address (y, VOIDmode, XEXP (y, 0));
2114
2115 if (mode == BLKmode)
2116 abort ();
2117
2118 return emit_move_insn_1 (x, y);
2119}
2120
2121/* Low level part of emit_move_insn.
2122 Called just like emit_move_insn, but assumes X and Y
2123 are basically valid. */
2124
2125rtx
2126emit_move_insn_1 (x, y)
2127 rtx x, y;
2128{
2129 enum machine_mode mode = GET_MODE (x);
2130 enum machine_mode submode;
2131 enum mode_class class = GET_MODE_CLASS (mode);
2132 int i;
2133
2134 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2135 return
2136 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2137
2138 /* Expand complex moves by moving real part and imag part, if possible. */
2139 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2140 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2141 * BITS_PER_UNIT),
2142 (class == MODE_COMPLEX_INT
2143 ? MODE_INT : MODE_FLOAT),
2144 0))
2145 && (mov_optab->handlers[(int) submode].insn_code
2146 != CODE_FOR_nothing))
2147 {
2148 /* Don't split destination if it is a stack push. */
2149 int stack = push_operand (x, GET_MODE (x));
2150 rtx insns;
2151
2152 /* If this is a stack, push the highpart first, so it
2153 will be in the argument order.
2154
2155 In that case, change_address is used only to convert
2156 the mode, not to change the address. */
2157 if (stack)
2158 {
2159 /* Note that the real part always precedes the imag part in memory
2160 regardless of machine's endianness. */
2161#ifdef STACK_GROWS_DOWNWARD
2162 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2163 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2164 gen_imagpart (submode, y)));
2165 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2166 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2167 gen_realpart (submode, y)));
2168#else
2169 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2170 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2171 gen_realpart (submode, y)));
2172 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2173 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2174 gen_imagpart (submode, y)));
2175#endif
2176 }
2177 else
2178 {
2179 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2180 (gen_realpart (submode, x), gen_realpart (submode, y)));
2181 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2182 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2183 }
2184
2185 return get_last_insn ();
2186 }
2187
2188 /* This will handle any multi-word mode that lacks a move_insn pattern.
2189 However, you will get better code if you define such patterns,
2190 even if they must turn into multiple assembler instructions. */
2191 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2192 {
2193 rtx last_insn = 0;
2194 rtx insns;
2195
2196#ifdef PUSH_ROUNDING
2197
2198 /* If X is a push on the stack, do the push now and replace
2199 X with a reference to the stack pointer. */
2200 if (push_operand (x, GET_MODE (x)))
2201 {
2202 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2203 x = change_address (x, VOIDmode, stack_pointer_rtx);
2204 }
2205#endif
2206
2207 /* Show the output dies here. */
2208 if (x != y)
2209 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2210
2211 for (i = 0;
2212 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2213 i++)
2214 {
2215 rtx xpart = operand_subword (x, i, 1, mode);
2216 rtx ypart = operand_subword (y, i, 1, mode);
2217
2218 /* If we can't get a part of Y, put Y into memory if it is a
2219 constant. Otherwise, force it into a register. If we still
2220 can't get a part of Y, abort. */
2221 if (ypart == 0 && CONSTANT_P (y))
2222 {
2223 y = force_const_mem (mode, y);
2224 ypart = operand_subword (y, i, 1, mode);
2225 }
2226 else if (ypart == 0)
2227 ypart = operand_subword_force (y, i, mode);
2228
2229 if (xpart == 0 || ypart == 0)
2230 abort ();
2231
2232 last_insn = emit_move_insn (xpart, ypart);
2233 }
2234
2235 return last_insn;
2236 }
2237 else
2238 abort ();
2239}
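/* The complex-mode split above, pictured in plain C (C99 complex;
   purely conceptual, not gcc internals): a move in a complex mode
   decomposes into two moves in the scalar submode, one for the real
   part and one for the imaginary part. */

#include <complex.h>

static void
move_complex_sketch (double _Complex *x, const double _Complex *y)
{
  double re = creal (*y);       /* gen_realpart analogue */
  double im = cimag (*y);       /* gen_imagpart analogue */

  *x = re + im * I;             /* two submode stores, conceptually */
}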
2240\f
2241/* Pushing data onto the stack. */
2242
2243/* Push a block of length SIZE (perhaps variable)
2244 and return an rtx to address the beginning of the block.
2245 Note that it is not possible for the value returned to be a QUEUED.
2246 The value may be virtual_outgoing_args_rtx.
2247
2248 EXTRA is the number of bytes of padding to push in addition to SIZE.
2249 BELOW nonzero means this padding comes at low addresses;
2250 otherwise, the padding comes at high addresses. */
2251
2252rtx
2253push_block (size, extra, below)
2254 rtx size;
2255 int extra, below;
2256{
2257 register rtx temp;
2258
2259 size = convert_modes (Pmode, ptr_mode, size, 1);
2260 if (CONSTANT_P (size))
2261 anti_adjust_stack (plus_constant (size, extra));
2262 else if (GET_CODE (size) == REG && extra == 0)
2263 anti_adjust_stack (size);
2264 else
2265 {
2266 rtx temp = copy_to_mode_reg (Pmode, size);
2267 if (extra != 0)
2268 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2269 temp, 0, OPTAB_LIB_WIDEN);
2270 anti_adjust_stack (temp);
2271 }
2272
2273#ifdef STACK_GROWS_DOWNWARD
2274 temp = virtual_outgoing_args_rtx;
2275 if (extra != 0 && below)
2276 temp = plus_constant (temp, extra);
2277#else
2278 if (GET_CODE (size) == CONST_INT)
2279 temp = plus_constant (virtual_outgoing_args_rtx,
2280 - INTVAL (size) - (below ? 0 : extra));
2281 else if (extra != 0 && !below)
2282 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2283 negate_rtx (Pmode, plus_constant (size, extra)));
2284 else
2285 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2286 negate_rtx (Pmode, size));
2287#endif
2288
2289 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2290}
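/* Sketch of the address arithmetic above for a downward-growing
   stack, in plain C with byte-sized quantities (illustrative only).
   After the stack pointer drops by SIZE + EXTRA, the usable block
   starts at the new stack pointer, skipping EXTRA when the padding
   sits below the data. */

static unsigned char *
push_block_sketch (unsigned char **sp, unsigned long size,
                   unsigned long extra, int below)
{
  *sp -= size + extra;              /* anti_adjust_stack analogue */
  return *sp + (below ? extra : 0); /* padding at low addresses */
}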
2291
2292rtx
2293gen_push_operand ()
2294{
2295 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2296}
2297
2298/* Generate code to push X onto the stack, assuming it has mode MODE and
2299 type TYPE.
2300 MODE is redundant except when X is a CONST_INT (since they don't
2301 carry mode info).
2302 SIZE is an rtx for the size of data to be copied (in bytes),
2303 needed only if X is BLKmode.
2304
2305 ALIGN (in bytes) is the maximum alignment we can assume.
2306
2307 If PARTIAL and REG are both nonzero, then copy that many of the first
2308 words of X into registers starting with REG, and push the rest of X.
2309 The amount of space pushed is decreased by PARTIAL words,
2310 rounded *down* to a multiple of PARM_BOUNDARY.
2311 REG must be a hard register in this case.
2312 If REG is zero but PARTIAL is not, take all other actions for an
2313 argument partially in registers, but do not actually load any
2314 registers.
2315
2316 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2317 This is ignored if an argument block has already been allocated.
2318
2319 On a machine that lacks real push insns, ARGS_ADDR is the address of
2320 the bottom of the argument block for this call. We use indexing off there
2321 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2322 argument block has not been preallocated.
2323
2324 ARGS_SO_FAR is the size of args previously pushed for this call. */
2325
2326void
2327emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2328 args_addr, args_so_far)
2329 register rtx x;
2330 enum machine_mode mode;
2331 tree type;
2332 rtx size;
2333 int align;
2334 int partial;
2335 rtx reg;
2336 int extra;
2337 rtx args_addr;
2338 rtx args_so_far;
2339{
2340 rtx xinner;
2341 enum direction stack_direction
2342#ifdef STACK_GROWS_DOWNWARD
2343 = downward;
2344#else
2345 = upward;
2346#endif
2347
2348 /* Decide where to pad the argument: `downward' for below,
2349 `upward' for above, or `none' for don't pad it.
2350 Default is below for small data on big-endian machines; else above. */
2351 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2352
2353 /* Invert direction if stack is post-update. */
2354 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2355 if (where_pad != none)
2356 where_pad = (where_pad == downward ? upward : downward);
2357
2358 xinner = x = protect_from_queue (x, 0);
2359
2360 if (mode == BLKmode)
2361 {
2362 /* Copy a block into the stack, entirely or partially. */
2363
2364 register rtx temp;
2365 int used = partial * UNITS_PER_WORD;
2366 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2367 int skip;
2368
2369 if (size == 0)
2370 abort ();
2371
2372 used -= offset;
2373
2374 /* USED is now the # of bytes we need not copy to the stack
2375 because registers will take care of them. */
2376
2377 if (partial != 0)
2378 xinner = change_address (xinner, BLKmode,
2379 plus_constant (XEXP (xinner, 0), used));
2380
2381 /* If the partial register-part of the arg counts in its stack size,
2382 skip the part of stack space corresponding to the registers.
2383 Otherwise, start copying to the beginning of the stack space,
2384 by setting SKIP to 0. */
2385#ifndef REG_PARM_STACK_SPACE
2386 skip = 0;
2387#else
2388 skip = used;
2389#endif
2390
2391#ifdef PUSH_ROUNDING
2392 /* Do it with several push insns if that doesn't take lots of insns
2393 and if there is no difficulty with push insns that skip bytes
2394 on the stack for alignment purposes. */
2395 if (args_addr == 0
2396 && GET_CODE (size) == CONST_INT
2397 && skip == 0
2398 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2399 < MOVE_RATIO)
2400 /* Here we avoid the case of a structure whose weak alignment
2401 forces many pushes of a small amount of data,
2402 and such small pushes do rounding that causes trouble. */
2403 && ((! SLOW_UNALIGNED_ACCESS)
2404 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2405 || PUSH_ROUNDING (align) == align)
2406 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2407 {
2408 /* Push padding now if padding above and stack grows down,
2409 or if padding below and stack grows up.
2410 But if space already allocated, this has already been done. */
2411 if (extra && args_addr == 0
2412 && where_pad != none && where_pad != stack_direction)
2413 anti_adjust_stack (GEN_INT (extra));
2414
2415 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2416 INTVAL (size) - used, align);
2417 }
2418 else
2419#endif /* PUSH_ROUNDING */
2420 {
2421 /* Otherwise make space on the stack and copy the data
2422 to the address of that space. */
2423
2424 /* Deduct words put into registers from the size we must copy. */
2425 if (partial != 0)
2426 {
2427 if (GET_CODE (size) == CONST_INT)
2428 size = GEN_INT (INTVAL (size) - used);
2429 else
2430 size = expand_binop (GET_MODE (size), sub_optab, size,
2431 GEN_INT (used), NULL_RTX, 0,
2432 OPTAB_LIB_WIDEN);
2433 }
2434
2435 /* Get the address of the stack space.
2436 In this case, we do not deal with EXTRA separately.
2437 A single stack adjust will do. */
2438 if (! args_addr)
2439 {
2440 temp = push_block (size, extra, where_pad == downward);
2441 extra = 0;
2442 }
2443 else if (GET_CODE (args_so_far) == CONST_INT)
2444 temp = memory_address (BLKmode,
2445 plus_constant (args_addr,
2446 skip + INTVAL (args_so_far)));
2447 else
2448 temp = memory_address (BLKmode,
2449 plus_constant (gen_rtx (PLUS, Pmode,
2450 args_addr, args_so_far),
2451 skip));
2452
2453 /* TEMP is the address of the block. Copy the data there. */
2454 if (GET_CODE (size) == CONST_INT
2455 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2456 < MOVE_RATIO))
2457 {
2458 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2459 INTVAL (size), align);
2460 goto ret;
2461 }
2462 /* Try the most limited insn first, because there's no point
2463 including more than one in the machine description unless
2464 the more limited one has some advantage. */
2465#ifdef HAVE_movstrqi
2466 if (HAVE_movstrqi
2467 && GET_CODE (size) == CONST_INT
2468 && ((unsigned) INTVAL (size)
2469 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2470 {
2471 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2472 xinner, size, GEN_INT (align));
2473 if (pat != 0)
2474 {
2475 emit_insn (pat);
2476 goto ret;
2477 }
2478 }
2479#endif
2480#ifdef HAVE_movstrhi
2481 if (HAVE_movstrhi
2482 && GET_CODE (size) == CONST_INT
2483 && ((unsigned) INTVAL (size)
2484 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2485 {
2486 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2487 xinner, size, GEN_INT (align));
2488 if (pat != 0)
2489 {
2490 emit_insn (pat);
2491 goto ret;
2492 }
2493 }
2494#endif
2495#ifdef HAVE_movstrsi
2496 if (HAVE_movstrsi)
2497 {
2498 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2499 xinner, size, GEN_INT (align));
2500 if (pat != 0)
2501 {
2502 emit_insn (pat);
2503 goto ret;
2504 }
2505 }
2506#endif
2507#ifdef HAVE_movstrdi
2508 if (HAVE_movstrdi)
2509 {
2510 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2511 xinner, size, GEN_INT (align));
2512 if (pat != 0)
2513 {
2514 emit_insn (pat);
2515 goto ret;
2516 }
2517 }
2518#endif
2519
2520#ifndef ACCUMULATE_OUTGOING_ARGS
2521 /* If the source is referenced relative to the stack pointer,
2522 copy it to another register to stabilize it. We do not need
2523 to do this if we know that we won't be changing sp. */
2524
2525 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2526 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2527 temp = copy_to_reg (temp);
2528#endif
2529
2530 /* Make inhibit_defer_pop nonzero around the library call
2531 to force it to pop the bcopy-arguments right away. */
2532 NO_DEFER_POP;
2533#ifdef TARGET_MEM_FUNCTIONS
2534 emit_library_call (memcpy_libfunc, 0,
2535 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2536 convert_to_mode (TYPE_MODE (sizetype),
2537 size, TREE_UNSIGNED (sizetype)),
2538 TYPE_MODE (sizetype));
2539#else
2540 emit_library_call (bcopy_libfunc, 0,
2541 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2542 convert_to_mode (TYPE_MODE (integer_type_node),
2543 size,
2544 TREE_UNSIGNED (integer_type_node)),
2545 TYPE_MODE (integer_type_node));
2546#endif
2547 OK_DEFER_POP;
2548 }
2549 }
2550 else if (partial > 0)
2551 {
2552 /* Scalar partly in registers. */
2553
2554 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2555 int i;
2556 int not_stack;
2557 /* # words of start of argument
2558 that we must make space for but need not store. */
2559 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2560 int args_offset = INTVAL (args_so_far);
2561 int skip;
2562
2563 /* Push padding now if padding above and stack grows down,
2564 or if padding below and stack grows up.
2565 But if space already allocated, this has already been done. */
2566 if (extra && args_addr == 0
2567 && where_pad != none && where_pad != stack_direction)
2568 anti_adjust_stack (GEN_INT (extra));
2569
2570 /* If we make space by pushing it, we might as well push
2571 the real data. Otherwise, we can leave OFFSET nonzero
2572 and leave the space uninitialized. */
2573 if (args_addr == 0)
2574 offset = 0;
2575
2576 /* Now NOT_STACK gets the number of words that we don't need to
2577 allocate on the stack. */
2578 not_stack = partial - offset;
2579
2580 /* If the partial register-part of the arg counts in its stack size,
2581 skip the part of stack space corresponding to the registers.
2582 Otherwise, start copying to the beginning of the stack space,
2583 by setting SKIP to 0. */
2584#ifndef REG_PARM_STACK_SPACE
2585 skip = 0;
2586#else
2587 skip = not_stack;
2588#endif
2589
2590 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2591 x = validize_mem (force_const_mem (mode, x));
2592
2593 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2594 SUBREGs of such registers are not allowed. */
2595 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2596 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2597 x = copy_to_reg (x);
2598
2599 /* Loop over all the words allocated on the stack for this arg. */
2600 /* We can do it by words, because any scalar bigger than a word
2601 has a size a multiple of a word. */
2602#ifndef PUSH_ARGS_REVERSED
2603 for (i = not_stack; i < size; i++)
2604#else
2605 for (i = size - 1; i >= not_stack; i--)
2606#endif
2607 if (i >= not_stack + offset)
2608 emit_push_insn (operand_subword_force (x, i, mode),
2609 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2610 0, args_addr,
2611 GEN_INT (args_offset + ((i - not_stack + skip)
2612 * UNITS_PER_WORD)));
2613 }
2614 else
2615 {
2616 rtx addr;
2617
2618 /* Push padding now if padding above and stack grows down,
2619 or if padding below and stack grows up.
2620 But if space already allocated, this has already been done. */
2621 if (extra && args_addr == 0
2622 && where_pad != none && where_pad != stack_direction)
2623 anti_adjust_stack (GEN_INT (extra));
2624
2625#ifdef PUSH_ROUNDING
2626 if (args_addr == 0)
2627 addr = gen_push_operand ();
2628 else
2629#endif
2630 if (GET_CODE (args_so_far) == CONST_INT)
2631 addr
2632 = memory_address (mode,
2633 plus_constant (args_addr, INTVAL (args_so_far)));
2634 else
2635 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2636 args_so_far));
2637
2638 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2639 }
2640
2641 ret:
2642 /* If part should go in registers, copy that part
2643 into the appropriate registers. Do this now, at the end,
2644 since mem-to-mem copies above may do function calls. */
2645 if (partial > 0 && reg != 0)
2646 move_block_to_reg (REGNO (reg), x, partial, mode);
2647
2648 if (extra && args_addr == 0 && where_pad == stack_direction)
2649 anti_adjust_stack (GEN_INT (extra));
2650}
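/* Worked example of the PARTIAL bookkeeping above, under the assumed
   parameters UNITS_PER_WORD = 4 and PARM_BOUNDARY = 64 bits: the
   stack space skipped for the register part is rounded *down* to a
   multiple of PARM_BOUNDARY, so some register-covered bytes get
   copied to the stack anyway.  Standalone, illustrative only. */

#include <stdio.h>

int
main (void)
{
  int units_per_word = 4;           /* assumption */
  int parm_boundary_bytes = 8;      /* 64-bit PARM_BOUNDARY, assumption */
  int partial = 3;                  /* three words already in registers */

  int used = partial * units_per_word;      /* 12 bytes are in regs */
  int offset = used % parm_boundary_bytes;  /* 4 bytes past a boundary */
  used -= offset;                           /* skip only 8 bytes */

  printf ("bytes covered by registers: %d\n", partial * units_per_word);
  printf ("bytes skipped on the stack: %d\n", used);
  printf ("register bytes copied anyway: %d\n", offset);
  return 0;
}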
2651\f
2652/* Expand an assignment that stores the value of FROM into TO.
2653 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2654 (This may contain a QUEUED rtx;
2655 if the value is constant, this rtx is a constant.)
2656 Otherwise, the returned value is NULL_RTX.
2657
2658 SUGGEST_REG is no longer actually used.
2659 It used to mean, copy the value through a register
2660 and return that register, if that is possible.
2661 We now use WANT_VALUE to decide whether to do this. */
2662
2663rtx
2664expand_assignment (to, from, want_value, suggest_reg)
2665 tree to, from;
2666 int want_value;
2667 int suggest_reg;
2668{
2669 register rtx to_rtx = 0;
2670 rtx result;
2671
2672 /* Don't crash if the lhs of the assignment was erroneous. */
2673
2674 if (TREE_CODE (to) == ERROR_MARK)
2675 {
2676 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2677 return want_value ? result : NULL_RTX;
2678 }
2679
2680 if (output_bytecode)
2681 {
2682 tree dest_innermost;
2683
2684 bc_expand_expr (from);
2685 bc_emit_instruction (duplicate);
2686
2687 dest_innermost = bc_expand_address (to);
2688
2689 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2690 take care of it here. */
2691
2692 bc_store_memory (TREE_TYPE (to), dest_innermost);
2693 return NULL;
2694 }
2695
2696 /* Assignment of a structure component needs special treatment
2697 if the structure component's rtx is not simply a MEM.
2698 Assignment of an array element at a constant index, and assignment of
2699 an array element in an unaligned packed structure field, has the same
2700 problem. */
2701
2702 if (TREE_CODE (to) == COMPONENT_REF
2703 || TREE_CODE (to) == BIT_FIELD_REF
2704 || (TREE_CODE (to) == ARRAY_REF
2705 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2706 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2707 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2708 {
2709 enum machine_mode mode1;
2710 int bitsize;
2711 int bitpos;
2712 tree offset;
2713 int unsignedp;
2714 int volatilep = 0;
2715 tree tem;
2716 int alignment;
2717
2718 push_temp_slots ();
2719 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2720 &mode1, &unsignedp, &volatilep);
2721
2722 /* If we are going to use store_bit_field and extract_bit_field,
2723 make sure to_rtx will be safe for multiple use. */
2724
2725 if (mode1 == VOIDmode && want_value)
2726 tem = stabilize_reference (tem);
2727
2728 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2729 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2730 if (offset != 0)
2731 {
2732 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2733
2734 if (GET_CODE (to_rtx) != MEM)
2735 abort ();
2736 to_rtx = change_address (to_rtx, VOIDmode,
2737 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2738 force_reg (ptr_mode, offset_rtx)));
2739 /* If we have a variable offset, the known alignment
2740 is only that of the innermost structure containing the field.
2741 (Actually, we could sometimes do better by using the
2742 align of an element of the innermost array, but no need.) */
2743 if (TREE_CODE (to) == COMPONENT_REF
2744 || TREE_CODE (to) == BIT_FIELD_REF)
2745 alignment
2746 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2747 }
2748 if (volatilep)
2749 {
2750 if (GET_CODE (to_rtx) == MEM)
2751 {
2752 /* When the offset is zero, to_rtx is the address of the
2753 structure we are storing into, and hence may be shared.
2754 We must make a new MEM before setting the volatile bit. */
2755 if (offset == 0)
2756 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2757 MEM_VOLATILE_P (to_rtx) = 1;
2758 }
2759#if 0 /* This was turned off because, when a field is volatile
2760 in an object which is not volatile, the object may be in a register,
2761 and then we would abort over here. */
2762 else
2763 abort ();
2764#endif
2765 }
2766
2767 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2768 (want_value
2769 /* Spurious cast makes HPUX compiler happy. */
2770 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2771 : VOIDmode),
2772 unsignedp,
2773 /* Required alignment of containing datum. */
2774 alignment,
2775 int_size_in_bytes (TREE_TYPE (tem)));
2776 preserve_temp_slots (result);
2777 free_temp_slots ();
2778 pop_temp_slots ();
2779
2780 /* If the value is meaningful, convert RESULT to the proper mode.
2781 Otherwise, return nothing. */
2782 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2783 TYPE_MODE (TREE_TYPE (from)),
2784 result,
2785 TREE_UNSIGNED (TREE_TYPE (to)))
2786 : NULL_RTX);
2787 }
2788
2789 /* If the rhs is a function call and its value is not an aggregate,
2790 call the function before we start to compute the lhs.
2791 This is needed for correct code for cases such as
2792 val = setjmp (buf) on machines where reference to val
2793 requires loading up part of an address in a separate insn.
2794
2795 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2796 a promoted variable where the zero- or sign- extension needs to be done.
2797 Handling this in the normal way is safe because no computation is done
2798 before the call. */
2799 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2800 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2801 {
2802 rtx value;
2803
2804 push_temp_slots ();
2805 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2806 if (to_rtx == 0)
2807 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2808
2809 if (GET_MODE (to_rtx) == BLKmode)
2810 emit_block_move (to_rtx, value, expr_size (from),
2811 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2812 else
2813 emit_move_insn (to_rtx, value);
2814 preserve_temp_slots (to_rtx);
2815 free_temp_slots ();
2816 pop_temp_slots ();
2817 return want_value ? to_rtx : NULL_RTX;
2818 }
2819
2820 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2821 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2822
2823 if (to_rtx == 0)
2824 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2825
2826 /* Don't move directly into a return register. */
2827 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2828 {
2829 rtx temp;
2830
2831 push_temp_slots ();
2832 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2833 emit_move_insn (to_rtx, temp);
2834 preserve_temp_slots (to_rtx);
2835 free_temp_slots ();
2836 pop_temp_slots ();
2837 return want_value ? to_rtx : NULL_RTX;
2838 }
2839
2840 /* In case we are returning the contents of an object which overlaps
2841 the place the value is being stored, use a safe function when copying
2842 a value through a pointer into a structure value return block. */
2843 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2844 && current_function_returns_struct
2845 && !current_function_returns_pcc_struct)
2846 {
2847 rtx from_rtx, size;
2848
2849 push_temp_slots ();
2850 size = expr_size (from);
2851 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2852
2853#ifdef TARGET_MEM_FUNCTIONS
2854 emit_library_call (memcpy_libfunc, 0,
2855 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2856 XEXP (from_rtx, 0), Pmode,
2857 convert_to_mode (TYPE_MODE (sizetype),
2858 size, TREE_UNSIGNED (sizetype)),
2859 TYPE_MODE (sizetype));
2860#else
2861 emit_library_call (bcopy_libfunc, 0,
2862 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2863 XEXP (to_rtx, 0), Pmode,
2864 convert_to_mode (TYPE_MODE (integer_type_node),
2865 size, TREE_UNSIGNED (integer_type_node)),
2866 TYPE_MODE (integer_type_node));
2867#endif
2868
2869 preserve_temp_slots (to_rtx);
2870 free_temp_slots ();
2871 pop_temp_slots ();
2872 return want_value ? to_rtx : NULL_RTX;
2873 }
2874
2875 /* Compute FROM and store the value in the rtx we got. */
2876
2877 push_temp_slots ();
2878 result = store_expr (from, to_rtx, want_value);
2879 preserve_temp_slots (result);
2880 free_temp_slots ();
2881 pop_temp_slots ();
2882 return want_value ? result : NULL_RTX;
2883}
2884
2885/* Generate code for computing expression EXP,
2886 and storing the value into TARGET.
2887 TARGET may contain a QUEUED rtx.
2888
2889 If WANT_VALUE is nonzero, return a copy of the value
2890 not in TARGET, so that we can be sure to use the proper
2891 value in a containing expression even if TARGET has something
2892 else stored in it. If possible, we copy the value through a pseudo
2893 and return that pseudo. Or, if the value is constant, we try to
2894 return the constant. In some cases, we return a pseudo
2895 copied *from* TARGET.
2896
2897 If the mode is BLKmode then we may return TARGET itself.
2898 It turns out that in BLKmode it doesn't cause a problem,
2899 because C has no operators that could combine two different
2900 assignments into the same BLKmode object with different values
2901 with no sequence point. Will other languages need this to
2902 be more thorough?
2903
2904 If WANT_VALUE is 0, we return NULL, to make sure
2905 to catch quickly any cases where the caller uses the value
2906 and fails to set WANT_VALUE. */
2907
2908rtx
2909store_expr (exp, target, want_value)
2910 register tree exp;
2911 register rtx target;
2912 int want_value;
2913{
2914 register rtx temp;
2915 int dont_return_target = 0;
2916
2917 if (TREE_CODE (exp) == COMPOUND_EXPR)
2918 {
2919 /* Perform first part of compound expression, then assign from second
2920 part. */
2921 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2922 emit_queue ();
2923 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2924 }
2925 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2926 {
2927 /* For conditional expression, get safe form of the target. Then
2928 test the condition, doing the appropriate assignment on either
2929 side. This avoids the creation of unnecessary temporaries.
2930 For non-BLKmode, it is more efficient not to do this. */
2931
2932 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2933
2934 emit_queue ();
2935 target = protect_from_queue (target, 1);
2936
2937 do_pending_stack_adjust ();
2938 NO_DEFER_POP;
2939 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2940 store_expr (TREE_OPERAND (exp, 1), target, 0);
2941 emit_queue ();
2942 emit_jump_insn (gen_jump (lab2));
2943 emit_barrier ();
2944 emit_label (lab1);
2945 store_expr (TREE_OPERAND (exp, 2), target, 0);
2946 emit_queue ();
2947 emit_label (lab2);
2948 OK_DEFER_POP;
2949 return want_value ? target : NULL_RTX;
2950 }
2951 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2952 && GET_MODE (target) != BLKmode)
2953 /* If target is in memory and caller wants value in a register instead,
2954 arrange that. Pass TARGET as target for expand_expr so that,
2955 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2956 We know expand_expr will not use the target in that case.
2957 Don't do this if TARGET is volatile because we are supposed
2958 to write it and then read it. */
2959 {
2960 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2961 GET_MODE (target), 0);
2962 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2963 temp = copy_to_reg (temp);
2964 dont_return_target = 1;
2965 }
2966 else if (queued_subexp_p (target))
2967 /* If target contains a postincrement, let's not risk
2968 using it as the place to generate the rhs. */
2969 {
2970 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2971 {
2972 /* Expand EXP into a new pseudo. */
2973 temp = gen_reg_rtx (GET_MODE (target));
2974 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2975 }
2976 else
2977 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2978
2979 /* If target is volatile, ANSI requires accessing the value
2980 *from* the target, if it is accessed. So make that happen.
2981 In no case return the target itself. */
2982 if (! MEM_VOLATILE_P (target) && want_value)
2983 dont_return_target = 1;
2984 }
2985 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2986 /* If this is a scalar in a register that is stored in a wider mode
2987 than the declared mode, compute the result into its declared mode
2988 and then convert to the wider mode. Our value is the computed
2989 expression. */
2990 {
2991 /* If we don't want a value, we can do the conversion inside EXP,
2992 which will often result in some optimizations. Do the conversion
2993 in two steps: first change the signedness, if needed, then
2994 the extend. */
2995 if (! want_value)
2996 {
2997 if (TREE_UNSIGNED (TREE_TYPE (exp))
2998 != SUBREG_PROMOTED_UNSIGNED_P (target))
2999 exp
3000 = convert
3001 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3002 TREE_TYPE (exp)),
3003 exp);
3004
3005 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3006 SUBREG_PROMOTED_UNSIGNED_P (target)),
3007 exp);
3008 }
3009
3010 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3011
3012 /* If TEMP is a volatile MEM and we want a result value, make
3013 the access now so it gets done only once. Likewise if
3014 it contains TARGET. */
3015 if (GET_CODE (temp) == MEM && want_value
3016 && (MEM_VOLATILE_P (temp)
3017 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
3018 temp = copy_to_reg (temp);
3019
3020 /* If TEMP is a VOIDmode constant, use convert_modes to make
3021 sure that we properly convert it. */
3022 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3023 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3024 TYPE_MODE (TREE_TYPE (exp)), temp,
3025 SUBREG_PROMOTED_UNSIGNED_P (target));
3026
3027 convert_move (SUBREG_REG (target), temp,
3028 SUBREG_PROMOTED_UNSIGNED_P (target));
3029 return want_value ? temp : NULL_RTX;
3030 }
3031 else
3032 {
3033 temp = expand_expr (exp, target, GET_MODE (target), 0);
3034 /* Return TARGET if it's a specified hardware register.
3035 If TARGET is a volatile mem ref, either return TARGET
3036 or return a reg copied *from* TARGET; ANSI requires this.
3037
3038 Otherwise, if TEMP is not TARGET, return TEMP
3039 if it is constant (for efficiency),
3040 or if we really want the correct value. */
3041 if (!(target && GET_CODE (target) == REG
3042 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3043 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3044 && temp != target
3045 && (CONSTANT_P (temp) || want_value))
3046 dont_return_target = 1;
3047 }
3048
3049 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3050 the same as that of TARGET, adjust the constant. This is needed, for
3051 example, in case it is a CONST_DOUBLE and we want only a word-sized
3052 value. */
3053 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3054 && TREE_CODE (exp) != ERROR_MARK
3055 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3056 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3057 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3058
3059 /* If value was not generated in the target, store it there.
3060 Convert the value to TARGET's type first if nec. */
3061
3062 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3063 {
3064 target = protect_from_queue (target, 1);
3065 if (GET_MODE (temp) != GET_MODE (target)
3066 && GET_MODE (temp) != VOIDmode)
3067 {
3068 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3069 if (dont_return_target)
3070 {
3071 /* In this case, we will return TEMP,
3072 so make sure it has the proper mode.
3073 But don't forget to store the value into TARGET. */
3074 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3075 emit_move_insn (target, temp);
3076 }
3077 else
3078 convert_move (target, temp, unsignedp);
3079 }
3080
3081 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3082 {
3083 /* Handle copying a string constant into an array.
3084 The string constant may be shorter than the array.
3085 So copy just the string's actual length, and clear the rest. */
3086 rtx size;
3087 rtx addr;
3088
3089 /* Get the size of the data type of the string,
3090 which is actually the size of the target. */
3091 size = expr_size (exp);
3092 if (GET_CODE (size) == CONST_INT
3093 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3094 emit_block_move (target, temp, size,
3095 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3096 else
3097 {
3098 /* Compute the size of the data to copy from the string. */
3099 tree copy_size
3100 = size_binop (MIN_EXPR,
3101 make_tree (sizetype, size),
3102 convert (sizetype,
3103 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3104 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3105 VOIDmode, 0);
3106 rtx label = 0;
3107
3108 /* Copy that much. */
3109 emit_block_move (target, temp, copy_size_rtx,
3110 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3111
3112 /* Figure out how much is left in TARGET that we have to clear.
3113 Do all calculations in ptr_mode. */
3114
3115 addr = XEXP (target, 0);
3116 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3117
3118 if (GET_CODE (copy_size_rtx) == CONST_INT)
3119 {
3120 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3121 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3122 }
3123 else
3124 {
3125 addr = force_reg (ptr_mode, addr);
3126 addr = expand_binop (ptr_mode, add_optab, addr,
3127 copy_size_rtx, NULL_RTX, 0,
3128 OPTAB_LIB_WIDEN);
3129
3130 size = expand_binop (ptr_mode, sub_optab, size,
3131 copy_size_rtx, NULL_RTX, 0,
3132 OPTAB_LIB_WIDEN);
3133
3134 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3135 GET_MODE (size), 0, 0);
3136 label = gen_label_rtx ();
3137 emit_jump_insn (gen_blt (label));
3138 }
3139
3140 if (size != const0_rtx)
3141 {
3142#ifdef TARGET_MEM_FUNCTIONS
3143 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3144 addr, Pmode,
3145 const0_rtx, TYPE_MODE (integer_type_node),
3146 convert_to_mode (TYPE_MODE (sizetype),
3147 size,
3148 TREE_UNSIGNED (sizetype)),
3149 TYPE_MODE (sizetype));
3150#else
3151 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3152 addr, Pmode,
3153 convert_to_mode (TYPE_MODE (integer_type_node),
3154 size,
3155 TREE_UNSIGNED (integer_type_node)),
3156 TYPE_MODE (integer_type_node));
3157#endif
3158 }
3159
3160 if (label)
3161 emit_label (label);
3162 }
3163 }
3164 else if (GET_MODE (temp) == BLKmode)
3165 emit_block_move (target, temp, expr_size (exp),
3166 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3167 else
3168 emit_move_insn (target, temp);
3169 }
3170
3171 /* If we don't want a value, return NULL_RTX. */
3172 if (! want_value)
3173 return NULL_RTX;
3174
3175 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3176 ??? The latter test doesn't seem to make sense. */
3177 else if (dont_return_target && GET_CODE (temp) != MEM)
3178 return temp;
3179
3180 /* Return TARGET itself if it is a hard register. */
3181 else if (want_value && GET_MODE (target) != BLKmode
3182 && ! (GET_CODE (target) == REG
3183 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3184 return copy_to_reg (target);
3185
3186 else
3187 return target;
3188}
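/* Plain-C analogue of the STRING_CST path above: copy only the
   string's actual bytes into the array, then clear whatever is left.
   The helper and its parameters are illustrative assumptions, not
   gcc internals. */

#include <string.h>
#include <stddef.h>

static void
store_string_sketch (char *target, size_t target_size,
                     const char *str, size_t str_len)
{
  size_t copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy);       /* emit_block_move analogue */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy);  /* clear the rest */
}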
3189\f
3190/* Return 1 if EXP just contains zeros. */
3191
3192static int
3193is_zeros_p (exp)
3194 tree exp;
3195{
3196 tree elt;
3197
3198 switch (TREE_CODE (exp))
3199 {
3200 case CONVERT_EXPR:
3201 case NOP_EXPR:
3202 case NON_LVALUE_EXPR:
3203 return is_zeros_p (TREE_OPERAND (exp, 0));
3204
3205 case INTEGER_CST:
3206 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3207
3208 case COMPLEX_CST:
3209 return
3210 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3211
3212 case REAL_CST:
3213 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3214
3215 case CONSTRUCTOR:
3216 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3217 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3218 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3219 if (! is_zeros_p (TREE_VALUE (elt)))
3220 return 0;
3221
3222 return 1;
3223 }
3224
3225 return 0;
3226}
3227
3228/* Return 1 if EXP contains mostly (3/4) zeros. */
3229
3230static int
3231mostly_zeros_p (exp)
3232 tree exp;
3233{
3234 if (TREE_CODE (exp) == CONSTRUCTOR)
3235 {
3236 int elts = 0, zeros = 0;
3237 tree elt = CONSTRUCTOR_ELTS (exp);
3238 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3239 {
3240 /* If there are no ranges of true bits, it is all zero. */
3241 return elt == NULL_TREE;
3242 }
3243 for (; elt; elt = TREE_CHAIN (elt))
3244 {
3245 /* We do not handle the case where the index is a RANGE_EXPR,
3246 so the statistic will be somewhat inaccurate.
3247 We do make a more accurate count in store_constructor itself,
3248 and since this function is only used for nested array elements,
3249 this should be close enough. */
3250 if (mostly_zeros_p (TREE_VALUE (elt)))
3251 zeros++;
3252 elts++;
3253 }
3254
3255 return 4 * zeros >= 3 * elts;
3256 }
3257
3258 return is_zeros_p (exp);
3259}
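/* The 3/4 test above in isolation: "4 * zeros >= 3 * elts" is the
   integer form of zeros / elts >= 0.75, avoiding division and any
   rounding.  A tiny standalone check with assumed counts: */

#include <assert.h>

static int
mostly_zeros_sketch (int zeros, int elts)
{
  return 4 * zeros >= 3 * elts;
}

int
main (void)
{
  assert (mostly_zeros_sketch (3, 4));    /* exactly 3/4: mostly zero */
  assert (! mostly_zeros_sketch (2, 4));  /* only half: not */
  return 0;
}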
3260\f
3261/* Helper function for store_constructor.
3262 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3263 TYPE is the type of the CONSTRUCTOR, not the element type.
3264 CLEARED is as for store_constructor.
3265
3266 This provides a recursive shortcut back to store_constructor when it isn't
3267 necessary to go through store_field. This is so that we can pass through
3268 the cleared field to let store_constructor know that we may not have to
3269 clear a substructure if the outer structure has already been cleared. */
3270
3271static void
3272store_constructor_field (target, bitsize, bitpos,
3273 mode, exp, type, cleared)
3274 rtx target;
3275 int bitsize, bitpos;
3276 enum machine_mode mode;
3277 tree exp, type;
3278 int cleared;
3279{
3280 if (TREE_CODE (exp) == CONSTRUCTOR
3281 && bitpos % BITS_PER_UNIT == 0
3282 /* If we have a non-zero bitpos for a register target, then we just
3283 let store_field do the bitfield handling. This is unlikely to
3284 generate unnecessary clear instructions anyways. */
3285 && (bitpos == 0 || GET_CODE (target) == MEM))
3286 {
3287 if (bitpos != 0)
3288 target = change_address (target, VOIDmode,
3289 plus_constant (XEXP (target, 0),
3290 bitpos / BITS_PER_UNIT));
3291 store_constructor (exp, target, cleared);
3292 }
3293 else
3294 store_field (target, bitsize, bitpos, mode, exp,
3295 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3296 int_size_in_bytes (type));
3297}
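/* The byte-offset step above in isolation: a constructor field known
   to start on a byte boundary is reached by advancing the target
   address by bitpos / BITS_PER_UNIT bytes (8-bit units assumed;
   a plus_constant analogue, illustrative only). */

static unsigned char *
field_address_sketch (unsigned char *target, int bitpos)
{
  return target + bitpos / 8;
}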
3298
3299/* Store the value of constructor EXP into the rtx TARGET.
3300 TARGET is either a REG or a MEM.
3301 CLEARED is true if TARGET is known to have been zero'd. */
3302
3303static void
3304store_constructor (exp, target, cleared)
3305 tree exp;
3306 rtx target;
3307 int cleared;
3308{
3309 tree type = TREE_TYPE (exp);
3310
3311 /* We know our target cannot conflict, since safe_from_p has been called. */
3312#if 0
3313 /* Don't try copying piece by piece into a hard register
3314 since that is vulnerable to being clobbered by EXP.
3315 Instead, construct in a pseudo register and then copy it all. */
3316 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3317 {
3318 rtx temp = gen_reg_rtx (GET_MODE (target));
3319 store_constructor (exp, temp, 0);
3320 emit_move_insn (target, temp);
3321 return;
3322 }
3323#endif
3324
3325 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3326 || TREE_CODE (type) == QUAL_UNION_TYPE)
3327 {
3328 register tree elt;
3329
3330 /* Inform later passes that the whole union value is dead. */
3331 if (TREE_CODE (type) == UNION_TYPE
3332 || TREE_CODE (type) == QUAL_UNION_TYPE)
3333 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3334
3335 /* If we are building a static constructor into a register,
3336 set the initial value as zero so we can fold the value into
3337 a constant. But if more than one register is involved,
3338 this probably loses. */
3339 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3340 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3341 {
3342 if (! cleared)
3343 emit_move_insn (target, const0_rtx);
3344
3345 cleared = 1;
3346 }
3347
3348 /* If the constructor has fewer fields than the structure
3349 or if we are initializing the structure to mostly zeros,
3350 clear the whole structure first. */
3351 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3352 != list_length (TYPE_FIELDS (type)))
3353 || mostly_zeros_p (exp))
3354 {
3355 if (! cleared)
3356 clear_storage (target, expr_size (exp),
3357 TYPE_ALIGN (type) / BITS_PER_UNIT);
3358
3359 cleared = 1;
3360 }
3361 else
3362 /* Inform later passes that the old value is dead. */
3363 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3364
3365 /* Store each element of the constructor into
3366 the corresponding field of TARGET. */
3367
3368 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3369 {
3370 register tree field = TREE_PURPOSE (elt);
3371 register enum machine_mode mode;
3372 int bitsize;
3373 int bitpos = 0;
3374 int unsignedp;
3375 tree pos, constant = 0, offset = 0;
3376 rtx to_rtx = target;
3377
3378 /* Just ignore missing fields.
3379 We cleared the whole structure, above,
3380 if any fields are missing. */
3381 if (field == 0)
3382 continue;
3383
3384 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3385 continue;
3386
3387 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3388 unsignedp = TREE_UNSIGNED (field);
3389 mode = DECL_MODE (field);
3390 if (DECL_BIT_FIELD (field))
3391 mode = VOIDmode;
3392
3393 pos = DECL_FIELD_BITPOS (field);
3394 if (TREE_CODE (pos) == INTEGER_CST)
3395 constant = pos;
3396 else if (TREE_CODE (pos) == PLUS_EXPR
3397 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3398 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3399 else
3400 offset = pos;
3401
3402 if (constant)
3403 bitpos = TREE_INT_CST_LOW (constant);
3404
3405 if (offset)
3406 {
3407 rtx offset_rtx;
3408
3409 if (contains_placeholder_p (offset))
3410 offset = build (WITH_RECORD_EXPR, sizetype,
3411 offset, exp);
3412
3413 offset = size_binop (FLOOR_DIV_EXPR, offset,
3414 size_int (BITS_PER_UNIT));
3415
3416 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3417 if (GET_CODE (to_rtx) != MEM)
3418 abort ();
3419
3420 to_rtx
3421 = change_address (to_rtx, VOIDmode,
3422 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3423 force_reg (ptr_mode, offset_rtx)));
3424 }
3425 if (TREE_READONLY (field))
3426 {
3427 if (GET_CODE (to_rtx) == MEM)
3428 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3429 XEXP (to_rtx, 0));
3430 RTX_UNCHANGING_P (to_rtx) = 1;
3431 }
3432
3433 store_constructor_field (to_rtx, bitsize, bitpos,
3434 mode, TREE_VALUE (elt), type, cleared);
3435 }
3436 }
3437 else if (TREE_CODE (type) == ARRAY_TYPE)
3438 {
3439 register tree elt;
3440 register int i;
3441 int need_to_clear;
3442 tree domain = TYPE_DOMAIN (type);
3443 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3444 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 3445 tree elttype = TREE_TYPE (type);
bbf6f052 3446
e1a43f73
PB
3447 /* If the constructor has fewer elements than the array,
3448	 clear the whole array first.  Similarly if this is a
3449 static constructor of a non-BLKmode object. */
3450 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3451 need_to_clear = 1;
3452 else
3453 {
3454 HOST_WIDE_INT count = 0, zero_count = 0;
3455 need_to_clear = 0;
3456 /* This loop is a more accurate version of the loop in
3457 mostly_zeros_p (it handles RANGE_EXPR in an index).
3458 It is also needed to check for missing elements. */
3459 for (elt = CONSTRUCTOR_ELTS (exp);
3460 elt != NULL_TREE;
3461	 elt = TREE_CHAIN (elt))
3462 {
3463 tree index = TREE_PURPOSE (elt);
3464 HOST_WIDE_INT this_node_count;
3465 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3466 {
3467 tree lo_index = TREE_OPERAND (index, 0);
3468 tree hi_index = TREE_OPERAND (index, 1);
3469 if (TREE_CODE (lo_index) != INTEGER_CST
3470 || TREE_CODE (hi_index) != INTEGER_CST)
3471 {
3472 need_to_clear = 1;
3473 break;
3474 }
3475 this_node_count = TREE_INT_CST_LOW (hi_index)
3476 - TREE_INT_CST_LOW (lo_index) + 1;
3477 }
3478 else
3479 this_node_count = 1;
3480 count += this_node_count;
3481 if (mostly_zeros_p (TREE_VALUE (elt)))
3482 zero_count += this_node_count;
3483 }
3484 if (4 * zero_count >= 3 * count)
3485 need_to_clear = 1;
3486 }
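/* A minimal standalone sketch of the 3/4-zeros heuristic above (not
   part of the compiler; the sample initializer and main function are
   hypothetical).  Zero elements are counted exactly as the loop above
   does, and the same integer comparison decides whether to clear the
   whole object first.  */
#if 0
#include <stdio.h>

int
main ()
{
  /* A sample initializer: 16 elements, 13 of them zero.  */
  int init[16] = { 0, 0, 5, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 2 };
  int count = 16, zero_count = 0, i;

  for (i = 0; i < count; i++)
    if (init[i] == 0)
      zero_count++;

  /* Clear first when zeros make up at least 3/4 of the elements;
     integer arithmetic avoids floating point.  */
  if (4 * zero_count >= 3 * count)
    printf ("clear whole array, then store %d nonzero elements\n",
	    count - zero_count);
  else
    printf ("store all %d elements individually\n", count);
  return 0;
}
#endif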
3487 if (need_to_clear)
9de08200
RK
3488 {
3489 if (! cleared)
3490 clear_storage (target, expr_size (exp),
3491 TYPE_ALIGN (type) / BITS_PER_UNIT);
9de08200
RK
3492 cleared = 1;
3493 }
bbf6f052
RK
3494 else
3495 /* Inform later passes that the old value is dead. */
3496 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3497
3498 /* Store each element of the constructor into
3499 the corresponding element of TARGET, determined
3500 by counting the elements. */
3501 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3502 elt;
3503 elt = TREE_CHAIN (elt), i++)
3504 {
3505 register enum machine_mode mode;
3506 int bitsize;
3507 int bitpos;
3508 int unsignedp;
e1a43f73 3509 tree value = TREE_VALUE (elt);
03dc44a6
RS
3510 tree index = TREE_PURPOSE (elt);
3511 rtx xtarget = target;
bbf6f052 3512
e1a43f73
PB
3513 if (cleared && is_zeros_p (value))
3514 continue;
9de08200 3515
bbf6f052
RK
3516 mode = TYPE_MODE (elttype);
3517 bitsize = GET_MODE_BITSIZE (mode);
3518 unsignedp = TREE_UNSIGNED (elttype);
3519
e1a43f73
PB
3520 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3521 {
3522 tree lo_index = TREE_OPERAND (index, 0);
3523 tree hi_index = TREE_OPERAND (index, 1);
3524 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3525 struct nesting *loop;
05c0b405
PB
3526 HOST_WIDE_INT lo, hi, count;
3527 tree position;
e1a43f73 3528
05c0b405 3529 /* If the range is constant and "small", unroll the loop. */
e1a43f73 3530 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
3531 && TREE_CODE (hi_index) == INTEGER_CST
3532 && (lo = TREE_INT_CST_LOW (lo_index),
3533 hi = TREE_INT_CST_LOW (hi_index),
3534 count = hi - lo + 1,
3535 (GET_CODE (target) != MEM
3536 || count <= 2
3537 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3538 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3539 <= 40 * 8))))
e1a43f73 3540 {
05c0b405
PB
3541 lo -= minelt; hi -= minelt;
3542 for (; lo <= hi; lo++)
e1a43f73 3543 {
05c0b405
PB
3544 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3545 store_constructor_field (target, bitsize, bitpos,
3546 mode, value, type, cleared);
e1a43f73
PB
3547 }
3548 }
3549 else
3550 {
3551 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3552 loop_top = gen_label_rtx ();
3553 loop_end = gen_label_rtx ();
3554
3555 unsignedp = TREE_UNSIGNED (domain);
3556
3557 index = build_decl (VAR_DECL, NULL_TREE, domain);
3558
3559 DECL_RTL (index) = index_r
3560 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3561 &unsignedp, 0));
3562
3563 if (TREE_CODE (value) == SAVE_EXPR
3564 && SAVE_EXPR_RTL (value) == 0)
3565 {
3566 /* Make sure value gets expanded once before the loop. */
3567 expand_expr (value, const0_rtx, VOIDmode, 0);
3568 emit_queue ();
3569 }
3570 store_expr (lo_index, index_r, 0);
3571 loop = expand_start_loop (0);
3572
3573 /* Assign value to element index. */
3574 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3575 size_int (BITS_PER_UNIT));
3576 position = size_binop (MULT_EXPR,
3577 size_binop (MINUS_EXPR, index,
3578 TYPE_MIN_VALUE (domain)),
3579 position);
3580 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3581 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3582 xtarget = change_address (target, mode, addr);
3583 if (TREE_CODE (value) == CONSTRUCTOR)
05c0b405 3584 store_constructor (value, xtarget, cleared);
e1a43f73
PB
3585 else
3586 store_expr (value, xtarget, 0);
3587
3588 expand_exit_loop_if_false (loop,
3589 build (LT_EXPR, integer_type_node,
3590 index, hi_index));
3591
3592 expand_increment (build (PREINCREMENT_EXPR,
3593 TREE_TYPE (index),
3594 index, integer_one_node), 0);
3595 expand_end_loop ();
3596 emit_label (loop_end);
3597
3598	 /* Needed by stupid register allocation, to extend the
3599 lifetime of pseudo-regs used by target past the end
3600 of the loop. */
3601 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3602 }
3603 }
3604 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 3605 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 3606 {
e1a43f73 3607 rtx pos_rtx, addr;
03dc44a6
RS
3608 tree position;
3609
5b6c44ff
RK
3610 if (index == 0)
3611 index = size_int (i);
3612
e1a43f73
PB
3613 if (minelt)
3614 index = size_binop (MINUS_EXPR, index,
3615 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
3616 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3617 size_int (BITS_PER_UNIT));
3618 position = size_binop (MULT_EXPR, index, position);
03dc44a6
RS
3619 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3620 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3621 xtarget = change_address (target, mode, addr);
e1a43f73 3622 store_expr (value, xtarget, 0);
03dc44a6
RS
3623 }
3624 else
3625 {
3626 if (index != 0)
7c314719 3627 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
3628 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3629 else
3630 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
e1a43f73
PB
3631 store_constructor_field (target, bitsize, bitpos,
3632 mode, value, type, cleared);
03dc44a6 3633 }
bbf6f052
RK
3634 }
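/* A minimal standalone sketch of the element-position arithmetic used
   above (not part of the compiler; the element size and bounds are
   hypothetical).  A constant index contributes
   (index - minelt) * TYPE_SIZE (elttype) bits; a variable index
   contributes the same quantity, scaled to bytes, at run time.  */
#if 0
#include <stdio.h>

#define BITS_PER_UNIT 8

int
main ()
{
  long elt_bits = 32;	/* TYPE_SIZE (elttype): a 32-bit element */
  long minelt = 1;	/* domain lower bound, e.g. an array [1..10] */
  long index = 5;

  long bitpos = (index - minelt) * elt_bits;
  long bytepos = (elt_bits / BITS_PER_UNIT) * (index - minelt);

  printf ("element %ld: bit %ld, byte %ld\n", index, bitpos, bytepos);
  return 0;
}
#endif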
3635 }
071a6595
PB
3636 /* set constructor assignments */
3637 else if (TREE_CODE (type) == SET_TYPE)
3638 {
e1a43f73 3639 tree elt = CONSTRUCTOR_ELTS (exp);
071a6595
PB
3640 rtx xtarget = XEXP (target, 0);
3641 int set_word_size = TYPE_ALIGN (type);
e1a43f73 3642 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
3643 tree domain = TYPE_DOMAIN (type);
3644 tree domain_min, domain_max, bitlength;
3645
9faa82d8 3646 /* The default implementation strategy is to extract the constant
071a6595
PB
3647 parts of the constructor, use that to initialize the target,
3648 and then "or" in whatever non-constant ranges we need in addition.
3649
3650 If a large set is all zero or all ones, it is
3651 probably better to set it using memset (if available) or bzero.
3652 Also, if a large set has just a single range, it may also be
3653	 better to first clear the whole set (using
3654	 bzero/memset), and then set the bits we want.  */
3655
3656 /* Check for all zeros. */
e1a43f73 3657 if (elt == NULL_TREE)
071a6595 3658 {
e1a43f73
PB
3659 if (!cleared)
3660 clear_storage (target, expr_size (exp),
3661 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
3662 return;
3663 }
3664
071a6595
PB
3665 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3666 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3667 bitlength = size_binop (PLUS_EXPR,
3668 size_binop (MINUS_EXPR, domain_max, domain_min),
3669 size_one_node);
3670
e1a43f73
PB
3671 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3672 abort ();
3673 nbits = TREE_INT_CST_LOW (bitlength);
3674
3675 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3676 are "complicated" (more than one range), initialize (the
3677 constant parts) by copying from a constant. */
3678 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3679 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 3680 {
b4ee5a72
PB
3681 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3682 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3683 char *bit_buffer = (char*) alloca (nbits);
3684 HOST_WIDE_INT word = 0;
3685 int bit_pos = 0;
3686 int ibit = 0;
3687 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 3688 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 3689 for (;;)
071a6595 3690 {
b4ee5a72
PB
3691 if (bit_buffer[ibit])
3692 {
b09f3348 3693 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
3694 word |= (1 << (set_word_size - 1 - bit_pos));
3695 else
3696 word |= 1 << bit_pos;
3697 }
3698 bit_pos++; ibit++;
3699 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 3700 {
e1a43f73
PB
3701 if (word != 0 || ! cleared)
3702 {
3703 rtx datum = GEN_INT (word);
3704 rtx to_rtx;
3705 /* The assumption here is that it is safe to use XEXP if
3706 the set is multi-word, but not if it's single-word. */
3707 if (GET_CODE (target) == MEM)
3708 {
3709 to_rtx = plus_constant (XEXP (target, 0), offset);
3710 to_rtx = change_address (target, mode, to_rtx);
3711 }
3712 else if (offset == 0)
3713 to_rtx = target;
3714 else
3715 abort ();
3716 emit_move_insn (to_rtx, datum);
3717 }
b4ee5a72
PB
3718 if (ibit == nbits)
3719 break;
3720 word = 0;
3721 bit_pos = 0;
3722 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
3723 }
3724 }
071a6595 3725 }
e1a43f73
PB
3726 else if (!cleared)
3727 {
3728 /* Don't bother clearing storage if the set is all ones. */
3729 if (TREE_CHAIN (elt) != NULL_TREE
3730 || (TREE_PURPOSE (elt) == NULL_TREE
3731 ? nbits != 1
3732 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3733 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3734 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3735 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3736 != nbits))))
3737 clear_storage (target, expr_size (exp),
3738 TYPE_ALIGN (type) / BITS_PER_UNIT);
3739 }
3740
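/* A minimal standalone sketch of the word-packing loop above (not part
   of the compiler; the 8-bit word size, sample bits, and the
   bytes_big_endian flag are hypothetical).  Bit 0 of the set lands at
   the most significant end of the word on big-endian targets and at
   the least significant end otherwise.  */
#if 0
#include <stdio.h>

int
main ()
{
  char bit_buffer[8] = { 1, 0, 1, 1, 0, 0, 0, 1 };
  int set_word_size = 8, nbits = 8;
  int bytes_big_endian = 1;	/* 0 for the little-endian layout */
  unsigned int word = 0;
  int bit_pos, ibit;

  for (bit_pos = 0, ibit = 0; ibit < nbits; bit_pos++, ibit++)
    if (bit_buffer[ibit])
      {
	if (bytes_big_endian)
	  word |= 1u << (set_word_size - 1 - bit_pos);
	else
	  word |= 1u << bit_pos;
      }

  printf ("packed word: 0x%02x\n", word);	/* 0xb1 big, 0x8d little */
  return 0;
}
#endif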
3741 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
3742 {
3743 /* start of range of element or NULL */
3744 tree startbit = TREE_PURPOSE (elt);
3745 /* end of range of element, or element value */
3746 tree endbit = TREE_VALUE (elt);
3747 HOST_WIDE_INT startb, endb;
3748 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3749
3750 bitlength_rtx = expand_expr (bitlength,
3751 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3752
3753 /* handle non-range tuple element like [ expr ] */
3754 if (startbit == NULL_TREE)
3755 {
3756 startbit = save_expr (endbit);
3757 endbit = startbit;
3758 }
3759 startbit = convert (sizetype, startbit);
3760 endbit = convert (sizetype, endbit);
3761 if (! integer_zerop (domain_min))
3762 {
3763 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3764 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3765 }
3766 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3767 EXPAND_CONST_ADDRESS);
3768 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3769 EXPAND_CONST_ADDRESS);
3770
3771 if (REG_P (target))
3772 {
3773 targetx = assign_stack_temp (GET_MODE (target),
3774 GET_MODE_SIZE (GET_MODE (target)),
3775 0);
3776 emit_move_insn (targetx, target);
3777 }
3778 else if (GET_CODE (target) == MEM)
3779 targetx = target;
3780 else
3781 abort ();
3782
3783#ifdef TARGET_MEM_FUNCTIONS
3784 /* Optimization: If startbit and endbit are
9faa82d8 3785 constants divisible by BITS_PER_UNIT,
071a6595
PB
3786 call memset instead. */
3787 if (TREE_CODE (startbit) == INTEGER_CST
3788 && TREE_CODE (endbit) == INTEGER_CST
3789 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 3790 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 3791 {
071a6595
PB
3792 emit_library_call (memset_libfunc, 0,
3793 VOIDmode, 3,
e1a43f73
PB
3794 plus_constant (XEXP (targetx, 0),
3795 startb / BITS_PER_UNIT),
071a6595 3796 Pmode,
3b6f75e2 3797 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 3798 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 3799 TYPE_MODE (sizetype));
071a6595
PB
3800 }
3801 else
3802#endif
3803 {
071a6595
PB
3804 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3805 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3806 bitlength_rtx, TYPE_MODE (sizetype),
3807 startbit_rtx, TYPE_MODE (sizetype),
3808 endbit_rtx, TYPE_MODE (sizetype));
3809 }
3810 if (REG_P (target))
3811 emit_move_insn (target, targetx);
3812 }
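/* A minimal standalone sketch of the byte-aligned shortcut above (not
   part of the compiler; the set size and bit range are hypothetical,
   and a half-open range is used for simplicity).  When both endpoints
   of the range fall on byte boundaries, one memset of ~0 bytes
   replaces the bit-by-bit __setbits call.  */
#if 0
#include <stdio.h>
#include <string.h>

#define BITS_PER_UNIT 8

int
main ()
{
  unsigned char set[8];		/* a 64-bit set */
  long startb = 16, endb = 40;	/* set bits [16, 40) */
  int i;

  memset (set, 0, sizeof set);
  if (startb % BITS_PER_UNIT == 0 && endb % BITS_PER_UNIT == 0)
    memset (set + startb / BITS_PER_UNIT, ~0,
	    (endb - startb) / BITS_PER_UNIT);

  for (i = 0; i < 8; i++)
    printf ("%02x ", set[i]);	/* 00 00 ff ff ff 00 00 00 */
  printf ("\n");
  return 0;
}
#endif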
3813 }
bbf6f052
RK
3814
3815 else
3816 abort ();
3817}
3818
3819/* Store the value of EXP (an expression tree)
3820 into a subfield of TARGET which has mode MODE and occupies
3821 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3822 If MODE is VOIDmode, it means that we are storing into a bit-field.
3823
3824 If VALUE_MODE is VOIDmode, return nothing in particular.
3825 UNSIGNEDP is not used in this case.
3826
3827 Otherwise, return an rtx for the value stored. This rtx
3828 has mode VALUE_MODE if that is convenient to do.
3829 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3830
3831 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3832 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3833
3834static rtx
3835store_field (target, bitsize, bitpos, mode, exp, value_mode,
3836 unsignedp, align, total_size)
3837 rtx target;
3838 int bitsize, bitpos;
3839 enum machine_mode mode;
3840 tree exp;
3841 enum machine_mode value_mode;
3842 int unsignedp;
3843 int align;
3844 int total_size;
3845{
906c4e36 3846 HOST_WIDE_INT width_mask = 0;
bbf6f052 3847
906c4e36
RK
3848 if (bitsize < HOST_BITS_PER_WIDE_INT)
3849 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
3850
3851 /* If we are storing into an unaligned field of an aligned union that is
3852 in a register, we may have the mode of TARGET being an integer mode but
3853 MODE == BLKmode. In that case, get an aligned object whose size and
3854 alignment are the same as TARGET and store TARGET into it (we can avoid
3855 the store if the field being stored is the entire width of TARGET). Then
3856 call ourselves recursively to store the field into a BLKmode version of
3857 that object. Finally, load from the object into TARGET. This is not
3858 very efficient in general, but should only be slightly more expensive
3859 than the otherwise-required unaligned accesses. Perhaps this can be
3860 cleaned up later. */
3861
3862 if (mode == BLKmode
3863 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3864 {
3865 rtx object = assign_stack_temp (GET_MODE (target),
3866 GET_MODE_SIZE (GET_MODE (target)), 0);
3867 rtx blk_object = copy_rtx (object);
3868
24a13950
JW
3869 MEM_IN_STRUCT_P (object) = 1;
3870 MEM_IN_STRUCT_P (blk_object) = 1;
bbf6f052
RK
3871 PUT_MODE (blk_object, BLKmode);
3872
3873 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3874 emit_move_insn (object, target);
3875
3876 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3877 align, total_size);
3878
46093b97
RS
3879 /* Even though we aren't returning target, we need to
3880 give it the updated value. */
bbf6f052
RK
3881 emit_move_insn (target, object);
3882
46093b97 3883 return blk_object;
bbf6f052
RK
3884 }
3885
3886 /* If the structure is in a register or if the component
3887 is a bit field, we cannot use addressing to access it.
3888 Use bit-field techniques or SUBREG to store in it. */
3889
4fa52007
RK
3890 if (mode == VOIDmode
3891 || (mode != BLKmode && ! direct_store[(int) mode])
3892 || GET_CODE (target) == REG
c980ac49 3893 || GET_CODE (target) == SUBREG
ccc98036
RS
3894 /* If the field isn't aligned enough to store as an ordinary memref,
3895 store it as a bit field. */
c7a7ac46 3896 || (SLOW_UNALIGNED_ACCESS
ccc98036 3897 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
c7a7ac46 3898 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 3899 {
906c4e36 3900 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73
RK
3901
3902 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3903 MODE. */
3904 if (mode != VOIDmode && mode != BLKmode
3905 && mode != TYPE_MODE (TREE_TYPE (exp)))
3906 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3907
bbf6f052
RK
3908 /* Store the value in the bitfield. */
3909 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3910 if (value_mode != VOIDmode)
3911 {
3912 /* The caller wants an rtx for the value. */
3913 /* If possible, avoid refetching from the bitfield itself. */
3914 if (width_mask != 0
3915 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 3916 {
9074de27 3917 tree count;
5c4d7cfb 3918 enum machine_mode tmode;
86a2c12a 3919
5c4d7cfb
RS
3920 if (unsignedp)
3921 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3922 tmode = GET_MODE (temp);
86a2c12a
RS
3923 if (tmode == VOIDmode)
3924 tmode = value_mode;
5c4d7cfb
RS
3925 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3926 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3927 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3928 }
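/* A minimal standalone sketch of the two refetch-avoidance tricks
   above (not part of the compiler; the 5-bit field and the register
   value are hypothetical, and an arithmetic right shift of negative
   ints is assumed, as on typical targets).  An unsigned field is
   recovered by masking with width_mask; a signed field by shifting it
   to the top of the word and arithmetically shifting back down.  */
#if 0
#include <stdio.h>

int
main ()
{
  int bitsize = 5;
  unsigned int raw = 0xfffffff5u;	/* low 5 bits hold 10101 */
  unsigned int width_mask = (1u << bitsize) - 1;
  int count = 32 - bitsize;
  unsigned int uval = raw & width_mask;		/* 21 */
  int sval = (int) (raw << count) >> count;	/* -11 */

  printf ("unsigned: %u  signed: %d\n", uval, sval);
  return 0;
}
#endif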
bbf6f052 3929 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
3930 NULL_RTX, value_mode, 0, align,
3931 total_size);
bbf6f052
RK
3932 }
3933 return const0_rtx;
3934 }
3935 else
3936 {
3937 rtx addr = XEXP (target, 0);
3938 rtx to_rtx;
3939
3940 /* If a value is wanted, it must be the lhs;
3941 so make the address stable for multiple use. */
3942
3943 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3944 && ! CONSTANT_ADDRESS_P (addr)
3945 /* A frame-pointer reference is already stable. */
3946 && ! (GET_CODE (addr) == PLUS
3947 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3948 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3949 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3950 addr = copy_to_reg (addr);
3951
3952 /* Now build a reference to just the desired component. */
3953
3954 to_rtx = change_address (target, mode,
3955 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3956 MEM_IN_STRUCT_P (to_rtx) = 1;
3957
3958 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3959 }
3960}
3961\f
6be58303
JW
3962/* Return true if any object containing the innermost array is an unaligned
3963 packed structure field. */
3964
3965static int
3966get_inner_unaligned_p (exp)
3967 tree exp;
3968{
3969 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3970
3971 while (1)
3972 {
3973 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3974 {
3975 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3976 < needed_alignment)
3977 return 1;
3978 }
3979 else if (TREE_CODE (exp) != ARRAY_REF
3980 && TREE_CODE (exp) != NON_LVALUE_EXPR
3981 && ! ((TREE_CODE (exp) == NOP_EXPR
3982 || TREE_CODE (exp) == CONVERT_EXPR)
3983 && (TYPE_MODE (TREE_TYPE (exp))
3984 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3985 break;
3986
3987 exp = TREE_OPERAND (exp, 0);
3988 }
3989
3990 return 0;
3991}
3992
bbf6f052
RK
3993/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3994 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 3995 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
3996
3997 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3998 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
3999 If the position of the field is variable, we store a tree
4000 giving the variable offset (in units) in *POFFSET.
4001 This offset is in addition to the bit position.
4002 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
4003
4004 If any of the extraction expressions is volatile,
4005 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4006
4007 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4008 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4009 is redundant.
4010
4011 If the field describes a variable-sized object, *PMODE is set to
4012 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4013 this case, but the address of the object can be found. */
bbf6f052
RK
4014
4015tree
4969d05d
RK
4016get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4017 punsignedp, pvolatilep)
bbf6f052
RK
4018 tree exp;
4019 int *pbitsize;
4020 int *pbitpos;
7bb0943f 4021 tree *poffset;
bbf6f052
RK
4022 enum machine_mode *pmode;
4023 int *punsignedp;
4024 int *pvolatilep;
4025{
b50d17a1 4026 tree orig_exp = exp;
bbf6f052
RK
4027 tree size_tree = 0;
4028 enum machine_mode mode = VOIDmode;
742920c7 4029 tree offset = integer_zero_node;
bbf6f052
RK
4030
4031 if (TREE_CODE (exp) == COMPONENT_REF)
4032 {
4033 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4034 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4035 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4036 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4037 }
4038 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4039 {
4040 size_tree = TREE_OPERAND (exp, 1);
4041 *punsignedp = TREE_UNSIGNED (exp);
4042 }
4043 else
4044 {
4045 mode = TYPE_MODE (TREE_TYPE (exp));
4046 *pbitsize = GET_MODE_BITSIZE (mode);
4047 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4048 }
4049
4050 if (size_tree)
4051 {
4052 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4053 mode = BLKmode, *pbitsize = -1;
4054 else
4055 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4056 }
4057
4058 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4059 and find the ultimate containing object. */
4060
4061 *pbitpos = 0;
4062
4063 while (1)
4064 {
7bb0943f 4065 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4066 {
7bb0943f
RS
4067 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4068 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4069 : TREE_OPERAND (exp, 2));
e6d8c385 4070 tree constant = integer_zero_node, var = pos;
bbf6f052 4071
e7f3c83f
RK
4072 /* If this field hasn't been filled in yet, don't go
4073 past it. This should only happen when folding expressions
4074 made during type construction. */
4075 if (pos == 0)
4076 break;
4077
e6d8c385
RK
4078 /* Assume here that the offset is a multiple of a unit.
4079 If not, there should be an explicitly added constant. */
4080 if (TREE_CODE (pos) == PLUS_EXPR
4081 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4082 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 4083 else if (TREE_CODE (pos) == INTEGER_CST)
e6d8c385
RK
4084 constant = pos, var = integer_zero_node;
4085
4086 *pbitpos += TREE_INT_CST_LOW (constant);
4087
4088 if (var)
4089 offset = size_binop (PLUS_EXPR, offset,
4090 size_binop (EXACT_DIV_EXPR, var,
4091 size_int (BITS_PER_UNIT)));
bbf6f052 4092 }
bbf6f052 4093
742920c7 4094 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 4095 {
742920c7
RK
4096 /* This code is based on the code in case ARRAY_REF in expand_expr
4097 below. We assume here that the size of an array element is
4098 always an integral multiple of BITS_PER_UNIT. */
4099
4100 tree index = TREE_OPERAND (exp, 1);
4101 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4102 tree low_bound
4103 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4104 tree index_type = TREE_TYPE (index);
4105
4106 if (! integer_zerop (low_bound))
4107 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4108
4c08eef0 4109 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
742920c7 4110 {
4c08eef0
RK
4111 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4112 index);
742920c7
RK
4113 index_type = TREE_TYPE (index);
4114 }
4115
4116 index = fold (build (MULT_EXPR, index_type, index,
4117 TYPE_SIZE (TREE_TYPE (exp))));
4118
4119 if (TREE_CODE (index) == INTEGER_CST
4120 && TREE_INT_CST_HIGH (index) == 0)
4121 *pbitpos += TREE_INT_CST_LOW (index);
4122 else
4123 offset = size_binop (PLUS_EXPR, offset,
4124 size_binop (FLOOR_DIV_EXPR, index,
4125 size_int (BITS_PER_UNIT)));
bbf6f052
RK
4126 }
4127 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4128 && ! ((TREE_CODE (exp) == NOP_EXPR
4129 || TREE_CODE (exp) == CONVERT_EXPR)
7f62854a
RK
4130 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4131 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4132 != UNION_TYPE))
bbf6f052
RK
4133 && (TYPE_MODE (TREE_TYPE (exp))
4134 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4135 break;
7bb0943f
RS
4136
4137 /* If any reference in the chain is volatile, the effect is volatile. */
4138 if (TREE_THIS_VOLATILE (exp))
4139 *pvolatilep = 1;
bbf6f052
RK
4140 exp = TREE_OPERAND (exp, 0);
4141 }
4142
4143 /* If this was a bit-field, see if there is a mode that allows direct
4144 access in case EXP is in memory. */
e7f3c83f 4145 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
bbf6f052
RK
4146 {
4147 mode = mode_for_size (*pbitsize, MODE_INT, 0);
4148 if (mode == BLKmode)
4149 mode = VOIDmode;
4150 }
4151
742920c7
RK
4152 if (integer_zerop (offset))
4153 offset = 0;
4154
b50d17a1
RK
4155 if (offset != 0 && contains_placeholder_p (offset))
4156 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4157
bbf6f052 4158 *pmode = mode;
7bb0943f 4159 *poffset = offset;
bbf6f052
RK
4160 return exp;
4161}
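/* A minimal standalone sketch of the position bookkeeping above (not
   part of the compiler; the field and array layout are hypothetical).
   Constant positions accumulate into a single bit position; variable
   parts would instead accumulate into the separate byte offset
   returned in *POFFSET.  */
#if 0
#include <stdio.h>

#define BITS_PER_UNIT 8

int
main ()
{
  /* Decompose s.a[3].b, where field a starts at bit 64 of s, elements
     of a are 96 bits, and field b starts at bit 32 of an element.  */
  long bitpos = 0;

  bitpos += 32;		/* COMPONENT_REF: DECL_FIELD_BITPOS (b) */
  bitpos += 3 * 96;	/* ARRAY_REF: index * TYPE_SIZE (elttype) */
  bitpos += 64;		/* COMPONENT_REF: DECL_FIELD_BITPOS (a) */

  printf ("bit %ld = byte %ld plus %ld bits\n",
	  bitpos, bitpos / BITS_PER_UNIT, bitpos % BITS_PER_UNIT);
  return 0;
}
#endif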
4162\f
4163/* Given an rtx VALUE that may contain additions and multiplications,
4164 return an equivalent value that just refers to a register or memory.
4165 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
4166 and returning a pseudo-register containing the value.
4167
4168 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
4169
4170rtx
4171force_operand (value, target)
4172 rtx value, target;
4173{
4174 register optab binoptab = 0;
4175 /* Use a temporary to force order of execution of calls to
4176 `force_operand'. */
4177 rtx tmp;
4178 register rtx op2;
4179 /* Use subtarget as the target for operand 0 of a binary operation. */
4180 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4181
4182 if (GET_CODE (value) == PLUS)
4183 binoptab = add_optab;
4184 else if (GET_CODE (value) == MINUS)
4185 binoptab = sub_optab;
4186 else if (GET_CODE (value) == MULT)
4187 {
4188 op2 = XEXP (value, 1);
4189 if (!CONSTANT_P (op2)
4190 && !(GET_CODE (op2) == REG && op2 != subtarget))
4191 subtarget = 0;
4192 tmp = force_operand (XEXP (value, 0), subtarget);
4193 return expand_mult (GET_MODE (value), tmp,
906c4e36 4194 force_operand (op2, NULL_RTX),
bbf6f052
RK
4195 target, 0);
4196 }
4197
4198 if (binoptab)
4199 {
4200 op2 = XEXP (value, 1);
4201 if (!CONSTANT_P (op2)
4202 && !(GET_CODE (op2) == REG && op2 != subtarget))
4203 subtarget = 0;
4204 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4205 {
4206 binoptab = add_optab;
4207 op2 = negate_rtx (GET_MODE (value), op2);
4208 }
4209
4210 /* Check for an addition with OP2 a constant integer and our first
4211 operand a PLUS of a virtual register and something else. In that
4212 case, we want to emit the sum of the virtual register and the
4213 constant first and then add the other value. This allows virtual
4214 register instantiation to simply modify the constant rather than
4215 creating another one around this addition. */
4216 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4217 && GET_CODE (XEXP (value, 0)) == PLUS
4218 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4219 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4220 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4221 {
4222 rtx temp = expand_binop (GET_MODE (value), binoptab,
4223 XEXP (XEXP (value, 0), 0), op2,
4224 subtarget, 0, OPTAB_LIB_WIDEN);
4225 return expand_binop (GET_MODE (value), binoptab, temp,
4226 force_operand (XEXP (XEXP (value, 0), 1), 0),
4227 target, 0, OPTAB_LIB_WIDEN);
4228 }
4229
4230 tmp = force_operand (XEXP (value, 0), subtarget);
4231 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 4232 force_operand (op2, NULL_RTX),
bbf6f052 4233 target, 0, OPTAB_LIB_WIDEN);
8008b228 4234 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
4235 because the only operations we are expanding here are signed ones. */
4236 }
4237 return value;
4238}
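/* A minimal standalone sketch of the reassociation above (not part of
   the compiler; the numeric stand-ins are hypothetical).  Computing
   (v + c) + x instead of (v + x) + c leaves v + c as a single
   constant-adjusted term, which virtual register instantiation can
   fold into its substitution instead of emitting an extra add.  */
#if 0
#include <stdio.h>

int
main ()
{
  long v = 4096;	/* value a virtual register will resolve to */
  long x = 17;		/* the other, non-constant operand */
  long c = 8;		/* CONST_INT addend */

  long naive = (v + x) + c;	/* constant buried behind the add */
  long reassoc = (v + c) + x;	/* v + c foldable at instantiation */

  printf ("%ld == %ld\n", naive, reassoc);
  return 0;
}
#endif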
4239\f
4240/* Subroutine of expand_expr:
4241 save the non-copied parts (LIST) of an expr (LHS), and return a list
4242 which can restore these values to their previous values,
4243 should something modify their storage. */
4244
4245static tree
4246save_noncopied_parts (lhs, list)
4247 tree lhs;
4248 tree list;
4249{
4250 tree tail;
4251 tree parts = 0;
4252
4253 for (tail = list; tail; tail = TREE_CHAIN (tail))
4254 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4255 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4256 else
4257 {
4258 tree part = TREE_VALUE (tail);
4259 tree part_type = TREE_TYPE (part);
906c4e36 4260 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
06089a8b 4261 rtx target = assign_temp (part_type, 0, 1, 1);
bbf6f052 4262 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 4263 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 4264 parts = tree_cons (to_be_saved,
906c4e36
RK
4265 build (RTL_EXPR, part_type, NULL_TREE,
4266 (tree) target),
bbf6f052
RK
4267 parts);
4268 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4269 }
4270 return parts;
4271}
4272
4273/* Subroutine of expand_expr:
4274 record the non-copied parts (LIST) of an expr (LHS), and return a list
4275 which specifies the initial values of these parts. */
4276
4277static tree
4278init_noncopied_parts (lhs, list)
4279 tree lhs;
4280 tree list;
4281{
4282 tree tail;
4283 tree parts = 0;
4284
4285 for (tail = list; tail; tail = TREE_CHAIN (tail))
4286 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4287 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4288 else
4289 {
4290 tree part = TREE_VALUE (tail);
4291 tree part_type = TREE_TYPE (part);
906c4e36 4292 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
4293 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4294 }
4295 return parts;
4296}
4297
4298/* Subroutine of expand_expr: return nonzero iff there is no way that
4299 EXP can reference X, which is being modified. */
4300
4301static int
4302safe_from_p (x, exp)
4303 rtx x;
4304 tree exp;
4305{
4306 rtx exp_rtl = 0;
4307 int i, nops;
4308
6676e72f
RK
4309 if (x == 0
4310 /* If EXP has varying size, we MUST use a target since we currently
4311 have no way of allocating temporaries of variable size. So we
4312 assume here that something at a higher level has prevented a
f4510f37
RK
4313 clash. This is somewhat bogus, but the best we can do. Only
4314 do this when X is BLKmode. */
45524ce9 4315 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
f4510f37
RK
4316 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4317 && GET_MODE (x) == BLKmode))
bbf6f052
RK
4318 return 1;
4319
4320 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4321 find the underlying pseudo. */
4322 if (GET_CODE (x) == SUBREG)
4323 {
4324 x = SUBREG_REG (x);
4325 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4326 return 0;
4327 }
4328
4329 /* If X is a location in the outgoing argument area, it is always safe. */
4330 if (GET_CODE (x) == MEM
4331 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4332 || (GET_CODE (XEXP (x, 0)) == PLUS
4333 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4334 return 1;
4335
4336 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4337 {
4338 case 'd':
4339 exp_rtl = DECL_RTL (exp);
4340 break;
4341
4342 case 'c':
4343 return 1;
4344
4345 case 'x':
4346 if (TREE_CODE (exp) == TREE_LIST)
f32fd778
RS
4347 return ((TREE_VALUE (exp) == 0
4348 || safe_from_p (x, TREE_VALUE (exp)))
bbf6f052
RK
4349 && (TREE_CHAIN (exp) == 0
4350 || safe_from_p (x, TREE_CHAIN (exp))));
4351 else
4352 return 0;
4353
4354 case '1':
4355 return safe_from_p (x, TREE_OPERAND (exp, 0));
4356
4357 case '2':
4358 case '<':
4359 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4360 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4361
4362 case 'e':
4363 case 'r':
4364 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4365 the expression. If it is set, we conflict iff we are that rtx or
4366 both are in memory. Otherwise, we check all operands of the
4367 expression recursively. */
4368
4369 switch (TREE_CODE (exp))
4370 {
4371 case ADDR_EXPR:
e44842fe
RK
4372 return (staticp (TREE_OPERAND (exp, 0))
4373 || safe_from_p (x, TREE_OPERAND (exp, 0)));
bbf6f052
RK
4374
4375 case INDIRECT_REF:
4376 if (GET_CODE (x) == MEM)
4377 return 0;
4378 break;
4379
4380 case CALL_EXPR:
4381 exp_rtl = CALL_EXPR_RTL (exp);
4382 if (exp_rtl == 0)
4383 {
4384 /* Assume that the call will clobber all hard registers and
4385 all of memory. */
4386 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4387 || GET_CODE (x) == MEM)
4388 return 0;
4389 }
4390
4391 break;
4392
4393 case RTL_EXPR:
3bb5826a
RK
4394 /* If a sequence exists, we would have to scan every instruction
4395 in the sequence to see if it was safe. This is probably not
4396 worthwhile. */
4397 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
4398 return 0;
4399
3bb5826a 4400 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
4401 break;
4402
4403 case WITH_CLEANUP_EXPR:
4404 exp_rtl = RTL_EXPR_RTL (exp);
4405 break;
4406
5dab5552
MS
4407 case CLEANUP_POINT_EXPR:
4408 return safe_from_p (x, TREE_OPERAND (exp, 0));
4409
bbf6f052
RK
4410 case SAVE_EXPR:
4411 exp_rtl = SAVE_EXPR_RTL (exp);
4412 break;
4413
8129842c
RS
4414 case BIND_EXPR:
4415 /* The only operand we look at is operand 1. The rest aren't
4416 part of the expression. */
4417 return safe_from_p (x, TREE_OPERAND (exp, 1));
4418
bbf6f052
RK
4419 case METHOD_CALL_EXPR:
4420 /* This takes a rtx argument, but shouldn't appear here. */
4421 abort ();
4422 }
4423
4424 /* If we have an rtx, we do not need to scan our operands. */
4425 if (exp_rtl)
4426 break;
4427
4428 nops = tree_code_length[(int) TREE_CODE (exp)];
4429 for (i = 0; i < nops; i++)
4430 if (TREE_OPERAND (exp, i) != 0
4431 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4432 return 0;
4433 }
4434
4435 /* If we have an rtl, find any enclosed object. Then see if we conflict
4436 with it. */
4437 if (exp_rtl)
4438 {
4439 if (GET_CODE (exp_rtl) == SUBREG)
4440 {
4441 exp_rtl = SUBREG_REG (exp_rtl);
4442 if (GET_CODE (exp_rtl) == REG
4443 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4444 return 0;
4445 }
4446
4447 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4448 are memory and EXP is not readonly. */
4449 return ! (rtx_equal_p (x, exp_rtl)
4450 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4451 && ! TREE_READONLY (exp)));
4452 }
4453
4454 /* If we reach here, it is safe. */
4455 return 1;
4456}
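/* A minimal standalone sketch of the recursive scan above (not part of
   the compiler; the toy_expr type and names are hypothetical).  An
   expression is safe from X unless some node reaches X itself; one
   conflicting leaf makes the whole expression unsafe.  */
#if 0
#include <stdio.h>
#include <stddef.h>
#include <string.h>

struct toy_expr
{
  const char *name;		/* non-null for leaves */
  struct toy_expr *op[2];	/* operands, may be null */
};

static int
toy_safe_from_p (const char *x, struct toy_expr *exp)
{
  int i;

  if (exp == NULL)
    return 1;
  if (exp->name != NULL)
    return strcmp (exp->name, x) != 0;	/* leaf: conflict iff it is X */
  for (i = 0; i < 2; i++)
    if (! toy_safe_from_p (x, exp->op[i]))
      return 0;
  return 1;
}

int
main ()
{
  struct toy_expr a = { "a", { NULL, NULL } };
  struct toy_expr t = { "target", { NULL, NULL } };
  struct toy_expr sum = { NULL, { &a, &t } };

  printf ("safe? %d\n", toy_safe_from_p ("target", &sum));	/* 0 */
  return 0;
}
#endif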
4457
4458/* Subroutine of expand_expr: return nonzero iff EXP is an
4459 expression whose type is statically determinable. */
4460
4461static int
4462fixed_type_p (exp)
4463 tree exp;
4464{
4465 if (TREE_CODE (exp) == PARM_DECL
4466 || TREE_CODE (exp) == VAR_DECL
4467 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4468 || TREE_CODE (exp) == COMPONENT_REF
4469 || TREE_CODE (exp) == ARRAY_REF)
4470 return 1;
4471 return 0;
4472}
4473\f
4474/* expand_expr: generate code for computing expression EXP.
4475 An rtx for the computed value is returned. The value is never null.
4476 In the case of a void EXP, const0_rtx is returned.
4477
4478 The value may be stored in TARGET if TARGET is nonzero.
4479 TARGET is just a suggestion; callers must assume that
4480 the rtx returned may not be the same as TARGET.
4481
4482 If TARGET is CONST0_RTX, it means that the value will be ignored.
4483
4484 If TMODE is not VOIDmode, it suggests generating the
4485 result in mode TMODE. But this is done only when convenient.
4486	 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4487 TMODE is just a suggestion; callers must assume that
4488 the rtx returned may not have mode TMODE.
4489
d6a5ac33
RK
4490 Note that TARGET may have neither TMODE nor MODE. In that case, it
4491 probably will not be used.
bbf6f052
RK
4492
4493 If MODIFIER is EXPAND_SUM then when EXP is an addition
4494 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4495 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4496 products as above, or REG or MEM, or constant.
4497 Ordinarily in such cases we would output mul or add instructions
4498 and then return a pseudo reg containing the sum.
4499
4500 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4501 it also marks a label as absolutely required (it can't be dead).
26fcb35a 4502 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
4503 This is used for outputting expressions used in initializers.
4504
4505 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4506 with a constant address even if that address is not normally legitimate.
4507 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
bbf6f052
RK
4508
4509rtx
4510expand_expr (exp, target, tmode, modifier)
4511 register tree exp;
4512 rtx target;
4513 enum machine_mode tmode;
4514 enum expand_modifier modifier;
4515{
b50d17a1
RK
4516 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4517 This is static so it will be accessible to our recursive callees. */
4518 static tree placeholder_list = 0;
bbf6f052
RK
4519 register rtx op0, op1, temp;
4520 tree type = TREE_TYPE (exp);
4521 int unsignedp = TREE_UNSIGNED (type);
4522 register enum machine_mode mode = TYPE_MODE (type);
4523 register enum tree_code code = TREE_CODE (exp);
4524 optab this_optab;
4525 /* Use subtarget as the target for operand 0 of a binary operation. */
4526 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4527 rtx original_target = target;
ca695ac9 4528 /* Maybe defer this until sure not doing bytecode? */
dd27116b
RK
4529 int ignore = (target == const0_rtx
4530 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
4531 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4532 || code == COND_EXPR)
dd27116b 4533 && TREE_CODE (type) == VOID_TYPE));
bbf6f052
RK
4534 tree context;
4535
ca695ac9 4536
1d556704 4537 if (output_bytecode && modifier != EXPAND_INITIALIZER)
ca695ac9
JB
4538 {
4539 bc_expand_expr (exp);
4540 return NULL;
4541 }
4542
bbf6f052
RK
4543 /* Don't use hard regs as subtargets, because the combiner
4544 can only handle pseudo regs. */
4545 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4546 subtarget = 0;
4547 /* Avoid subtargets inside loops,
4548 since they hide some invariant expressions. */
4549 if (preserve_subexpressions_p ())
4550 subtarget = 0;
4551
dd27116b
RK
4552 /* If we are going to ignore this result, we need only do something
4553 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
4554 is, short-circuit the most common cases here. Note that we must
4555 not call expand_expr with anything but const0_rtx in case this
4556 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 4557
dd27116b
RK
4558 if (ignore)
4559 {
4560 if (! TREE_SIDE_EFFECTS (exp))
4561 return const0_rtx;
4562
4563 /* Ensure we reference a volatile object even if value is ignored. */
4564 if (TREE_THIS_VOLATILE (exp)
4565 && TREE_CODE (exp) != FUNCTION_DECL
4566 && mode != VOIDmode && mode != BLKmode)
4567 {
4568 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4569 if (GET_CODE (temp) == MEM)
4570 temp = copy_to_reg (temp);
4571 return const0_rtx;
4572 }
4573
4574 if (TREE_CODE_CLASS (code) == '1')
4575 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4576 VOIDmode, modifier);
4577 else if (TREE_CODE_CLASS (code) == '2'
4578 || TREE_CODE_CLASS (code) == '<')
4579 {
4580 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4581 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4582 return const0_rtx;
4583 }
4584 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4585 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4586 /* If the second operand has no side effects, just evaluate
4587 the first. */
4588 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4589 VOIDmode, modifier);
dd27116b 4590
90764a87 4591 target = 0;
dd27116b 4592 }
bbf6f052 4593
e44842fe
RK
4594 /* If will do cse, generate all results into pseudo registers
4595 since 1) that allows cse to find more things
4596 and 2) otherwise cse could produce an insn the machine
4597 cannot support. */
4598
bbf6f052
RK
4599 if (! cse_not_expected && mode != BLKmode && target
4600 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4601 target = subtarget;
4602
bbf6f052
RK
4603 switch (code)
4604 {
4605 case LABEL_DECL:
b552441b
RS
4606 {
4607 tree function = decl_function_context (exp);
4608 /* Handle using a label in a containing function. */
4609 if (function != current_function_decl && function != 0)
4610 {
4611 struct function *p = find_function_data (function);
4612 /* Allocate in the memory associated with the function
4613 that the label is in. */
4614 push_obstacks (p->function_obstack,
4615 p->function_maybepermanent_obstack);
4616
4617 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4618 label_rtx (exp), p->forced_labels);
4619 pop_obstacks ();
4620 }
4621 else if (modifier == EXPAND_INITIALIZER)
4622 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4623 label_rtx (exp), forced_labels);
26fcb35a 4624 temp = gen_rtx (MEM, FUNCTION_MODE,
b552441b 4625 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
26fcb35a
RS
4626 if (function != current_function_decl && function != 0)
4627 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4628 return temp;
b552441b 4629 }
bbf6f052
RK
4630
4631 case PARM_DECL:
4632 if (DECL_RTL (exp) == 0)
4633 {
4634 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 4635 return CONST0_RTX (mode);
bbf6f052
RK
4636 }
4637
d6a5ac33
RK
4638 /* ... fall through ... */
4639
bbf6f052 4640 case VAR_DECL:
2dca20cd
RS
4641 /* If a static var's type was incomplete when the decl was written,
4642 but the type is complete now, lay out the decl now. */
4643 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4644 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4645 {
4646 push_obstacks_nochange ();
4647 end_temporary_allocation ();
4648 layout_decl (exp, 0);
4649 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4650 pop_obstacks ();
4651 }
d6a5ac33
RK
4652
4653 /* ... fall through ... */
4654
2dca20cd 4655 case FUNCTION_DECL:
bbf6f052
RK
4656 case RESULT_DECL:
4657 if (DECL_RTL (exp) == 0)
4658 abort ();
d6a5ac33 4659
e44842fe
RK
4660	 /* Ensure the variable is marked as used even if it doesn't go through
4661	 a parser.  If it hasn't been used yet, write out an external
4662 definition. */
4663 if (! TREE_USED (exp))
4664 {
4665 assemble_external (exp);
4666 TREE_USED (exp) = 1;
4667 }
4668
dc6d66b3
RK
4669 /* Show we haven't gotten RTL for this yet. */
4670 temp = 0;
4671
bbf6f052
RK
4672 /* Handle variables inherited from containing functions. */
4673 context = decl_function_context (exp);
4674
4675 /* We treat inline_function_decl as an alias for the current function
4676 because that is the inline function whose vars, types, etc.
4677 are being merged into the current function.
4678 See expand_inline_function. */
d6a5ac33 4679
bbf6f052
RK
4680 if (context != 0 && context != current_function_decl
4681 && context != inline_function_decl
4682 /* If var is static, we don't need a static chain to access it. */
4683 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4684 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4685 {
4686 rtx addr;
4687
4688 /* Mark as non-local and addressable. */
81feeecb 4689 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
4690 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4691 abort ();
bbf6f052
RK
4692 mark_addressable (exp);
4693 if (GET_CODE (DECL_RTL (exp)) != MEM)
4694 abort ();
4695 addr = XEXP (DECL_RTL (exp), 0);
4696 if (GET_CODE (addr) == MEM)
d6a5ac33
RK
4697 addr = gen_rtx (MEM, Pmode,
4698 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
4699 else
4700 addr = fix_lexical_addr (addr, exp);
dc6d66b3 4701 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 4702 }
4af3895e 4703
bbf6f052
RK
4704 /* This is the case of an array whose size is to be determined
4705 from its initializer, while the initializer is still being parsed.
4706 See expand_decl. */
d6a5ac33 4707
dc6d66b3
RK
4708 else if (GET_CODE (DECL_RTL (exp)) == MEM
4709 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4710 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 4711 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
4712
4713 /* If DECL_RTL is memory, we are in the normal case and either
4714 the address is not valid or it is not a register and -fforce-addr
4715 is specified, get the address into a register. */
4716
dc6d66b3
RK
4717 else if (GET_CODE (DECL_RTL (exp)) == MEM
4718 && modifier != EXPAND_CONST_ADDRESS
4719 && modifier != EXPAND_SUM
4720 && modifier != EXPAND_INITIALIZER
4721 && (! memory_address_p (DECL_MODE (exp),
4722 XEXP (DECL_RTL (exp), 0))
4723 || (flag_force_addr
4724 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4725 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 4726 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 4727
dc6d66b3
RK
4728 /* If we got something, return it. But first, set the alignment
4729	 if the address is a register.  */
4730 if (temp != 0)
4731 {
4732 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4733 mark_reg_pointer (XEXP (temp, 0),
4734 DECL_ALIGN (exp) / BITS_PER_UNIT);
4735
4736 return temp;
4737 }
4738
1499e0a8
RK
4739 /* If the mode of DECL_RTL does not match that of the decl, it
4740 must be a promoted value. We return a SUBREG of the wanted mode,
4741 but mark it so that we know that it was already extended. */
4742
4743 if (GET_CODE (DECL_RTL (exp)) == REG
4744 && GET_MODE (DECL_RTL (exp)) != mode)
4745 {
1499e0a8
RK
4746 /* Get the signedness used for this variable. Ensure we get the
4747 same mode we got when the variable was declared. */
78911e8b
RK
4748 if (GET_MODE (DECL_RTL (exp))
4749 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
4750 abort ();
4751
4752 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4753 SUBREG_PROMOTED_VAR_P (temp) = 1;
4754 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4755 return temp;
4756 }
4757
bbf6f052
RK
4758 return DECL_RTL (exp);
4759
4760 case INTEGER_CST:
4761 return immed_double_const (TREE_INT_CST_LOW (exp),
4762 TREE_INT_CST_HIGH (exp),
4763 mode);
4764
4765 case CONST_DECL:
4766 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4767
4768 case REAL_CST:
4769 /* If optimized, generate immediate CONST_DOUBLE
4770 which will be turned into memory by reload if necessary.
4771
4772 We used to force a register so that loop.c could see it. But
4773 this does not allow gen_* patterns to perform optimizations with
4774 the constants. It also produces two insns in cases like "x = 1.0;".
4775 On most machines, floating-point constants are not permitted in
4776 many insns, so we'd end up copying it to a register in any case.
4777
4778 Now, we do the copying in expand_binop, if appropriate. */
4779 return immed_real_const (exp);
4780
4781 case COMPLEX_CST:
4782 case STRING_CST:
4783 if (! TREE_CST_RTL (exp))
4784 output_constant_def (exp);
4785
4786 /* TREE_CST_RTL probably contains a constant address.
4787 On RISC machines where a constant address isn't valid,
4788 make some insns to get that address into a register. */
4789 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4790 && modifier != EXPAND_CONST_ADDRESS
4791 && modifier != EXPAND_INITIALIZER
4792 && modifier != EXPAND_SUM
d6a5ac33
RK
4793 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4794 || (flag_force_addr
4795 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
4796 return change_address (TREE_CST_RTL (exp), VOIDmode,
4797 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4798 return TREE_CST_RTL (exp);
4799
4800 case SAVE_EXPR:
4801 context = decl_function_context (exp);
d6a5ac33 4802
bbf6f052
RK
4803 /* We treat inline_function_decl as an alias for the current function
4804 because that is the inline function whose vars, types, etc.
4805 are being merged into the current function.
4806 See expand_inline_function. */
4807 if (context == current_function_decl || context == inline_function_decl)
4808 context = 0;
4809
4810 /* If this is non-local, handle it. */
4811 if (context)
4812 {
4813 temp = SAVE_EXPR_RTL (exp);
4814 if (temp && GET_CODE (temp) == REG)
4815 {
4816 put_var_into_stack (exp);
4817 temp = SAVE_EXPR_RTL (exp);
4818 }
4819 if (temp == 0 || GET_CODE (temp) != MEM)
4820 abort ();
4821 return change_address (temp, mode,
4822 fix_lexical_addr (XEXP (temp, 0), exp));
4823 }
4824 if (SAVE_EXPR_RTL (exp) == 0)
4825 {
06089a8b
RK
4826 if (mode == VOIDmode)
4827 temp = const0_rtx;
4828 else
4829 temp = assign_temp (type, 0, 0, 0);
1499e0a8 4830
bbf6f052 4831 SAVE_EXPR_RTL (exp) = temp;
bbf6f052
RK
4832 if (!optimize && GET_CODE (temp) == REG)
4833 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4834 save_expr_regs);
ff78f773
RK
4835
4836 /* If the mode of TEMP does not match that of the expression, it
4837 must be a promoted value. We pass store_expr a SUBREG of the
4838 wanted mode but mark it so that we know that it was already
4839 extended. Note that `unsignedp' was modified above in
4840 this case. */
4841
4842 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4843 {
4844 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4845 SUBREG_PROMOTED_VAR_P (temp) = 1;
4846 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4847 }
4848
4c7a0be9
JW
4849 if (temp == const0_rtx)
4850 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4851 else
4852 store_expr (TREE_OPERAND (exp, 0), temp, 0);
bbf6f052 4853 }
1499e0a8
RK
4854
4855 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4856 must be a promoted value. We return a SUBREG of the wanted mode,
adc22a04 4857 but mark it so that we know that it was already extended. */
1499e0a8
RK
4858
4859 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4860 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4861 {
e70d22c8
RK
4862 /* Compute the signedness and make the proper SUBREG. */
4863 promote_mode (type, mode, &unsignedp, 0);
4864 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
4865 SUBREG_PROMOTED_VAR_P (temp) = 1;
4866 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4867 return temp;
4868 }
4869
bbf6f052
RK
4870 return SAVE_EXPR_RTL (exp);
4871
b50d17a1
RK
4872 case PLACEHOLDER_EXPR:
4873 /* If there is an object on the head of the placeholder list,
4874	 see if some object in its references is of type TYPE.  For
4875 further information, see tree.def. */
4876 if (placeholder_list)
4877 {
4878 tree object;
f59d43a9 4879 tree old_list = placeholder_list;
b50d17a1
RK
4880
4881 for (object = TREE_PURPOSE (placeholder_list);
330446eb
RK
4882 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4883 != TYPE_MAIN_VARIANT (type))
b50d17a1 4884 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4805bfa0
RK
4885 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4886 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4887 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
b50d17a1
RK
4888 object = TREE_OPERAND (object, 0))
4889 ;
4890
330446eb
RK
4891 if (object != 0
4892 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4893 == TYPE_MAIN_VARIANT (type)))
f59d43a9
RK
4894 {
4895 /* Expand this object skipping the list entries before
4896 it was found in case it is also a PLACEHOLDER_EXPR.
4897 In that case, we want to translate it using subsequent
4898 entries. */
4899 placeholder_list = TREE_CHAIN (placeholder_list);
4900 temp = expand_expr (object, original_target, tmode, modifier);
4901 placeholder_list = old_list;
4902 return temp;
4903 }
b50d17a1
RK
4904 }
4905
4906 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4907 abort ();
4908
4909 case WITH_RECORD_EXPR:
4910 /* Put the object on the placeholder list, expand our first operand,
4911 and pop the list. */
4912 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4913 placeholder_list);
4914 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4915 tmode, modifier);
4916 placeholder_list = TREE_CHAIN (placeholder_list);
4917 return target;
4918
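/* A minimal standalone sketch of the placeholder machinery above (not
   part of the compiler; struct ph_entry and resolve_placeholder are
   hypothetical stand-ins).  WITH_RECORD_EXPR pushes an object on a
   list, a PLACEHOLDER_EXPR inside the expression resolves against the
   innermost pushed object of the matching type, and the list is
   popped afterwards.  */
#if 0
#include <stdio.h>
#include <stddef.h>
#include <string.h>

struct ph_entry
{
  const char *type;		/* type the pushed object provides */
  int value;			/* stand-in for the object itself */
  struct ph_entry *next;
};

static struct ph_entry *toy_placeholder_list;

static int
resolve_placeholder (const char *type)
{
  struct ph_entry *p;

  /* Search from the innermost entry outward, as the PLACEHOLDER_EXPR
     case above walks placeholder_list.  */
  for (p = toy_placeholder_list; p != NULL; p = p->next)
    if (strcmp (p->type, type) == 0)
      return p->value;
  return -1;			/* no match: the code above aborts */
}

int
main ()
{
  struct ph_entry rec = { "struct s", 42, NULL };

  toy_placeholder_list = &rec;		/* WITH_RECORD_EXPR: push */
  printf ("resolves to %d\n", resolve_placeholder ("struct s"));
  toy_placeholder_list = rec.next;	/* pop */
  return 0;
}
#endif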
bbf6f052 4919 case EXIT_EXPR:
e44842fe
RK
4920 expand_exit_loop_if_false (NULL_PTR,
4921 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
4922 return const0_rtx;
4923
4924 case LOOP_EXPR:
0088fcb1 4925 push_temp_slots ();
bbf6f052
RK
4926 expand_start_loop (1);
4927 expand_expr_stmt (TREE_OPERAND (exp, 0));
4928 expand_end_loop ();
0088fcb1 4929 pop_temp_slots ();
bbf6f052
RK
4930
4931 return const0_rtx;
4932
4933 case BIND_EXPR:
4934 {
4935 tree vars = TREE_OPERAND (exp, 0);
4936 int vars_need_expansion = 0;
4937
4938 /* Need to open a binding contour here because
4939	 if there are any cleanups they must be contained here.  */
4940 expand_start_bindings (0);
4941
2df53c0b
RS
4942 /* Mark the corresponding BLOCK for output in its proper place. */
4943 if (TREE_OPERAND (exp, 2) != 0
4944 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4945 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
4946
4947 /* If VARS have not yet been expanded, expand them now. */
4948 while (vars)
4949 {
4950 if (DECL_RTL (vars) == 0)
4951 {
4952 vars_need_expansion = 1;
4953 expand_decl (vars);
4954 }
4955 expand_decl_init (vars);
4956 vars = TREE_CHAIN (vars);
4957 }
4958
4959 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4960
4961 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4962
4963 return temp;
4964 }
4965
4966 case RTL_EXPR:
4967 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4968 abort ();
4969 emit_insns (RTL_EXPR_SEQUENCE (exp));
4970 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
99310285 4971 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
ca814259 4972 free_temps_for_rtl_expr (exp);
bbf6f052
RK
4973 return RTL_EXPR_RTL (exp);
4974
4975 case CONSTRUCTOR:
dd27116b
RK
4976 /* If we don't need the result, just ensure we evaluate any
4977 subexpressions. */
4978 if (ignore)
4979 {
4980 tree elt;
4981 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4982 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4983 return const0_rtx;
4984 }
3207b172 4985
4af3895e
JVA
4986 /* All elts simple constants => refer to a constant in memory. But
4987 if this is a non-BLKmode mode, let it store a field at a time
4988 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 4989 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
4990 store directly into the target unless the type is large enough
4991 that memcpy will be used. If we are making an initializer and
3207b172 4992 all operands are constant, put it in memory as well. */
dd27116b 4993 else if ((TREE_STATIC (exp)
3207b172
RK
4994 && ((mode == BLKmode
4995 && ! (target != 0 && safe_from_p (target, exp)))
d720b9d1
RK
4996 || TREE_ADDRESSABLE (exp)
4997 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4998 && (move_by_pieces_ninsns
67225c15
RK
4999 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5000 TYPE_ALIGN (type) / BITS_PER_UNIT)
9de08200
RK
5001 > MOVE_RATIO)
5002 && ! mostly_zeros_p (exp))))
dd27116b 5003 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
5004 {
5005 rtx constructor = output_constant_def (exp);
b552441b
RS
5006 if (modifier != EXPAND_CONST_ADDRESS
5007 && modifier != EXPAND_INITIALIZER
5008 && modifier != EXPAND_SUM
d6a5ac33
RK
5009 && (! memory_address_p (GET_MODE (constructor),
5010 XEXP (constructor, 0))
5011 || (flag_force_addr
5012 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
5013 constructor = change_address (constructor, VOIDmode,
5014 XEXP (constructor, 0));
5015 return constructor;
5016 }
5017
bbf6f052
RK
5018 else
5019 {
5020 if (target == 0 || ! safe_from_p (target, exp))
06089a8b
RK
5021 {
5022 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5023 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5024 else
5025 target = assign_temp (type, 0, 1, 1);
5026 }
07604beb
RK
5027
5028 if (TREE_READONLY (exp))
5029 {
9151b3bf
RK
5030 if (GET_CODE (target) == MEM)
5031 target = change_address (target, GET_MODE (target),
5032 XEXP (target, 0));
07604beb
RK
5033 RTX_UNCHANGING_P (target) = 1;
5034 }
5035
e1a43f73 5036 store_constructor (exp, target, 0);
bbf6f052
RK
5037 return target;
5038 }
5039
5040 case INDIRECT_REF:
5041 {
5042 tree exp1 = TREE_OPERAND (exp, 0);
5043 tree exp2;
5044
5045 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
5046 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
5047 This code has the same general effect as simply doing
5048 expand_expr on the save expr, except that the expression PTR
5049 is computed for use as a memory address. This means different
5050 code, suitable for indexing, may be generated. */
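/* For example (assumed front-end behavior): for

       *p += 2;

   P is wrapped in a SAVE_EXPR, so the address is expanded once with
   EXPAND_SUM here and cached in SAVE_EXPR_RTL for the second use.  */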
5051 if (TREE_CODE (exp1) == SAVE_EXPR
5052 && SAVE_EXPR_RTL (exp1) == 0
5053 && TYPE_MODE (TREE_TYPE (exp1)) == ptr_mode)
5054 {
5055 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
5056 VOIDmode, EXPAND_SUM);
5057 op0 = memory_address (mode, temp);
5058 op0 = copy_all_regs (op0);
5059 SAVE_EXPR_RTL (exp1) = op0;
5060 }
5061 else
5062 {
5063 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5064 op0 = memory_address (mode, op0);
5065 }
5066
5067 temp = gen_rtx (MEM, mode, op0);
5068 /* If address was computed by addition,
5069 mark this as an element of an aggregate. */
5070 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5071 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5072 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5073 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5074 || (TREE_CODE (exp1) == ADDR_EXPR
5075 && (exp2 = TREE_OPERAND (exp1, 0))
5076 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5077 MEM_IN_STRUCT_P (temp) = 1;
5078 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5079
5080 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5081 here, because, in C and C++, the fact that a location is accessed
5082 through a pointer to const does not mean that the value there can
5083 never change. Languages where it can never change should
5084 also set TREE_STATIC. */
5085 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5086 return temp;
5087 }
5088
5089 case ARRAY_REF:
5090 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5091 abort ();
5092
5093 {
5094 tree array = TREE_OPERAND (exp, 0);
5095 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5096 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5097 tree index = TREE_OPERAND (exp, 1);
5098 tree index_type = TREE_TYPE (index);
5099 int i;
5100
5101 if (TREE_CODE (low_bound) != INTEGER_CST
5102 && contains_placeholder_p (low_bound))
5103 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5104
5105 /* Optimize the special-case of a zero lower bound.
5106
5107 We convert the low_bound to sizetype to avoid some problems
5108 with constant folding. (E.g. suppose the lower bound is 1,
5109 and its mode is QI. Without the conversion, (ARRAY
5110 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5111 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5112
5113 But sizetype isn't quite right either (especially if
5114 the lowbound is negative). FIXME */
5115
5116 if (! integer_zerop (low_bound))
5117 index = fold (build (MINUS_EXPR, index_type, index,
5118 convert (sizetype, low_bound)));
5119
5120 if ((TREE_CODE (index) != INTEGER_CST
5121 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5122 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5123 {
5124 /* Nonconstant array index or nonconstant element size, and
5125 not an array in an unaligned (packed) structure field.
5126 Generate the tree for *(&array+index) and expand that,
5127 except do it in a language-independent way
5128 and don't complain about non-lvalue arrays.
5129 `mark_addressable' should already have been called
5130 for any array for which this case will be reached. */
5131
5132 /* Don't forget the const or volatile flag from the array
5133 element. */
5134 tree variant_type = build_type_variant (type,
5135 TREE_READONLY (exp),
5136 TREE_THIS_VOLATILE (exp));
5137 tree array_adr = build1 (ADDR_EXPR,
5138 build_pointer_type (variant_type), array);
5139 tree elt;
5140 tree size = size_in_bytes (type);
5141
5142 /* Convert the integer argument to a type the same size as sizetype
5143 so the multiply won't overflow spuriously. */
5144 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5145 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5146 index);
5147
5148 if (TREE_CODE (size) != INTEGER_CST
5149 && contains_placeholder_p (size))
5150 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5151
5152 /* Don't think the address has side effects
5153 just because the array does.
5154 (In some cases the address might have side effects,
5155 and we fail to record that fact here. However, it should not
5156 matter, since expand_expr should not care.) */
5157 TREE_SIDE_EFFECTS (array_adr) = 0;
5158
5159 elt
5160 = build1
5161 (INDIRECT_REF, type,
5162 fold (build (PLUS_EXPR,
5163 TYPE_POINTER_TO (variant_type),
5164 array_adr,
5165 fold
5166 (build1
5167 (NOP_EXPR,
5168 TYPE_POINTER_TO (variant_type),
5169 fold (build (MULT_EXPR, TREE_TYPE (index),
5170 index,
5171 convert (TREE_TYPE (index),
5172 size))))))));
5173
5174 /* Volatility, etc., of new expression is same as old
5175 expression. */
5176 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5177 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5178 TREE_READONLY (elt) = TREE_READONLY (exp);
5179
5180 return expand_expr (elt, target, tmode, modifier);
5181 }
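/* A sketch of the rewrite performed above (illustrative):

       a[i]   ==>   *(&a + (i * sizeof (a[0])))

   which is then expanded like any other INDIRECT_REF.  */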
5182
5183 /* Fold an expression like: "foo"[2].
5184 This is not done in fold so it won't happen inside &.
5185 Don't fold if this is for wide characters since it's too
5186 difficult to do correctly and this is a very rare case. */
5187
5188 if (TREE_CODE (array) == STRING_CST
5189 && TREE_CODE (index) == INTEGER_CST
5190 && !TREE_INT_CST_HIGH (index)
5191 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5192 && GET_MODE_CLASS (mode) == MODE_INT
5193 && GET_MODE_SIZE (mode) == 1)
5194 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5195
5196 /* If this is a constant index into a constant array,
5197 just get the value from the array. Handle both the cases when
5198 we have an explicit constructor and when our operand is a variable
5199 that was declared const. */
5200
5201 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5202 {
5203 if (TREE_CODE (index) == INTEGER_CST
5204 && TREE_INT_CST_HIGH (index) == 0)
5205 {
5206 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5207
5208 i = TREE_INT_CST_LOW (index);
5209 while (elem && i--)
5210 elem = TREE_CHAIN (elem);
5211 if (elem)
5212 return expand_expr (fold (TREE_VALUE (elem)), target,
5213 tmode, modifier);
5214 }
5215 }
5216
5217 else if (optimize >= 1
5218 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5219 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5220 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5221 {
5222 if (TREE_CODE (index) == INTEGER_CST
5223 && TREE_INT_CST_HIGH (index) == 0)
5224 {
5225 tree init = DECL_INITIAL (array);
5226
5227 i = TREE_INT_CST_LOW (index);
5228 if (TREE_CODE (init) == CONSTRUCTOR)
5229 {
5230 tree elem = CONSTRUCTOR_ELTS (init);
5231
5232 while (elem
5233 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5234 elem = TREE_CHAIN (elem);
5235 if (elem)
5236 return expand_expr (fold (TREE_VALUE (elem)), target,
5237 tmode, modifier);
5238 }
5239 else if (TREE_CODE (init) == STRING_CST
5240 && i < TREE_STRING_LENGTH (init))
5241 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5242 }
5243 }
5244 }
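/* For example (illustrative): both of these references fold to a
   constant here, the second one when optimizing:

       "foo"[2]                                 ==> 'o'
       const int k[2] = {7, 9};  ... k[1] ...   ==> 9            */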
5245
5246 /* Treat array-ref with constant index as a component-ref. */
5247
5248 case COMPONENT_REF:
5249 case BIT_FIELD_REF:
5250 /* If the operand is a CONSTRUCTOR, we can just extract the
5251 appropriate field if it is present. Don't do this if we have
5252 already written the data since we want to refer to that copy
5253 and varasm.c assumes that's what we'll do. */
5254 if (code != ARRAY_REF
5255 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5256 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5257 {
5258 tree elt;
5259
5260 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5261 elt = TREE_CHAIN (elt))
5262 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5263 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5264 }
5265
5266 {
5267 enum machine_mode mode1;
5268 int bitsize;
5269 int bitpos;
5270 tree offset;
5271 int volatilep = 0;
5272 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5273 &mode1, &unsignedp, &volatilep);
5274 int alignment;
5275
5276 /* If we got back the original object, something is wrong. Perhaps
5277 we are evaluating an expression too early. In any event, don't
5278 infinitely recurse. */
5279 if (tem == exp)
5280 abort ();
5281
5282 /* If TEM's type is a union of variable size, pass TARGET to the inner
5283 computation, since it will need a temporary and TARGET is known
5284 to be safe to use. This occurs in unchecked conversion in Ada. */
5285
5286 op0 = expand_expr (tem,
5287 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5288 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5289 != INTEGER_CST)
5290 ? target : NULL_RTX),
5291 VOIDmode,
5292 modifier == EXPAND_INITIALIZER ? modifier : 0);
5293
5294 /* If this is a constant, put it into a register if it is a
5295 legitimate constant, and into memory if it isn't. */
5296 if (CONSTANT_P (op0))
5297 {
5298 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5299 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5300 op0 = force_reg (mode, op0);
5301 else
5302 op0 = validize_mem (force_const_mem (mode, op0));
5303 }
5304
5305 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
5306 if (offset != 0)
5307 {
5308 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5309
5310 if (GET_CODE (op0) != MEM)
5311 abort ();
5312 op0 = change_address (op0, VOIDmode,
5313 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5314 force_reg (ptr_mode, offset_rtx)));
5315 /* If we have a variable offset, the known alignment
5316 is only that of the innermost structure containing the field.
5317 (Actually, we could sometimes do better by using the
5318 size of an element of the innermost array, but no need.) */
5319 if (TREE_CODE (exp) == COMPONENT_REF
5320 || TREE_CODE (exp) == BIT_FIELD_REF)
5321 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5322 / BITS_PER_UNIT);
5323 }
5324
5325 /* Don't forget about volatility even if this is a bitfield. */
5326 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5327 {
5328 op0 = copy_rtx (op0);
5329 MEM_VOLATILE_P (op0) = 1;
5330 }
5331
5332 /* In cases where an aligned union has an unaligned object
5333 as a field, we might be extracting a BLKmode value from
5334 an integer-mode (e.g., SImode) object. Handle this case
5335 by doing the extract into an object as wide as the field
5336 (which we know to be the width of a basic mode), then
5337 storing into memory, and changing the mode to BLKmode. */
5338 if (mode1 == VOIDmode
5339 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5340 || (modifier != EXPAND_CONST_ADDRESS
5341 && modifier != EXPAND_SUM
5342 && modifier != EXPAND_INITIALIZER
5343 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
5344 /* If the field isn't aligned enough to fetch as a memref,
5345 fetch it as a bit field. */
5346 || (SLOW_UNALIGNED_ACCESS
5347 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5348 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5349 {
5350 enum machine_mode ext_mode = mode;
5351
5352 if (ext_mode == BLKmode)
5353 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5354
5355 if (ext_mode == BLKmode)
5356 abort ();
5357
5358 op0 = validize_mem (op0);
5359
5360 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5361 mark_reg_pointer (XEXP (op0, 0), alignment);
5362
5363 op0 = extract_bit_field (op0, bitsize, bitpos,
5364 unsignedp, target, ext_mode, ext_mode,
5365 alignment,
5366 int_size_in_bytes (TREE_TYPE (tem)));
5367 if (mode == BLKmode)
5368 {
5369 rtx new = assign_stack_temp (ext_mode,
5370 bitsize / BITS_PER_UNIT, 0);
5371
5372 emit_move_insn (new, op0);
5373 op0 = copy_rtx (new);
5374 PUT_MODE (op0, BLKmode);
5375 MEM_IN_STRUCT_P (op0) = 1;
5376 }
5377
5378 return op0;
5379 }
5380
5381 /* If the result is BLKmode, use that to access the object
5382 now as well. */
5383 if (mode == BLKmode)
5384 mode1 = BLKmode;
5385
5386 /* Get a reference to just this component. */
5387 if (modifier == EXPAND_CONST_ADDRESS
5388 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5389 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5390 (bitpos / BITS_PER_UNIT)));
5391 else
5392 op0 = change_address (op0, mode1,
5393 plus_constant (XEXP (op0, 0),
5394 (bitpos / BITS_PER_UNIT)));
5395 if (GET_CODE (XEXP (op0, 0)) == REG)
5396 mark_reg_pointer (XEXP (op0, 0), alignment);
5397
5398 MEM_IN_STRUCT_P (op0) = 1;
5399 MEM_VOLATILE_P (op0) |= volatilep;
5400 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5401 return op0;
5402 if (target == 0)
5403 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5404 convert_move (target, op0, unsignedp);
5405 return target;
5406 }
5407
5408 case OFFSET_REF:
5409 {
5410 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
5411 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
5412 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
5413 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
5414 MEM_IN_STRUCT_P (temp) = 1;
5415 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
5416 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
5417 a location is accessed through a pointer to const does not mean
5418 that the value there can never change. */
5419 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
5420#endif
5421 return temp;
5422 }
5423
5424 /* Intended for a reference to a buffer of a file-object in Pascal.
5425 But it's not certain that a special tree code will really be
5426 necessary for these. INDIRECT_REF might work for them. */
5427 case BUFFER_REF:
5428 abort ();
5429
5430 case IN_EXPR:
5431 {
5432 /* Pascal set IN expression.
5433
5434 Algorithm:
5435 rlo = set_low - (set_low%bits_per_word);
5436 the_word = set [ (index - rlo)/bits_per_word ];
5437 bit_index = index % bits_per_word;
5438 bitmask = 1 << bit_index;
5439 return !!(the_word & bitmask); */
5440
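/* A worked instance of the algorithm above (illustrative; 8-bit
   units, set_low = 0, index = 11):

       rlo       = 0
       the_word  = set[(11 - 0) / 8]   (the second byte)
       bit_index = 11 % 8 = 3
       bitmask   = 1 << 3                                         */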
5441 tree set = TREE_OPERAND (exp, 0);
5442 tree index = TREE_OPERAND (exp, 1);
5443 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5444 tree set_type = TREE_TYPE (set);
5445 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5446 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5447 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5448 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5449 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5450 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5451 rtx setaddr = XEXP (setval, 0);
5452 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5453 rtx rlow;
5454 rtx diff, quo, rem, addr, bit, result;
5455
5456 preexpand_calls (exp);
5457
5458 /* If domain is empty, answer is no. Likewise if index is constant
5459 and out of bounds. */
5460 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5461 && TREE_CODE (set_low_bound) == INTEGER_CST
5462 && tree_int_cst_lt (set_high_bound, set_low_bound)
5463 || (TREE_CODE (index) == INTEGER_CST
5464 && TREE_CODE (set_low_bound) == INTEGER_CST
5465 && tree_int_cst_lt (index, set_low_bound))
5466 || (TREE_CODE (set_high_bound) == INTEGER_CST
5467 && TREE_CODE (index) == INTEGER_CST
5468 && tree_int_cst_lt (set_high_bound, index))))
5469 return const0_rtx;
5470
5471 if (target == 0)
5472 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5473
5474 /* If we get here, we have to generate the code for both cases
5475 (in range and out of range). */
5476
5477 op0 = gen_label_rtx ();
5478 op1 = gen_label_rtx ();
5479
5480 if (! (GET_CODE (index_val) == CONST_INT
5481 && GET_CODE (lo_r) == CONST_INT))
5482 {
5483 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5484 GET_MODE (index_val), iunsignedp, 0);
5485 emit_jump_insn (gen_blt (op1));
5486 }
5487
5488 if (! (GET_CODE (index_val) == CONST_INT
5489 && GET_CODE (hi_r) == CONST_INT))
5490 {
5491 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5492 GET_MODE (index_val), iunsignedp, 0);
5493 emit_jump_insn (gen_bgt (op1));
5494 }
5495
5496 /* Calculate the element number of bit zero in the first word
5497 of the set. */
5498 if (GET_CODE (lo_r) == CONST_INT)
5499 rlow = GEN_INT (INTVAL (lo_r)
5500 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5501 else
5502 rlow = expand_binop (index_mode, and_optab, lo_r,
5503 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5504 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5505
5506 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5507 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5508
5509 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5510 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5511 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5512 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5513
5514 addr = memory_address (byte_mode,
5515 expand_binop (index_mode, add_optab, diff,
5516 setaddr, NULL_RTX, iunsignedp,
5517 OPTAB_LIB_WIDEN));
5518
5519 /* Extract the bit we want to examine */
5520 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5521 gen_rtx (MEM, byte_mode, addr),
5522 make_tree (TREE_TYPE (index), rem),
5523 NULL_RTX, 1);
5524 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5525 GET_MODE (target) == byte_mode ? target : 0,
5526 1, OPTAB_LIB_WIDEN);
5527
5528 if (result != target)
5529 convert_move (target, result, 1);
5530
5531 /* Output the code to handle the out-of-range case. */
5532 emit_jump (op0);
5533 emit_label (op1);
5534 emit_move_insn (target, const0_rtx);
5535 emit_label (op0);
5536 return target;
5537 }
5538
5539 case WITH_CLEANUP_EXPR:
5540 if (RTL_EXPR_RTL (exp) == 0)
5541 {
5542 RTL_EXPR_RTL (exp)
5543 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5544 cleanups_this_call
5545 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5546 /* That's it for this cleanup. */
5547 TREE_OPERAND (exp, 2) = 0;
5548 (*interim_eh_hook) (NULL_TREE);
5549 }
5550 return RTL_EXPR_RTL (exp);
5551
5552 case CLEANUP_POINT_EXPR:
5553 {
5554 extern int temp_slot_level;
5555 tree old_cleanups = cleanups_this_call;
5556 int old_temp_level = target_temp_slot_level;
5557 push_temp_slots ();
5558 target_temp_slot_level = temp_slot_level;
5559 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5560 /* If we're going to use this value, load it up now. */
5561 if (! ignore)
5562 op0 = force_not_mem (op0);
5563 expand_cleanups_to (old_cleanups);
5564 preserve_temp_slots (op0);
5565 free_temp_slots ();
5566 pop_temp_slots ();
5567 target_temp_slot_level = old_temp_level;
5568 }
5569 return op0;
5570
5571 case CALL_EXPR:
5572 /* Check for a built-in function. */
5573 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5574 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5575 == FUNCTION_DECL)
5576 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5577 return expand_builtin (exp, target, subtarget, tmode, ignore);
5578
5579 /* If this call was expanded already by preexpand_calls,
5580 just return the result we got. */
5581 if (CALL_EXPR_RTL (exp) != 0)
5582 return CALL_EXPR_RTL (exp);
5583
5584 return expand_call (exp, target, ignore);
5585
5586 case NON_LVALUE_EXPR:
5587 case NOP_EXPR:
5588 case CONVERT_EXPR:
5589 case REFERENCE_EXPR:
5590 if (TREE_CODE (type) == UNION_TYPE)
5591 {
5592 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5593 if (target == 0)
5594 {
5595 if (mode != BLKmode)
5596 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5597 else
5598 target = assign_temp (type, 0, 1, 1);
5599 }
5600
5601 if (GET_CODE (target) == MEM)
5602 /* Store data into beginning of memory target. */
5603 store_expr (TREE_OPERAND (exp, 0),
5604 change_address (target, TYPE_MODE (valtype), 0), 0);
5605
5606 else if (GET_CODE (target) == REG)
5607 /* Store this field into a union of the proper type. */
5608 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5609 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5610 VOIDmode, 0, 1,
5611 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5612 else
5613 abort ();
5614
5615 /* Return the entire union. */
5616 return target;
5617 }
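/* For example (GNU C cast to union, assumed source form):

       union u { int i; float f; };  ... (union u) 3 ...

   The operand is stored at the beginning of the union, through
   memory with store_expr or into a register with store_field.  */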
5618
5619 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5620 {
5621 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5622 modifier);
5623
5624 /* If the signedness of the conversion differs and OP0 is
5625 a promoted SUBREG, clear that indication since we now
5626 have to do the proper extension. */
5627 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5628 && GET_CODE (op0) == SUBREG)
5629 SUBREG_PROMOTED_VAR_P (op0) = 0;
5630
5631 return op0;
5632 }
5633
5634 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5635 if (GET_MODE (op0) == mode)
5636 return op0;
5637
5638 /* If OP0 is a constant, just convert it into the proper mode. */
5639 if (CONSTANT_P (op0))
5640 return
5641 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5642 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5643
5644 if (modifier == EXPAND_INITIALIZER)
5645 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5646
5647 if (target == 0)
5648 return
5649 convert_to_mode (mode, op0,
5650 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5651 else
5652 convert_move (target, op0,
5653 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5654 return target;
5655
5656 case PLUS_EXPR:
5657 /* We come here from MINUS_EXPR when the second operand is a constant. */
5658 plus_expr:
5659 this_optab = add_optab;
5660
5661 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5662 something else, make sure we add the register to the constant and
5663 then to the other thing. This case can occur during strength
5664 reduction and doing it this way will produce better code if the
5665 frame pointer or argument pointer is eliminated.
5666
5667 fold-const.c will ensure that the constant is always in the inner
5668 PLUS_EXPR, so the only case we need to do anything about is if
5669 sp, ap, or fp is our second argument, in which case we must swap
5670 the innermost first argument and our second argument. */
5671
5672 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5673 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5674 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5675 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5676 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5677 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5678 {
5679 tree t = TREE_OPERAND (exp, 1);
5680
5681 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5682 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5683 }
5684
5685 /* If the result is to be ptr_mode and we are adding an integer to
5686 something, we might be forming a constant. So try to use
5687 plus_constant. If it produces a sum and we can't accept it,
5688 use force_operand. This allows P = &ARR[const] to generate
5689 efficient code on machines where a SYMBOL_REF is not a valid
5690 address.
5691
5692 If this is an EXPAND_SUM call, always return the sum. */
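/* For example (a sketch, assuming 4-byte int and static ARR): for
   P = &ARR[5], plus_constant folds the sum below to

       (const (plus (symbol_ref "arr") (const_int 20)))

   which an EXPAND_SUM or EXPAND_INITIALIZER caller accepts even when
   it is not yet a valid memory address.  */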
5693 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5694 || mode == ptr_mode)
5695 {
5696 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5697 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5698 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5699 {
5700 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5701 EXPAND_SUM);
5702 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5703 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5704 op1 = force_operand (op1, target);
5705 return op1;
5706 }
5707
5708 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5709 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5710 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5711 {
5712 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5713 EXPAND_SUM);
5714 if (! CONSTANT_P (op0))
5715 {
5716 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5717 VOIDmode, modifier);
5718 /* Don't go to both_summands if modifier
5719 says it's not right to return a PLUS. */
5720 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5721 goto binop2;
5722 goto both_summands;
5723 }
5724 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5725 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5726 op0 = force_operand (op0, target);
5727 return op0;
5728 }
5729 }
5730
5731 /* No sense saving up arithmetic to be done
5732 if it's all in the wrong mode to form part of an address.
5733 And force_operand won't know whether to sign-extend or
5734 zero-extend. */
5735 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5736 || mode != ptr_mode)
5737 goto binop;
5738
5739 preexpand_calls (exp);
5740 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5741 subtarget = 0;
5742
5743 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5744 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5745
5746 both_summands:
5747 /* Make sure any term that's a sum with a constant comes last. */
5748 if (GET_CODE (op0) == PLUS
5749 && CONSTANT_P (XEXP (op0, 1)))
5750 {
5751 temp = op0;
5752 op0 = op1;
5753 op1 = temp;
5754 }
5755 /* If adding to a sum including a constant,
5756 associate it to put the constant outside. */
5757 if (GET_CODE (op1) == PLUS
5758 && CONSTANT_P (XEXP (op1, 1)))
5759 {
5760 rtx constant_term = const0_rtx;
5761
5762 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5763 if (temp != 0)
5764 op0 = temp;
5765 /* Ensure that MULT comes first if there is one. */
5766 else if (GET_CODE (op0) == MULT)
5767 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5768 else
5769 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5770
5771 /* Let's also eliminate constants from op0 if possible. */
5772 op0 = eliminate_constant_term (op0, &constant_term);
5773
5774 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5775 their sum should be a constant. Form it into OP1, since the
5776 result we want will then be OP0 + OP1. */
5777
5778 temp = simplify_binary_operation (PLUS, mode, constant_term,
5779 XEXP (op1, 1));
5780 if (temp != 0)
5781 op1 = temp;
5782 else
5783 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5784 }
5785
5786 /* Put a constant term last and put a multiplication first. */
5787 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5788 temp = op1, op1 = op0, op0 = temp;
5789
5790 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5791 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5792
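/* For example (illustrative): if OP0 arrives as (plus X (const_int 4))
   and OP1 as Y, the code above rewrites the sum to

       (plus (plus X Y) (const_int 4))

   keeping the constant outermost, the canonical form for addresses.  */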
5793 case MINUS_EXPR:
5794 /* For initializers, we are allowed to return a MINUS of two
5795 symbolic constants. Here we handle all cases when both operands
5796 are constant. */
5797 /* Handle difference of two symbolic constants,
5798 for the sake of an initializer. */
5799 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5800 && really_constant_p (TREE_OPERAND (exp, 0))
5801 && really_constant_p (TREE_OPERAND (exp, 1)))
5802 {
5803 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5804 VOIDmode, modifier);
5805 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5806 VOIDmode, modifier);
5807
5808 /* If the last operand is a CONST_INT, use plus_constant of
5809 the negated constant. Else make the MINUS. */
5810 if (GET_CODE (op1) == CONST_INT)
5811 return plus_constant (op0, - INTVAL (op1));
5812 else
5813 return gen_rtx (MINUS, mode, op0, op1);
5814 }
5815 /* Convert A - const to A + (-const). */
5816 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5817 {
5818 tree negated = fold (build1 (NEGATE_EXPR, type,
5819 TREE_OPERAND (exp, 1)));
5820
5821 /* Deal with the case where we can't negate the constant
5822 in TYPE. */
5823 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5824 {
5825 tree newtype = signed_type (type);
5826 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5827 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5828 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5829
5830 if (! TREE_OVERFLOW (newneg))
5831 return expand_expr (convert (type,
5832 build (PLUS_EXPR, newtype,
5833 newop0, newneg)),
5834 target, tmode, modifier);
5835 }
5836 else
5837 {
5838 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5839 goto plus_expr;
5840 }
5841 }
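/* For example: A - 5 was just rewritten as A + (-5) and sent to
   plus_expr; if TYPE is unsigned, where -5 is not representable, the
   negation is redone in the corresponding signed type first.  */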
5842 this_optab = sub_optab;
5843 goto binop;
5844
5845 case MULT_EXPR:
5846 preexpand_calls (exp);
5847 /* If first operand is constant, swap them.
5848 Thus the following special case checks need only
5849 check the second operand. */
5850 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5851 {
5852 register tree t1 = TREE_OPERAND (exp, 0);
5853 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5854 TREE_OPERAND (exp, 1) = t1;
5855 }
5856
5857 /* Attempt to return something suitable for generating an
5858 indexed address, for machines that support that. */
5859
5860 if (modifier == EXPAND_SUM && mode == ptr_mode
5861 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5862 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5863 {
5864 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
5865
5866 /* Apply distributive law if OP0 is x+c. */
5867 if (GET_CODE (op0) == PLUS
5868 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5869 return gen_rtx (PLUS, mode,
5870 gen_rtx (MULT, mode, XEXP (op0, 0),
5871 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5872 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5873 * INTVAL (XEXP (op0, 1))));
5874
5875 if (GET_CODE (op0) != REG)
5876 op0 = force_operand (op0, NULL_RTX);
5877 if (GET_CODE (op0) != REG)
5878 op0 = copy_to_mode_reg (mode, op0);
5879
5880 return gen_rtx (MULT, mode, op0,
5881 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
5882 }
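/* For example (illustrative): with OP0 = (plus X (const_int 4)) and a
   constant multiplier of 3, the distributive law above yields

       (plus (mult X 3) (const_int 12))

   which remains usable as an address term under EXPAND_SUM.  */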
5883
5884 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5885 subtarget = 0;
5886
5887 /* Check for multiplying things that have been extended
5888 from a narrower type. If this machine supports multiplying
5889 in that narrower type with a result in the desired type,
5890 do it that way, and avoid the explicit type-conversion. */
5891 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5892 && TREE_CODE (type) == INTEGER_TYPE
5893 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5894 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5895 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5896 && int_fits_type_p (TREE_OPERAND (exp, 1),
5897 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5898 /* Don't use a widening multiply if a shift will do. */
5899 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5900 > HOST_BITS_PER_WIDE_INT)
5901 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5902 ||
5903 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5904 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5905 ==
5906 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5907 /* If both operands are extended, they must either both
5908 be zero-extended or both be sign-extended. */
5909 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5910 ==
5911 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5912 {
5913 enum machine_mode innermode
5914 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5915 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5916 ? smul_widen_optab : umul_widen_optab);
5917 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5918 ? umul_widen_optab : smul_widen_optab);
5919 if (mode == GET_MODE_WIDER_MODE (innermode))
5920 {
5921 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5922 {
5923 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5924 NULL_RTX, VOIDmode, 0);
5925 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5926 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5927 VOIDmode, 0);
5928 else
5929 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5930 NULL_RTX, VOIDmode, 0);
5931 goto binop2;
5932 }
5933 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5934 && innermode == word_mode)
5935 {
5936 rtx htem;
5937 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5938 NULL_RTX, VOIDmode, 0);
5939 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5940 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5941 VOIDmode, 0);
5942 else
5943 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5944 NULL_RTX, VOIDmode, 0);
5945 temp = expand_binop (mode, other_optab, op0, op1, target,
5946 unsignedp, OPTAB_LIB_WIDEN);
5947 htem = expand_mult_highpart_adjust (innermode,
5948 gen_highpart (innermode, temp),
5949 op0, op1,
5950 gen_highpart (innermode, temp),
5951 unsignedp);
5952 emit_move_insn (gen_highpart (innermode, temp), htem);
5953 return temp;
5954 }
5955 }
5956 }
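/* For example (pattern availability is machine-dependent):

       short a, b;  int prod = a * b;

   Both operands are sign-extensions from HImode, so a widening
   multiply such as a mulhisi3-style insn computes the SImode product
   without expanding the operands to SImode first.  */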
5957 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5958 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5959 return expand_mult (mode, op0, op1, target, unsignedp);
5960
5961 case TRUNC_DIV_EXPR:
5962 case FLOOR_DIV_EXPR:
5963 case CEIL_DIV_EXPR:
5964 case ROUND_DIV_EXPR:
5965 case EXACT_DIV_EXPR:
5966 preexpand_calls (exp);
5967 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5968 subtarget = 0;
5969 /* Possible optimization: compute the dividend with EXPAND_SUM
5970 then if the divisor is constant can optimize the case
5971 where some terms of the dividend have coeffs divisible by it. */
5972 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5973 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5974 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5975
5976 case RDIV_EXPR:
5977 this_optab = flodiv_optab;
5978 goto binop;
5979
5980 case TRUNC_MOD_EXPR:
5981 case FLOOR_MOD_EXPR:
5982 case CEIL_MOD_EXPR:
5983 case ROUND_MOD_EXPR:
5984 preexpand_calls (exp);
5985 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5986 subtarget = 0;
5987 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5988 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5989 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5990
5991 case FIX_ROUND_EXPR:
5992 case FIX_FLOOR_EXPR:
5993 case FIX_CEIL_EXPR:
5994 abort (); /* Not used for C. */
5995
5996 case FIX_TRUNC_EXPR:
5997 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5998 if (target == 0)
5999 target = gen_reg_rtx (mode);
6000 expand_fix (target, op0, unsignedp);
6001 return target;
6002
6003 case FLOAT_EXPR:
6004 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6005 if (target == 0)
6006 target = gen_reg_rtx (mode);
6007 /* expand_float can't figure out what to do if FROM has VOIDmode.
6008 So give it the correct mode. With -O, cse will optimize this. */
6009 if (GET_MODE (op0) == VOIDmode)
6010 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6011 op0);
6012 expand_float (target, op0,
6013 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6014 return target;
6015
6016 case NEGATE_EXPR:
6017 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6018 temp = expand_unop (mode, neg_optab, op0, target, 0);
6019 if (temp == 0)
6020 abort ();
6021 return temp;
6022
6023 case ABS_EXPR:
6024 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6025
6026 /* Handle complex values specially. */
6027 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6028 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6029 return expand_complex_abs (mode, op0, target, unsignedp);
6030
6031 /* Unsigned abs is simply the operand. Testing here means we don't
6032 risk generating incorrect code below. */
6033 if (TREE_UNSIGNED (type))
6034 return op0;
6035
6036 return expand_abs (mode, op0, target, unsignedp,
6037 safe_from_p (target, TREE_OPERAND (exp, 0)));
6038
6039 case MAX_EXPR:
6040 case MIN_EXPR:
6041 target = original_target;
6042 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6043 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6044 || GET_MODE (target) != mode
6045 || (GET_CODE (target) == REG
6046 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6047 target = gen_reg_rtx (mode);
6048 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6049 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6050
6051 /* First try to do it with a special MIN or MAX instruction.
6052 If that does not win, use a conditional jump to select the proper
6053 value. */
6054 this_optab = (TREE_UNSIGNED (type)
6055 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6056 : (code == MIN_EXPR ? smin_optab : smax_optab));
6057
6058 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6059 OPTAB_WIDEN);
6060 if (temp != 0)
6061 return temp;
6062
6063 /* At this point, a MEM target is no longer useful; we will get better
6064 code without it. */
6065
6066 if (GET_CODE (target) == MEM)
6067 target = gen_reg_rtx (mode);
6068
6069 if (target != op0)
6070 emit_move_insn (target, op0);
6071
6072 op0 = gen_label_rtx ();
6073
6074 /* If this mode is an integer too wide to compare properly,
6075 compare word by word. Rely on cse to optimize constant cases. */
6076 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6077 {
6078 if (code == MAX_EXPR)
6079 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6080 target, op1, NULL_RTX, op0);
6081 else
6082 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6083 op1, target, NULL_RTX, op0);
6084 emit_move_insn (target, op1);
6085 }
6086 else
6087 {
6088 if (code == MAX_EXPR)
6089 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6090 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6091 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6092 else
6093 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6094 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6095 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6096 if (temp == const0_rtx)
6097 emit_move_insn (target, op1);
6098 else if (temp != const_true_rtx)
6099 {
6100 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6101 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6102 else
6103 abort ();
6104 emit_move_insn (target, op1);
6105 }
6106 }
6107 emit_label (op0);
6108 return target;
6109
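/* A sketch of the fallback emitted above when no min/max instruction
   is available (illustrative, for MAX_EXPR):

       target = op0;
       if (target >= op1) goto lab;
       target = op1;
     lab:                                                          */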
6110 case BIT_NOT_EXPR:
6111 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6112 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6113 if (temp == 0)
6114 abort ();
6115 return temp;
6116
6117 case FFS_EXPR:
6118 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6119 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6120 if (temp == 0)
6121 abort ();
6122 return temp;
6123
6124 /* ??? Can optimize bitwise operations with one arg constant.
6125 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6126 and (a bitwise1 b) bitwise2 b (etc)
6127 but that is probably not worth while. */
6128
6129 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6130 boolean values when we want in all cases to compute both of them. In
6131 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6132 as actual zero-or-1 values and then bitwise anding. In cases where
6133 there cannot be any side effects, better code would be made by
6134 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6135 how to recognize those cases. */
6136
6137 case TRUTH_AND_EXPR:
6138 case BIT_AND_EXPR:
6139 this_optab = and_optab;
6140 goto binop;
6141
6142 case TRUTH_OR_EXPR:
6143 case BIT_IOR_EXPR:
6144 this_optab = ior_optab;
6145 goto binop;
6146
6147 case TRUTH_XOR_EXPR:
6148 case BIT_XOR_EXPR:
6149 this_optab = xor_optab;
6150 goto binop;
6151
6152 case LSHIFT_EXPR:
6153 case RSHIFT_EXPR:
6154 case LROTATE_EXPR:
6155 case RROTATE_EXPR:
6156 preexpand_calls (exp);
6157 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6158 subtarget = 0;
6159 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6160 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6161 unsignedp);
6162
6163 /* Could determine the answer when only additive constants differ. Also,
6164 the addition of one can be handled by changing the condition. */
6165 case LT_EXPR:
6166 case LE_EXPR:
6167 case GT_EXPR:
6168 case GE_EXPR:
6169 case EQ_EXPR:
6170 case NE_EXPR:
6171 preexpand_calls (exp);
6172 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6173 if (temp != 0)
6174 return temp;
6175
6176 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6177 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6178 && original_target
6179 && GET_CODE (original_target) == REG
6180 && (GET_MODE (original_target)
6181 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6182 {
6183 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6184 VOIDmode, 0);
6185
6186 if (temp != original_target)
6187 temp = copy_to_reg (temp);
6188
6189 op1 = gen_label_rtx ();
6190 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6191 GET_MODE (temp), unsignedp, 0);
6192 emit_jump_insn (gen_beq (op1));
6193 emit_move_insn (temp, const1_rtx);
6194 emit_label (op1);
6195 return temp;
6196 }
6197
6198 /* If no set-flag instruction, must generate a conditional
6199 store into a temporary variable. Drop through
6200 and handle this like && and ||. */
6201
6202 case TRUTH_ANDIF_EXPR:
6203 case TRUTH_ORIF_EXPR:
6204 if (! ignore
6205 && (target == 0 || ! safe_from_p (target, exp)
6206 /* Make sure we don't have a hard reg (such as function's return
6207 value) live across basic blocks, if not optimizing. */
6208 || (!optimize && GET_CODE (target) == REG
6209 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6210 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6211
6212 if (target)
6213 emit_clr_insn (target);
6214
6215 op1 = gen_label_rtx ();
6216 jumpifnot (exp, op1);
6217
6218 if (target)
6219 emit_0_to_1_insn (target);
6220
6221 emit_label (op1);
6222 return ignore ? const0_rtx : target;
6223
6224 case TRUTH_NOT_EXPR:
6225 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6226 /* The parser is careful to generate TRUTH_NOT_EXPR
6227 only with operands that are always zero or one. */
6228 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6229 target, 1, OPTAB_LIB_WIDEN);
6230 if (temp == 0)
6231 abort ();
6232 return temp;
6233
6234 case COMPOUND_EXPR:
6235 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6236 emit_queue ();
6237 return expand_expr (TREE_OPERAND (exp, 1),
6238 (ignore ? const0_rtx : target),
6239 VOIDmode, 0);
6240
6241 case COND_EXPR:
6242 {
6243 rtx flag = NULL_RTX;
6244 tree left_cleanups = NULL_TREE;
6245 tree right_cleanups = NULL_TREE;
6246
6247 /* Used to save a pointer to the place to put the setting of
6248 the flag that indicates if this side of the conditional was
6249 taken. We backpatch the code, if we find out later that we
6250 have any conditional cleanups that need to be performed. */
6251 rtx dest_right_flag = NULL_RTX;
6252 rtx dest_left_flag = NULL_RTX;
6253
6254 /* Note that COND_EXPRs whose type is a structure or union
6255 are required to be constructed to contain assignments of
6256 a temporary variable, so that we can evaluate them here
6257 for side effect only. If type is void, we must do likewise. */
6258
6259 /* If an arm of the branch requires a cleanup,
6260 only that cleanup is performed. */
6261
6262 tree singleton = 0;
6263 tree binary_op = 0, unary_op = 0;
6264 tree old_cleanups = cleanups_this_call;
6265
6266 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6267 convert it to our mode, if necessary. */
6268 if (integer_onep (TREE_OPERAND (exp, 1))
6269 && integer_zerop (TREE_OPERAND (exp, 2))
6270 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6271 {
6272 if (ignore)
6273 {
6274 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6275 modifier);
6276 return const0_rtx;
6277 }
6278
6279 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6280 if (GET_MODE (op0) == mode)
6281 return op0;
6282
6283 if (target == 0)
6284 target = gen_reg_rtx (mode);
6285 convert_move (target, op0, unsignedp);
6286 return target;
6287 }
6288
6289 /* If we are not to produce a result, we have no target. Otherwise,
6290 if a target was specified use it; it will not be used as an
6291 intermediate target unless it is safe. If no target, use a
6292 temporary. */
6293
6294 if (ignore)
6295 temp = 0;
6296 else if (original_target
6297 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
6298 && GET_MODE (original_target) == mode
6299 && ! (GET_CODE (original_target) == MEM
6300 && MEM_VOLATILE_P (original_target)))
6301 temp = original_target;
6302 else
6303 temp = assign_temp (type, 0, 0, 1);
6304
6305 /* Check for X ? A + B : A. If we have this, we can copy
6306 A to the output and conditionally add B. Similarly for unary
6307 operations. Don't do this if X has side-effects because
6308 those side effects might affect A or B and the "?" operation is
6309 a sequence point in ANSI. (We test for side effects later.) */
6310
6311 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6312 && operand_equal_p (TREE_OPERAND (exp, 2),
6313 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6314 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6315 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6316 && operand_equal_p (TREE_OPERAND (exp, 1),
6317 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6318 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6319 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6320 && operand_equal_p (TREE_OPERAND (exp, 2),
6321 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6322 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6323 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6324 && operand_equal_p (TREE_OPERAND (exp, 1),
6325 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6326 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6327
6328 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6329 operation, do this as A + (X != 0). Similarly for other simple
6330 binary operators. */
6331 if (temp && singleton && binary_op
6332 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6333 && (TREE_CODE (binary_op) == PLUS_EXPR
6334 || TREE_CODE (binary_op) == MINUS_EXPR
6335 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6336 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6337 && integer_onep (TREE_OPERAND (binary_op, 1))
6338 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6339 {
6340 rtx result;
6341 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6342 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6343 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6344 : xor_optab);
6345
6346 /* If we had X ? A : A + 1, do this as A + (X == 0).
6347
6348 We have to invert the truth value here and then put it
6349 back later if do_store_flag fails. We cannot simply copy
6350 TREE_OPERAND (exp, 0) to another variable and modify that
6351 because invert_truthvalue can modify the tree pointed to
6352 by its argument. */
6353 if (singleton == TREE_OPERAND (exp, 1))
6354 TREE_OPERAND (exp, 0)
6355 = invert_truthvalue (TREE_OPERAND (exp, 0));
6356
6357 result = do_store_flag (TREE_OPERAND (exp, 0),
6358 (safe_from_p (temp, singleton)
6359 ? temp : NULL_RTX),
6360 mode, BRANCH_COST <= 1);
6361
6362 if (result)
6363 {
6364 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6365 return expand_binop (mode, boptab, op1, result, temp,
6366 unsignedp, OPTAB_LIB_WIDEN);
6367 }
6368 else if (singleton == TREE_OPERAND (exp, 1))
6369 TREE_OPERAND (exp, 0)
6370 = invert_truthvalue (TREE_OPERAND (exp, 0));
6371 }
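/* For example (illustrative):

       i = (x < y ? i + 1 : i);   ==>   i = i + (x < y);

   do_store_flag materializes X < Y as a 0/1 value, which then feeds
   the ordinary binary-operator path and avoids a branch.  */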
6372
6373 do_pending_stack_adjust ();
6374 NO_DEFER_POP;
6375 op0 = gen_label_rtx ();
6376
6377 flag = gen_reg_rtx (word_mode);
6378 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6379 {
6380 if (temp != 0)
6381 {
6382 /* If the target conflicts with the other operand of the
6383 binary op, we can't use it. Also, we can't use the target
6384 if it is a hard register, because evaluating the condition
6385 might clobber it. */
6386 if ((binary_op
6387 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6388 || (GET_CODE (temp) == REG
6389 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6390 temp = gen_reg_rtx (mode);
6391 store_expr (singleton, temp, 0);
6392 }
6393 else
6394 expand_expr (singleton,
6395 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6396 dest_left_flag = get_last_insn ();
6397 if (singleton == TREE_OPERAND (exp, 1))
6398 jumpif (TREE_OPERAND (exp, 0), op0);
6399 else
6400 jumpifnot (TREE_OPERAND (exp, 0), op0);
6401
6402 /* Allows cleanups up to here. */
6403 old_cleanups = cleanups_this_call;
6404 if (binary_op && temp == 0)
6405 /* Just touch the other operand. */
6406 expand_expr (TREE_OPERAND (binary_op, 1),
6407 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6408 else if (binary_op)
6409 store_expr (build (TREE_CODE (binary_op), type,
6410 make_tree (type, temp),
6411 TREE_OPERAND (binary_op, 1)),
6412 temp, 0);
6413 else
6414 store_expr (build1 (TREE_CODE (unary_op), type,
6415 make_tree (type, temp)),
6416 temp, 0);
6417 op1 = op0;
6418 dest_right_flag = get_last_insn ();
6419 }
6420#if 0
6421 /* This is now done in jump.c and is better done there because it
6422 produces shorter register lifetimes. */
6423
6424 /* Check for both possibilities either constants or variables
6425 in registers (but not the same as the target!). If so, can
6426 save branches by assigning one, branching, and assigning the
6427 other. */
6428 else if (temp && GET_MODE (temp) != BLKmode
6429 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6430 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6431 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6432 && DECL_RTL (TREE_OPERAND (exp, 1))
6433 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6434 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6435 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6436 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6437 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6438 && DECL_RTL (TREE_OPERAND (exp, 2))
6439 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6440 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6441 {
6442 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6443 temp = gen_reg_rtx (mode);
6444 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6445 dest_left_flag = get_last_insn ();
6446 jumpifnot (TREE_OPERAND (exp, 0), op0);
6447
6448 /* Allows cleanups up to here. */
6449 old_cleanups = cleanups_this_call;
6450 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6451 op1 = op0;
6452 dest_right_flag = get_last_insn ();
6453 }
6454#endif
6455 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6456 comparison operator. If we have one of these cases, set the
6457 output to A, branch on A (cse will merge these two references),
6458 then set the output to FOO. */
6459 else if (temp
6460 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6461 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6462 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6463 TREE_OPERAND (exp, 1), 0)
6464 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6465 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6466 {
6467 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6468 temp = gen_reg_rtx (mode);
6469 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6470 dest_left_flag = get_last_insn ();
6471 jumpif (TREE_OPERAND (exp, 0), op0);
6472
6473 /* Allows cleanups up to here. */
6474 old_cleanups = cleanups_this_call;
6475 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6476 op1 = op0;
6477 dest_right_flag = get_last_insn ();
6478 }
6479 else if (temp
6480 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6481 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6482 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6483 TREE_OPERAND (exp, 2), 0)
6484 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6485 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6486 {
6487 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6488 temp = gen_reg_rtx (mode);
6489 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6490 dest_left_flag = get_last_insn ();
6491 jumpifnot (TREE_OPERAND (exp, 0), op0);
6492
6493 /* Allows cleanups up to here. */
6494 old_cleanups = cleanups_this_call;
bbf6f052
RK
6495 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6496 op1 = op0;
5dab5552 6497 dest_right_flag = get_last_insn ();
bbf6f052
RK
6498 }
6499 else
6500 {
6501 op1 = gen_label_rtx ();
6502 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552
MS
6503
6504 /* Allows cleanups up to here. */
6505 old_cleanups = cleanups_this_call;
bbf6f052
RK
6506 if (temp != 0)
6507 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6508 else
906c4e36
RK
6509 expand_expr (TREE_OPERAND (exp, 1),
6510 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5dab5552
MS
6511 dest_left_flag = get_last_insn ();
6512
6513 /* Handle conditional cleanups, if any. */
6514 left_cleanups = defer_cleanups_to (old_cleanups);
bbf6f052
RK
6515
6516 emit_queue ();
6517 emit_jump_insn (gen_jump (op1));
6518 emit_barrier ();
6519 emit_label (op0);
6520 if (temp != 0)
6521 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6522 else
906c4e36
RK
6523 expand_expr (TREE_OPERAND (exp, 2),
6524 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5dab5552 6525 dest_right_flag = get_last_insn ();
bbf6f052
RK
6526 }
6527
5dab5552
MS
6528 /* Handle conditional cleanups, if any. */
6529 right_cleanups = defer_cleanups_to (old_cleanups);
bbf6f052
RK
6530
6531 emit_queue ();
6532 emit_label (op1);
6533 OK_DEFER_POP;
5dab5552
MS
6534
 6535	  /* Add back in any conditional cleanups.  */
6536 if (left_cleanups || right_cleanups)
6537 {
6538 tree new_cleanups;
6539 tree cond;
6540 rtx last;
6541
6542 /* Now that we know that a flag is needed, go back and add in the
6543 setting of the flag. */
6544
6545 /* Do the left side flag. */
6546 last = get_last_insn ();
6547 /* Flag left cleanups as needed. */
6548 emit_move_insn (flag, const1_rtx);
6549 /* ??? deprecated, use sequences instead. */
6550 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6551
6552 /* Do the right side flag. */
6553 last = get_last_insn ();
 6554	      /* Flag right cleanups as needed.  */
6555 emit_move_insn (flag, const0_rtx);
6556 /* ??? deprecated, use sequences instead. */
6557 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6558
9ba73d38
MS
6559 /* All cleanups must be on the function_obstack. */
6560 push_obstacks_nochange ();
6561 resume_temporary_allocation ();
6562
5dab5552
MS
 6563	      /* Convert FLAG, which is an rtx, into a tree.  */
6564 cond = make_node (RTL_EXPR);
6565 TREE_TYPE (cond) = integer_type_node;
6566 RTL_EXPR_RTL (cond) = flag;
6567 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
01842234 6568 cond = save_expr (cond);
5dab5552
MS
6569
6570 if (! left_cleanups)
6571 left_cleanups = integer_zero_node;
6572 if (! right_cleanups)
6573 right_cleanups = integer_zero_node;
fd67d2b6
JM
6574 new_cleanups = build (COND_EXPR, void_type_node,
6575 truthvalue_conversion (cond),
5dab5552
MS
6576 left_cleanups, right_cleanups);
6577 new_cleanups = fold (new_cleanups);
6578
9ba73d38
MS
6579 pop_obstacks ();
6580
5dab5552
MS
6581 /* Now add in the conditionalized cleanups. */
6582 cleanups_this_call
6583 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
61d6b1cc 6584 (*interim_eh_hook) (NULL_TREE);
5dab5552 6585 }
bbf6f052
RK
6586 return temp;
6587 }
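
/* Editor's sketch, not part of expr.c (all names invented): the
   runtime-flag technique built above, shown at the source level.
   When each arm of a ?: may create a temporary that needs a cleanup,
   only the cleanup for the arm actually taken may run, so a flag
   records which arm ran and the combined cleanup is
   COND_EXPR (flag, left_cleanups, right_cleanups).  */

#include <stdio.h>

static int flag;                        /* set by whichever arm runs */

static void left_cleanup (void)  { puts ("left cleanup");  }
static void right_cleanup (void) { puts ("right cleanup"); }

static int cond_with_cleanups (int c)
{
  int result;

  if (c)
    { flag = 1; result = 10; }          /* left arm ran */
  else
    { flag = 0; result = 20; }          /* right arm ran */

  /* The conditionalized cleanup emitted at the end of the scope.  */
  if (flag)
    left_cleanup ();
  else
    right_cleanup ();
  return result;
}

int main (void)
{
  return cond_with_cleanups (1) + cond_with_cleanups (0) == 30 ? 0 : 1;
}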
6588
6589 case TARGET_EXPR:
6590 {
61d6b1cc 6591 int need_exception_region = 0;
bbf6f052
RK
6592 /* Something needs to be initialized, but we didn't know
6593 where that thing was when building the tree. For example,
6594 it could be the return value of a function, or a parameter
6595 to a function which lays down in the stack, or a temporary
6596 variable which must be passed by reference.
6597
6598 We guarantee that the expression will either be constructed
6599 or copied into our original target. */
6600
6601 tree slot = TREE_OPERAND (exp, 0);
5c062816 6602 tree exp1;
61d6b1cc 6603 rtx temp;
bbf6f052
RK
6604
6605 if (TREE_CODE (slot) != VAR_DECL)
6606 abort ();
6607
9c51f375
RK
6608 if (! ignore)
6609 target = original_target;
6610
bbf6f052
RK
6611 if (target == 0)
6612 {
6613 if (DECL_RTL (slot) != 0)
ac993f4f
MS
6614 {
6615 target = DECL_RTL (slot);
5c062816 6616	      /* If we have already expanded the slot, don't do
ac993f4f 6617		 it again.  (mrs)  */
5c062816
MS
6618 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6619 return target;
ac993f4f 6620 }
bbf6f052
RK
6621 else
6622 {
06089a8b 6623 target = assign_temp (type, 2, 1, 1);
bbf6f052
RK
6624 /* All temp slots at this level must not conflict. */
6625 preserve_temp_slots (target);
6626 DECL_RTL (slot) = target;
bbf6f052 6627
e287fd6e
RK
6628 /* Since SLOT is not known to the called function
6629 to belong to its stack frame, we must build an explicit
6630 cleanup. This case occurs when we must build up a reference
6631 to pass the reference as an argument. In this case,
6632 it is very likely that such a reference need not be
6633 built here. */
6634
6635 if (TREE_OPERAND (exp, 2) == 0)
6636 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6637 if (TREE_OPERAND (exp, 2))
19d3f3c5
MS
6638 {
6639 cleanups_this_call = tree_cons (NULL_TREE,
6640 TREE_OPERAND (exp, 2),
6641 cleanups_this_call);
61d6b1cc 6642 need_exception_region = 1;
19d3f3c5 6643 }
e287fd6e 6644 }
bbf6f052
RK
6645 }
6646 else
6647 {
6648 /* This case does occur, when expanding a parameter which
6649 needs to be constructed on the stack. The target
6650 is the actual stack address that we want to initialize.
6651 The function we call will perform the cleanup in this case. */
6652
8c042b47
RS
6653 /* If we have already assigned it space, use that space,
 6654	       not the target that we were passed in, as our target
6655 parameter is only a hint. */
6656 if (DECL_RTL (slot) != 0)
6657 {
6658 target = DECL_RTL (slot);
 6659		/* If we have already expanded the slot, don't do
6660 it again. (mrs) */
6661 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6662 return target;
6663 }
6664
bbf6f052
RK
6665 DECL_RTL (slot) = target;
6666 }
6667
5c062816
MS
6668 exp1 = TREE_OPERAND (exp, 1);
6669 /* Mark it as expanded. */
6670 TREE_OPERAND (exp, 1) = NULL_TREE;
6671
41531e5b 6672 store_expr (exp1, target, 0);
61d6b1cc
MS
6673
6674 if (need_exception_region)
6675 (*interim_eh_hook) (NULL_TREE);
6676
41531e5b 6677 return target;
bbf6f052
RK
6678 }
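
/* Editor's sketch, not part of expr.c (names invented): one of the
   situations the comment above describes -- the return value of a
   function returning a struct must be constructed in a slot the
   caller provides, and the TARGET_EXPR names that slot before its
   address is known.  */

struct big { int a[16]; };

static struct big make_big (void)
{
  struct big b = { { 7 } };
  return b;                     /* constructed in the caller's slot */
}

int main (void)
{
  struct big x = make_big ();   /* x is the slot being initialized */
  return x.a[0] == 7 ? 0 : 1;
}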
6679
6680 case INIT_EXPR:
6681 {
6682 tree lhs = TREE_OPERAND (exp, 0);
6683 tree rhs = TREE_OPERAND (exp, 1);
6684 tree noncopied_parts = 0;
6685 tree lhs_type = TREE_TYPE (lhs);
6686
6687 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6688 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6689 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6690 TYPE_NONCOPIED_PARTS (lhs_type));
6691 while (noncopied_parts != 0)
6692 {
6693 expand_assignment (TREE_VALUE (noncopied_parts),
6694 TREE_PURPOSE (noncopied_parts), 0, 0);
6695 noncopied_parts = TREE_CHAIN (noncopied_parts);
6696 }
6697 return temp;
6698 }
6699
6700 case MODIFY_EXPR:
6701 {
6702 /* If lhs is complex, expand calls in rhs before computing it.
6703 That's so we don't compute a pointer and save it over a call.
6704 If lhs is simple, compute it first so we can give it as a
6705 target if the rhs is just a call. This avoids an extra temp and copy
6706 and that prevents a partial-subsumption which makes bad code.
6707 Actually we could treat component_ref's of vars like vars. */
6708
6709 tree lhs = TREE_OPERAND (exp, 0);
6710 tree rhs = TREE_OPERAND (exp, 1);
6711 tree noncopied_parts = 0;
6712 tree lhs_type = TREE_TYPE (lhs);
6713
6714 temp = 0;
6715
6716 if (TREE_CODE (lhs) != VAR_DECL
6717 && TREE_CODE (lhs) != RESULT_DECL
6718 && TREE_CODE (lhs) != PARM_DECL)
6719 preexpand_calls (exp);
6720
 6721	/* Check for |= or &= of a bitfield of size one into another bitfield
 6722	   of size one.  In this case (unless we need the result of the
 6723	   assignment) we can do this more efficiently with a
 6724	   test followed by an assignment, if necessary.
6725
6726 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6727 things change so we do, this code should be enhanced to
6728 support it. */
6729 if (ignore
6730 && TREE_CODE (lhs) == COMPONENT_REF
6731 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6732 || TREE_CODE (rhs) == BIT_AND_EXPR)
6733 && TREE_OPERAND (rhs, 0) == lhs
6734 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6735 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6736 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6737 {
6738 rtx label = gen_label_rtx ();
6739
6740 do_jump (TREE_OPERAND (rhs, 1),
6741 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6742 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6743 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6744 (TREE_CODE (rhs) == BIT_IOR_EXPR
6745 ? integer_one_node
6746 : integer_zero_node)),
6747 0, 0);
e7c33f54 6748 do_pending_stack_adjust ();
bbf6f052
RK
6749 emit_label (label);
6750 return const0_rtx;
6751 }
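
/* Editor's sketch, not part of expr.c (hypothetical type): the
   source-level effect of the one-bit-bitfield rewrite above, for the
   case where the result of the assignment is not needed.  */

struct bits { unsigned a : 1, b : 1; };

static void or_assign (struct bits *p)
{
  /* p->a |= p->b;  is emitted as a test and a store:  */
  if (p->b)
    p->a = 1;
}

static void and_assign (struct bits *p)
{
  /* p->a &= p->b;  likewise:  */
  if (! p->b)
    p->a = 0;
}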
6752
6753 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6754 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6755 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6756 TYPE_NONCOPIED_PARTS (lhs_type));
6757
6758 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6759 while (noncopied_parts != 0)
6760 {
6761 expand_assignment (TREE_PURPOSE (noncopied_parts),
6762 TREE_VALUE (noncopied_parts), 0, 0);
6763 noncopied_parts = TREE_CHAIN (noncopied_parts);
6764 }
6765 return temp;
6766 }
6767
6768 case PREINCREMENT_EXPR:
6769 case PREDECREMENT_EXPR:
6770 return expand_increment (exp, 0);
6771
6772 case POSTINCREMENT_EXPR:
6773 case POSTDECREMENT_EXPR:
6774 /* Faster to treat as pre-increment if result is not used. */
6775 return expand_increment (exp, ! ignore);
6776
6777 case ADDR_EXPR:
987c71d9
RK
6778 /* If nonzero, TEMP will be set to the address of something that might
6779 be a MEM corresponding to a stack slot. */
6780 temp = 0;
6781
bbf6f052
RK
6782 /* Are we taking the address of a nested function? */
6783 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9
JM
6784 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
6785 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
bbf6f052
RK
6786 {
6787 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6788 op0 = force_operand (op0, target);
6789 }
682ba3a6
RK
6790 /* If we are taking the address of something erroneous, just
6791 return a zero. */
6792 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6793 return const0_rtx;
bbf6f052
RK
6794 else
6795 {
e287fd6e
RK
6796 /* We make sure to pass const0_rtx down if we came in with
 6797	     ignore set, to avoid running the cleanups twice.  */
6798 op0 = expand_expr (TREE_OPERAND (exp, 0),
6799 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
6800 (modifier == EXPAND_INITIALIZER
6801 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 6802
119af78a
RK
6803 /* If we are going to ignore the result, OP0 will have been set
6804 to const0_rtx, so just return it. Don't get confused and
6805 think we are taking the address of the constant. */
6806 if (ignore)
6807 return op0;
6808
3539e816
MS
6809 op0 = protect_from_queue (op0, 0);
6810
896102d0
RK
6811 /* We would like the object in memory. If it is a constant,
6812 we can have it be statically allocated into memory. For
682ba3a6 6813 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
896102d0
RK
6814 memory and store the value into it. */
6815
6816 if (CONSTANT_P (op0))
6817 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6818 op0);
987c71d9 6819 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
6820 {
6821 mark_temp_addr_taken (op0);
6822 temp = XEXP (op0, 0);
6823 }
896102d0 6824
682ba3a6
RK
6825 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6826 || GET_CODE (op0) == CONCAT)
896102d0
RK
6827 {
 6828	      /* If this object is in a register, it must not
6829 be BLKmode. */
6830 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 6831 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 6832
7a0b7b9a 6833 mark_temp_addr_taken (memloc);
896102d0
RK
6834 emit_move_insn (memloc, op0);
6835 op0 = memloc;
6836 }
6837
bbf6f052
RK
6838 if (GET_CODE (op0) != MEM)
6839 abort ();
6840
6841 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
6842 {
6843 temp = XEXP (op0, 0);
6844#ifdef POINTERS_EXTEND_UNSIGNED
6845 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6846 && mode == ptr_mode)
9fcfcce7 6847 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
6848#endif
6849 return temp;
6850 }
987c71d9 6851
bbf6f052
RK
6852 op0 = force_operand (XEXP (op0, 0), target);
6853 }
987c71d9 6854
bbf6f052 6855 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
6856 op0 = force_reg (Pmode, op0);
6857
dc6d66b3
RK
6858 if (GET_CODE (op0) == REG
6859 && ! REG_USERVAR_P (op0))
6860 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
6861
6862 /* If we might have had a temp slot, add an equivalent address
6863 for it. */
6864 if (temp != 0)
6865 update_temp_slot_address (temp, op0);
6866
88f63c77
RK
6867#ifdef POINTERS_EXTEND_UNSIGNED
6868 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
6869 && mode == ptr_mode)
9fcfcce7 6870 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
6871#endif
6872
bbf6f052
RK
6873 return op0;
6874
6875 case ENTRY_VALUE_EXPR:
6876 abort ();
6877
7308a047
RS
6878 /* COMPLEX type for Extended Pascal & Fortran */
6879 case COMPLEX_EXPR:
6880 {
6881 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 6882 rtx insns;
7308a047
RS
6883
 6884	/* Get the rtx for the operands.  */
6885 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6886 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6887
6888 if (! target)
6889 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6890
6551fa4d 6891 start_sequence ();
7308a047
RS
6892
6893 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
6894 emit_move_insn (gen_realpart (mode, target), op0);
6895 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 6896
6551fa4d
JW
6897 insns = get_insns ();
6898 end_sequence ();
6899
7308a047 6900 /* Complex construction should appear as a single unit. */
6551fa4d
JW
6901 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6902 each with a separate pseudo as destination.
6903 It's not correct for flow to treat them as a unit. */
6d6e61ce 6904 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
6905 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6906 else
6907 emit_insns (insns);
7308a047
RS
6908
6909 return target;
6910 }
6911
6912 case REALPART_EXPR:
2d7050fd
RS
6913 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6914 return gen_realpart (mode, op0);
7308a047
RS
6915
6916 case IMAGPART_EXPR:
2d7050fd
RS
6917 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6918 return gen_imagpart (mode, op0);
7308a047
RS
6919
6920 case CONJ_EXPR:
6921 {
62acb978 6922 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 6923 rtx imag_t;
6551fa4d 6924 rtx insns;
7308a047
RS
6925
6926 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6927
6928 if (! target)
d6a5ac33 6929 target = gen_reg_rtx (mode);
7308a047 6930
6551fa4d 6931 start_sequence ();
7308a047
RS
6932
6933 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
6934 emit_move_insn (gen_realpart (partmode, target),
6935 gen_realpart (partmode, op0));
7308a047 6936
62acb978
RK
6937 imag_t = gen_imagpart (partmode, target);
6938 temp = expand_unop (partmode, neg_optab,
6939 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
6940 if (temp != imag_t)
6941 emit_move_insn (imag_t, temp);
6942
6551fa4d
JW
6943 insns = get_insns ();
6944 end_sequence ();
6945
d6a5ac33
RK
 6946	/* Conjugate should appear as a single unit.
6947 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
6948 each with a separate pseudo as destination.
6949 It's not correct for flow to treat them as a unit. */
6d6e61ce 6950 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
6951 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6952 else
6953 emit_insns (insns);
7308a047
RS
6954
6955 return target;
6956 }
6957
bbf6f052 6958 case ERROR_MARK:
66538193
RS
6959 op0 = CONST0_RTX (tmode);
6960 if (op0 != 0)
6961 return op0;
bbf6f052
RK
6962 return const0_rtx;
6963
6964 default:
90764a87 6965 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
6966 }
6967
6968 /* Here to do an ordinary binary operator, generating an instruction
6969 from the optab already placed in `this_optab'. */
6970 binop:
6971 preexpand_calls (exp);
6972 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6973 subtarget = 0;
6974 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6975 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6976 binop2:
6977 temp = expand_binop (mode, this_optab, op0, op1, target,
6978 unsignedp, OPTAB_LIB_WIDEN);
6979 if (temp == 0)
6980 abort ();
6981 return temp;
6982}
bbf6f052 6983
bbf6f052 6984
ca695ac9
JB
6985/* Emit bytecode to evaluate the given expression EXP to the stack. */
6986void
6987bc_expand_expr (exp)
6988 tree exp;
bbf6f052 6989{
ca695ac9
JB
6990 enum tree_code code;
6991 tree type, arg0;
6992 rtx r;
6993 struct binary_operator *binoptab;
6994 struct unary_operator *unoptab;
6995 struct increment_operator *incroptab;
6996 struct bc_label *lab, *lab1;
6997 enum bytecode_opcode opcode;
6998
6999
7000 code = TREE_CODE (exp);
7001
7002 switch (code)
bbf6f052 7003 {
ca695ac9
JB
7004 case PARM_DECL:
7005
7006 if (DECL_RTL (exp) == 0)
bbf6f052 7007 {
ca695ac9
JB
7008 error_with_decl (exp, "prior parameter's size depends on `%s'");
7009 return;
bbf6f052 7010 }
ca695ac9
JB
7011
7012 bc_load_parmaddr (DECL_RTL (exp));
7013 bc_load_memory (TREE_TYPE (exp), exp);
7014
7015 return;
7016
7017 case VAR_DECL:
7018
7019 if (DECL_RTL (exp) == 0)
7020 abort ();
7021
7022#if 0
e7a42772 7023 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
7024 bc_load_externaddr (DECL_RTL (exp));
7025 else
7026 bc_load_localaddr (DECL_RTL (exp));
7027#endif
7028 if (TREE_PUBLIC (exp))
e7a42772
JB
7029 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7030 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
ca695ac9
JB
7031 else
7032 bc_load_localaddr (DECL_RTL (exp));
7033
7034 bc_load_memory (TREE_TYPE (exp), exp);
7035 return;
7036
7037 case INTEGER_CST:
7038
7039#ifdef DEBUG_PRINT_CODE
7040 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7041#endif
6bd6178d 7042 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
ca695ac9 7043 ? SImode
6bd6178d 7044 : TYPE_MODE (TREE_TYPE (exp)))],
ca695ac9
JB
7045 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7046 return;
7047
7048 case REAL_CST:
7049
c02bd5d9 7050#if 0
ca695ac9
JB
7051#ifdef DEBUG_PRINT_CODE
7052 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7053#endif
c02bd5d9 7054 /* FIX THIS: find a better way to pass real_cst's. -bson */
ca695ac9
JB
7055 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7056 (double) TREE_REAL_CST (exp));
c02bd5d9
JB
7057#else
7058 abort ();
7059#endif
7060
ca695ac9
JB
7061 return;
7062
7063 case CALL_EXPR:
7064
7065 /* We build a call description vector describing the type of
7066 the return value and of the arguments; this call vector,
7067 together with a pointer to a location for the return value
7068 and the base of the argument list, is passed to the low
7069 level machine dependent call subroutine, which is responsible
7070 for putting the arguments wherever real functions expect
7071 them, as well as getting the return value back. */
7072 {
7073 tree calldesc = 0, arg;
7074 int nargs = 0, i;
7075 rtx retval;
7076
7077 /* Push the evaluated args on the evaluation stack in reverse
7078 order. Also make an entry for each arg in the calldesc
7079 vector while we're at it. */
7080
7081 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7082
7083 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7084 {
7085 ++nargs;
7086 bc_expand_expr (TREE_VALUE (arg));
7087
7088 calldesc = tree_cons ((tree) 0,
7089 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7090 calldesc);
7091 calldesc = tree_cons ((tree) 0,
7092 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7093 calldesc);
7094 }
7095
7096 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7097
7098 /* Allocate a location for the return value and push its
7099 address on the evaluation stack. Also make an entry
7100 at the front of the calldesc for the return value type. */
7101
7102 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7103 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7104 bc_load_localaddr (retval);
7105
7106 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7107 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7108
7109 /* Prepend the argument count. */
7110 calldesc = tree_cons ((tree) 0,
7111 build_int_2 (nargs, 0),
7112 calldesc);
7113
7114 /* Push the address of the call description vector on the stack. */
7115 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7116 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7117 build_index_type (build_int_2 (nargs * 2, 0)));
7118 r = output_constant_def (calldesc);
7119 bc_load_externaddr (r);
7120
7121 /* Push the address of the function to be called. */
7122 bc_expand_expr (TREE_OPERAND (exp, 0));
7123
7124 /* Call the function, popping its address and the calldesc vector
7125 address off the evaluation stack in the process. */
7126 bc_emit_instruction (call);
7127
7128 /* Pop the arguments off the stack. */
7129 bc_adjust_stack (nargs);
7130
7131 /* Load the return value onto the stack. */
7132 bc_load_localaddr (retval);
7133 bc_load_memory (type, TREE_OPERAND (exp, 0));
7134 }
7135 return;
7136
7137 case SAVE_EXPR:
7138
7139 if (!SAVE_EXPR_RTL (exp))
bbf6f052 7140 {
ca695ac9
JB
7141 /* First time around: copy to local variable */
7142 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7143 TYPE_ALIGN (TREE_TYPE(exp)));
7144 bc_expand_expr (TREE_OPERAND (exp, 0));
6d6e61ce 7145 bc_emit_instruction (duplicate);
ca695ac9
JB
7146
7147 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7148 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 7149 }
ca695ac9 7150 else
bbf6f052 7151 {
ca695ac9
JB
7152 /* Consecutive reference: use saved copy */
7153 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7154 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 7155 }
ca695ac9
JB
7156 return;
7157
7158#if 0
7159 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7160 how are they handled instead? */
7161 case LET_STMT:
7162
7163 TREE_USED (exp) = 1;
7164 bc_expand_expr (STMT_BODY (exp));
7165 return;
7166#endif
7167
7168 case NOP_EXPR:
7169 case CONVERT_EXPR:
7170
7171 bc_expand_expr (TREE_OPERAND (exp, 0));
7172 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7173 return;
7174
7175 case MODIFY_EXPR:
7176
c02bd5d9 7177 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
ca695ac9
JB
7178 return;
7179
7180 case ADDR_EXPR:
7181
7182 bc_expand_address (TREE_OPERAND (exp, 0));
7183 return;
7184
7185 case INDIRECT_REF:
7186
7187 bc_expand_expr (TREE_OPERAND (exp, 0));
7188 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7189 return;
7190
7191 case ARRAY_REF:
7192
7193 bc_expand_expr (bc_canonicalize_array_ref (exp));
7194 return;
7195
7196 case COMPONENT_REF:
7197
7198 bc_expand_component_address (exp);
7199
7200 /* If we have a bitfield, generate a proper load */
7201 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7202 return;
7203
7204 case COMPOUND_EXPR:
7205
7206 bc_expand_expr (TREE_OPERAND (exp, 0));
7207 bc_emit_instruction (drop);
7208 bc_expand_expr (TREE_OPERAND (exp, 1));
7209 return;
7210
7211 case COND_EXPR:
7212
7213 bc_expand_expr (TREE_OPERAND (exp, 0));
7214 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7215 lab = bc_get_bytecode_label ();
c02bd5d9 7216 bc_emit_bytecode (xjumpifnot);
ca695ac9
JB
7217 bc_emit_bytecode_labelref (lab);
7218
7219#ifdef DEBUG_PRINT_CODE
7220 fputc ('\n', stderr);
7221#endif
7222 bc_expand_expr (TREE_OPERAND (exp, 1));
7223 lab1 = bc_get_bytecode_label ();
7224 bc_emit_bytecode (jump);
7225 bc_emit_bytecode_labelref (lab1);
7226
7227#ifdef DEBUG_PRINT_CODE
7228 fputc ('\n', stderr);
7229#endif
7230
7231 bc_emit_bytecode_labeldef (lab);
7232 bc_expand_expr (TREE_OPERAND (exp, 2));
7233 bc_emit_bytecode_labeldef (lab1);
7234 return;
7235
7236 case TRUTH_ANDIF_EXPR:
7237
c02bd5d9 7238 opcode = xjumpifnot;
ca695ac9
JB
7239 goto andorif;
7240
7241 case TRUTH_ORIF_EXPR:
7242
c02bd5d9 7243 opcode = xjumpif;
ca695ac9
JB
7244 goto andorif;
7245
7246 case PLUS_EXPR:
7247
7248 binoptab = optab_plus_expr;
7249 goto binop;
7250
7251 case MINUS_EXPR:
7252
7253 binoptab = optab_minus_expr;
7254 goto binop;
7255
7256 case MULT_EXPR:
7257
7258 binoptab = optab_mult_expr;
7259 goto binop;
7260
7261 case TRUNC_DIV_EXPR:
7262 case FLOOR_DIV_EXPR:
7263 case CEIL_DIV_EXPR:
7264 case ROUND_DIV_EXPR:
7265 case EXACT_DIV_EXPR:
7266
7267 binoptab = optab_trunc_div_expr;
7268 goto binop;
7269
7270 case TRUNC_MOD_EXPR:
7271 case FLOOR_MOD_EXPR:
7272 case CEIL_MOD_EXPR:
7273 case ROUND_MOD_EXPR:
7274
7275 binoptab = optab_trunc_mod_expr;
7276 goto binop;
7277
7278 case FIX_ROUND_EXPR:
7279 case FIX_FLOOR_EXPR:
7280 case FIX_CEIL_EXPR:
7281 abort (); /* Not used for C. */
7282
7283 case FIX_TRUNC_EXPR:
7284 case FLOAT_EXPR:
7285 case MAX_EXPR:
7286 case MIN_EXPR:
7287 case FFS_EXPR:
7288 case LROTATE_EXPR:
7289 case RROTATE_EXPR:
7290 abort (); /* FIXME */
7291
7292 case RDIV_EXPR:
7293
7294 binoptab = optab_rdiv_expr;
7295 goto binop;
7296
7297 case BIT_AND_EXPR:
7298
7299 binoptab = optab_bit_and_expr;
7300 goto binop;
7301
7302 case BIT_IOR_EXPR:
7303
7304 binoptab = optab_bit_ior_expr;
7305 goto binop;
7306
7307 case BIT_XOR_EXPR:
7308
7309 binoptab = optab_bit_xor_expr;
7310 goto binop;
7311
7312 case LSHIFT_EXPR:
7313
7314 binoptab = optab_lshift_expr;
7315 goto binop;
7316
7317 case RSHIFT_EXPR:
7318
7319 binoptab = optab_rshift_expr;
7320 goto binop;
7321
7322 case TRUTH_AND_EXPR:
7323
7324 binoptab = optab_truth_and_expr;
7325 goto binop;
7326
7327 case TRUTH_OR_EXPR:
7328
7329 binoptab = optab_truth_or_expr;
7330 goto binop;
7331
7332 case LT_EXPR:
7333
7334 binoptab = optab_lt_expr;
7335 goto binop;
7336
7337 case LE_EXPR:
7338
7339 binoptab = optab_le_expr;
7340 goto binop;
7341
7342 case GE_EXPR:
7343
7344 binoptab = optab_ge_expr;
7345 goto binop;
7346
7347 case GT_EXPR:
7348
7349 binoptab = optab_gt_expr;
7350 goto binop;
7351
7352 case EQ_EXPR:
7353
7354 binoptab = optab_eq_expr;
7355 goto binop;
7356
7357 case NE_EXPR:
7358
7359 binoptab = optab_ne_expr;
7360 goto binop;
7361
7362 case NEGATE_EXPR:
7363
7364 unoptab = optab_negate_expr;
7365 goto unop;
7366
7367 case BIT_NOT_EXPR:
7368
7369 unoptab = optab_bit_not_expr;
7370 goto unop;
7371
7372 case TRUTH_NOT_EXPR:
7373
7374 unoptab = optab_truth_not_expr;
7375 goto unop;
7376
7377 case PREDECREMENT_EXPR:
7378
7379 incroptab = optab_predecrement_expr;
7380 goto increment;
7381
7382 case PREINCREMENT_EXPR:
7383
7384 incroptab = optab_preincrement_expr;
7385 goto increment;
7386
7387 case POSTDECREMENT_EXPR:
7388
7389 incroptab = optab_postdecrement_expr;
7390 goto increment;
7391
7392 case POSTINCREMENT_EXPR:
7393
7394 incroptab = optab_postincrement_expr;
7395 goto increment;
7396
7397 case CONSTRUCTOR:
7398
7399 bc_expand_constructor (exp);
7400 return;
7401
7402 case ERROR_MARK:
7403 case RTL_EXPR:
7404
7405 return;
7406
7407 case BIND_EXPR:
7408 {
7409 tree vars = TREE_OPERAND (exp, 0);
7410 int vars_need_expansion = 0;
7411
7412 /* Need to open a binding contour here because
 7413	   if there are any cleanups they must be contained here.  */
7414 expand_start_bindings (0);
7415
7416 /* Mark the corresponding BLOCK for output. */
7417 if (TREE_OPERAND (exp, 2) != 0)
7418 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7419
7420 /* If VARS have not yet been expanded, expand them now. */
7421 while (vars)
7422 {
7423 if (DECL_RTL (vars) == 0)
7424 {
7425 vars_need_expansion = 1;
9bac07c3 7426 expand_decl (vars);
ca695ac9 7427 }
9bac07c3 7428 expand_decl_init (vars);
ca695ac9
JB
7429 vars = TREE_CHAIN (vars);
7430 }
7431
7432 bc_expand_expr (TREE_OPERAND (exp, 1));
7433
7434 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7435
7436 return;
7437 }
7438 }
7439
7440 abort ();
7441
7442 binop:
7443
7444 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7445 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7446 return;
7447
7448
7449 unop:
7450
7451 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7452 return;
7453
7454
7455 andorif:
7456
7457 bc_expand_expr (TREE_OPERAND (exp, 0));
7458 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7459 lab = bc_get_bytecode_label ();
7460
6d6e61ce 7461 bc_emit_instruction (duplicate);
ca695ac9
JB
7462 bc_emit_bytecode (opcode);
7463 bc_emit_bytecode_labelref (lab);
7464
7465#ifdef DEBUG_PRINT_CODE
7466 fputc ('\n', stderr);
7467#endif
7468
7469 bc_emit_instruction (drop);
7470
7471 bc_expand_expr (TREE_OPERAND (exp, 1));
7472 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7473 bc_emit_bytecode_labeldef (lab);
7474 return;
7475
7476
7477 increment:
7478
7479 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7480
7481 /* Push the quantum. */
7482 bc_expand_expr (TREE_OPERAND (exp, 1));
7483
7484 /* Convert it to the lvalue's type. */
7485 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7486
7487 /* Push the address of the lvalue */
c02bd5d9 7488 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
ca695ac9
JB
7489
7490 /* Perform actual increment */
c02bd5d9 7491 bc_expand_increment (incroptab, type);
ca695ac9
JB
7492 return;
7493}
7494\f
7495/* Return the alignment in bits of EXP, a pointer valued expression.
7496 But don't return more than MAX_ALIGN no matter what.
7497 The alignment returned is, by default, the alignment of the thing that
7498 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7499
7500 Otherwise, look at the expression to see if we can do better, i.e., if the
7501 expression is actually pointing at an object whose alignment is tighter. */
7502
7503static int
7504get_pointer_alignment (exp, max_align)
7505 tree exp;
7506 unsigned max_align;
7507{
7508 unsigned align, inner;
7509
7510 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7511 return 0;
7512
7513 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7514 align = MIN (align, max_align);
7515
7516 while (1)
7517 {
7518 switch (TREE_CODE (exp))
7519 {
7520 case NOP_EXPR:
7521 case CONVERT_EXPR:
7522 case NON_LVALUE_EXPR:
7523 exp = TREE_OPERAND (exp, 0);
7524 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7525 return align;
7526 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8dc2fbcf 7527 align = MIN (inner, max_align);
ca695ac9
JB
7528 break;
7529
7530 case PLUS_EXPR:
7531 /* If sum of pointer + int, restrict our maximum alignment to that
7532 imposed by the integer. If not, we can't do any better than
7533 ALIGN. */
7534 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7535 return align;
7536
7537 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7538 & (max_align - 1))
7539 != 0)
7540 max_align >>= 1;
7541
7542 exp = TREE_OPERAND (exp, 0);
7543 break;
7544
7545 case ADDR_EXPR:
7546 /* See what we are pointing at and look at its alignment. */
7547 exp = TREE_OPERAND (exp, 0);
7548 if (TREE_CODE (exp) == FUNCTION_DECL)
8dc2fbcf 7549 align = FUNCTION_BOUNDARY;
ca695ac9 7550 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8dc2fbcf 7551 align = DECL_ALIGN (exp);
ca695ac9
JB
7552#ifdef CONSTANT_ALIGNMENT
7553 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7554 align = CONSTANT_ALIGNMENT (exp, align);
7555#endif
7556 return MIN (align, max_align);
7557
7558 default:
7559 return align;
7560 }
7561 }
7562}
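
/* Editor's sketch, standalone (assumes BITS_PER_UNIT == 8): the
   PLUS_EXPR clamping loop above, extracted.  A constant byte offset
   halves MAX_ALIGN until the offset in bits is a multiple of it;
   e.g. clamp_align (6, 64) == 16, since 48 bits is 16-bit but not
   32-bit aligned.  */

static unsigned
clamp_align (unsigned offset_bytes, unsigned max_align)
{
  unsigned bits = offset_bytes * 8;

  while ((bits & (max_align - 1)) != 0)
    max_align >>= 1;
  return max_align;
}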
7563\f
7564/* Return the tree node and offset if a given argument corresponds to
7565 a string constant. */
7566
7567static tree
7568string_constant (arg, ptr_offset)
7569 tree arg;
7570 tree *ptr_offset;
7571{
7572 STRIP_NOPS (arg);
7573
7574 if (TREE_CODE (arg) == ADDR_EXPR
7575 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7576 {
7577 *ptr_offset = integer_zero_node;
7578 return TREE_OPERAND (arg, 0);
7579 }
7580 else if (TREE_CODE (arg) == PLUS_EXPR)
7581 {
7582 tree arg0 = TREE_OPERAND (arg, 0);
7583 tree arg1 = TREE_OPERAND (arg, 1);
7584
7585 STRIP_NOPS (arg0);
7586 STRIP_NOPS (arg1);
7587
7588 if (TREE_CODE (arg0) == ADDR_EXPR
7589 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7590 {
7591 *ptr_offset = arg1;
7592 return TREE_OPERAND (arg0, 0);
7593 }
7594 else if (TREE_CODE (arg1) == ADDR_EXPR
7595 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7596 {
7597 *ptr_offset = arg0;
7598 return TREE_OPERAND (arg1, 0);
7599 }
7600 }
7601
7602 return 0;
7603}
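
/* Editor's illustration, not part of expr.c: the two shapes
   string_constant recognizes.  */

const char *p0 = "hello";        /* ADDR_EXPR of a STRING_CST; offset 0 */
const char *p2 = "hello" + 2;    /* PLUS_EXPR of the same; offset 2 */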
7604
7605/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
 7606	   way, because the string could contain a zero byte in the middle.
7607 TREE_STRING_LENGTH is the size of the character array, not the string.
7608
7609 Unfortunately, string_constant can't access the values of const char
7610 arrays with initializers, so neither can we do so here. */
7611
7612static tree
7613c_strlen (src)
7614 tree src;
7615{
7616 tree offset_node;
7617 int offset, max;
7618 char *ptr;
7619
7620 src = string_constant (src, &offset_node);
7621 if (src == 0)
7622 return 0;
7623 max = TREE_STRING_LENGTH (src);
7624 ptr = TREE_STRING_POINTER (src);
7625 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7626 {
7627 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7628 compute the offset to the following null if we don't know where to
7629 start searching for it. */
7630 int i;
7631 for (i = 0; i < max; i++)
7632 if (ptr[i] == 0)
7633 return 0;
7634 /* We don't know the starting offset, but we do know that the string
7635 has no internal zero bytes. We can assume that the offset falls
7636 within the bounds of the string; otherwise, the programmer deserves
7637 what he gets. Subtract the offset from the length of the string,
7638 and return that. */
7639 /* This would perhaps not be valid if we were dealing with named
7640 arrays in addition to literal string constants. */
7641 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7642 }
7643
7644 /* We have a known offset into the string. Start searching there for
7645 a null character. */
7646 if (offset_node == 0)
7647 offset = 0;
7648 else
7649 {
7650 /* Did we get a long long offset? If so, punt. */
7651 if (TREE_INT_CST_HIGH (offset_node) != 0)
7652 return 0;
7653 offset = TREE_INT_CST_LOW (offset_node);
7654 }
7655 /* If the offset is known to be out of bounds, warn, and call strlen at
7656 runtime. */
7657 if (offset < 0 || offset > max)
7658 {
7659 warning ("offset outside bounds of constant string");
7660 return 0;
7661 }
7662 /* Use strlen to search for the first zero byte. Since any strings
7663 constructed with build_string will have nulls appended, we win even
7664 if we get handed something like (char[4])"abcd".
7665
7666 Since OFFSET is our starting index into the string, no further
7667 calculation is needed. */
7668 return size_int (strlen (ptr + offset));
7669}
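
/* Editor's illustration, standalone: why TREE_STRING_LENGTH (the size
   of the character array) is not the string length.  */

#include <string.h>

int main (void)
{
  char s[] = "foo\0bar";         /* array of 8 chars, two of them nul */
  return (sizeof s == 8 && strlen (s) == 3) ? 0 : 1;
}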
2bbf216f
RK
7670
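/* Expand __builtin_return_address or __builtin_frame_address, as
   selected by FNDECL_CODE.  Scan back COUNT frames, starting from the
   frame whose address is TEM; return the address of that frame for
   __builtin_frame_address, or the return address saved in it for
   __builtin_return_address.  */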
7671rtx
7672expand_builtin_return_addr (fndecl_code, count, tem)
7673 enum built_in_function fndecl_code;
 7674	     int count;
 7675	     rtx tem;
7676{
7677 int i;
7678
7679 /* Some machines need special handling before we can access
7680 arbitrary frames. For example, on the sparc, we must first flush
7681 all register windows to the stack. */
7682#ifdef SETUP_FRAME_ADDRESSES
7683 SETUP_FRAME_ADDRESSES ();
7684#endif
7685
7686 /* On the sparc, the return address is not in the frame, it is in a
7687 register. There is no way to access it off of the current frame
7688 pointer, but it can be accessed off the previous frame pointer by
7689 reading the value from the register window save area. */
7690#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7691 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7692 count--;
7693#endif
7694
7695 /* Scan back COUNT frames to the specified frame. */
7696 for (i = 0; i < count; i++)
7697 {
7698 /* Assume the dynamic chain pointer is in the word that the
7699 frame address points to, unless otherwise specified. */
7700#ifdef DYNAMIC_CHAIN_ADDRESS
7701 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7702#endif
7703 tem = memory_address (Pmode, tem);
7704 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7705 }
7706
7707 /* For __builtin_frame_address, return what we've got. */
7708 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7709 return tem;
7710
7711 /* For __builtin_return_address, Get the return address from that
7712 frame. */
7713#ifdef RETURN_ADDR_RTX
7714 tem = RETURN_ADDR_RTX (count, tem);
7715#else
7716 tem = memory_address (Pmode,
7717 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7718 tem = gen_rtx (MEM, Pmode, tem);
7719#endif
0ebba7fc 7720 return tem;
2bbf216f 7721}
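
/* Editor's illustration, standalone: the user-level builtins the
   helper above implements.  The argument must be a nonnegative
   integer constant.  */

void *examine_caller (void)
{
  void *frame = __builtin_frame_address (0);    /* this function's frame */
  void *ret   = __builtin_return_address (0);   /* where we will resume */
  return frame ? ret : 0;
}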
ca695ac9
JB
7722\f
7723/* Expand an expression EXP that calls a built-in function,
7724 with result going to TARGET if that's convenient
7725 (and in mode MODE if that's convenient).
7726 SUBTARGET may be used as the target for computing one of EXP's operands.
7727 IGNORE is nonzero if the value is to be ignored. */
7728
98aad286
RK
7729#define CALLED_AS_BUILT_IN(NODE) \
7730 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7731
ca695ac9
JB
7732static rtx
7733expand_builtin (exp, target, subtarget, mode, ignore)
7734 tree exp;
7735 rtx target;
7736 rtx subtarget;
7737 enum machine_mode mode;
7738 int ignore;
7739{
7740 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7741 tree arglist = TREE_OPERAND (exp, 1);
7742 rtx op0;
7743 rtx lab1, insns;
7744 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7745 optab builtin_optab;
7746
7747 switch (DECL_FUNCTION_CODE (fndecl))
7748 {
7749 case BUILT_IN_ABS:
7750 case BUILT_IN_LABS:
7751 case BUILT_IN_FABS:
7752 /* build_function_call changes these into ABS_EXPR. */
7753 abort ();
7754
7755 case BUILT_IN_SIN:
7756 case BUILT_IN_COS:
ba558a85
RK
7757 /* Treat these like sqrt, but only if the user asks for them. */
7758 if (! flag_fast_math)
7759 break;
ca695ac9
JB
7760 case BUILT_IN_FSQRT:
7761 /* If not optimizing, call the library function. */
7762 if (! optimize)
7763 break;
7764
7765 if (arglist == 0
7766 /* Arg could be wrong type if user redeclared this fcn wrong. */
7767 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7b073ca6 7768 break;
ca695ac9
JB
7769
7770 /* Stabilize and compute the argument. */
7771 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7772 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7773 {
7774 exp = copy_node (exp);
7775 arglist = copy_node (arglist);
7776 TREE_OPERAND (exp, 1) = arglist;
7777 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7778 }
7779 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7780
7781 /* Make a suitable register to place result in. */
7782 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7783
7784 emit_queue ();
7785 start_sequence ();
7786
7787 switch (DECL_FUNCTION_CODE (fndecl))
7788 {
7789 case BUILT_IN_SIN:
7790 builtin_optab = sin_optab; break;
7791 case BUILT_IN_COS:
7792 builtin_optab = cos_optab; break;
7793 case BUILT_IN_FSQRT:
7794 builtin_optab = sqrt_optab; break;
7795 default:
7796 abort ();
7797 }
7798
7799 /* Compute into TARGET.
7800 Set TARGET to wherever the result comes back. */
7801 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7802 builtin_optab, op0, target, 0);
7803
7804 /* If we were unable to expand via the builtin, stop the
7805 sequence (without outputting the insns) and break, causing
 7806	     a call to the library function.  */
7807 if (target == 0)
7808 {
7809 end_sequence ();
7810 break;
7811 }
7812
7813 /* Check the results by default. But if flag_fast_math is turned on,
7814 then assume sqrt will always be called with valid arguments. */
7815
7816 if (! flag_fast_math)
7817 {
7818 /* Don't define the builtin FP instructions
7819 if your machine is not IEEE. */
7820 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7821 abort ();
7822
7823 lab1 = gen_label_rtx ();
7824
7825 /* Test the result; if it is NaN, set errno=EDOM because
7826 the argument was not in the domain. */
7827 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7828 emit_jump_insn (gen_beq (lab1));
7829
4ac09687 7830#ifdef TARGET_EDOM
ca695ac9
JB
7831 {
7832#ifdef GEN_ERRNO_RTX
7833 rtx errno_rtx = GEN_ERRNO_RTX;
7834#else
7835 rtx errno_rtx
e74a2201 7836 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
ca695ac9
JB
7837#endif
7838
7839 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7840 }
7841#else
7842 /* We can't set errno=EDOM directly; let the library call do it.
7843 Pop the arguments right away in case the call gets deleted. */
7844 NO_DEFER_POP;
7845 expand_call (exp, target, 0);
7846 OK_DEFER_POP;
7847#endif
7848
7849 emit_label (lab1);
7850 }
7851
7852 /* Output the entire sequence. */
7853 insns = get_insns ();
7854 end_sequence ();
7855 emit_insns (insns);
7856
7857 return target;
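
/* Editor's note, as a standalone sketch: the self-comparison above
   relies on a NaN being the only value that compares unequal to
   itself, so EQ (target, target) holding means the result is not a
   NaN and the errno-setting path can be skipped.  */

static int is_nan (double x)
{
  return x != x;
}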
7858
7859 /* __builtin_apply_args returns block of memory allocated on
7860 the stack into which is stored the arg pointer, structure
7861 value address, static chain, and all the registers that might
7862 possibly be used in performing a function call. The code is
7863 moved to the start of the function so the incoming values are
7864 saved. */
7865 case BUILT_IN_APPLY_ARGS:
7866 /* Don't do __builtin_apply_args more than once in a function.
7867 Save the result of the first call and reuse it. */
7868 if (apply_args_value != 0)
7869 return apply_args_value;
7870 {
7871 /* When this function is called, it means that registers must be
7872 saved on entry to this function. So we migrate the
7873 call to the first insn of this function. */
7874 rtx temp;
7875 rtx seq;
7876
7877 start_sequence ();
7878 temp = expand_builtin_apply_args ();
7879 seq = get_insns ();
7880 end_sequence ();
7881
7882 apply_args_value = temp;
7883
7884 /* Put the sequence after the NOTE that starts the function.
7885 If this is inside a SEQUENCE, make the outer-level insn
7886 chain current, so the code is placed at the start of the
7887 function. */
7888 push_topmost_sequence ();
7889 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7890 pop_topmost_sequence ();
7891 return temp;
7892 }
7893
7894 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7895 FUNCTION with a copy of the parameters described by
7896 ARGUMENTS, and ARGSIZE. It returns a block of memory
7897 allocated on the stack into which is stored all the registers
7898 that might possibly be used for returning the result of a
7899 function. ARGUMENTS is the value returned by
7900 __builtin_apply_args. ARGSIZE is the number of bytes of
7901 arguments that must be copied. ??? How should this value be
7902 computed? We'll also need a safe worst case value for varargs
7903 functions. */
7904 case BUILT_IN_APPLY:
7905 if (arglist == 0
7906 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7907 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7908 || TREE_CHAIN (arglist) == 0
7909 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7910 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7911 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7912 return const0_rtx;
7913 else
7914 {
7915 int i;
7916 tree t;
7917 rtx ops[3];
7918
7919 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7920 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7921
7922 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7923 }
7924
7925 /* __builtin_return (RESULT) causes the function to return the
7926 value described by RESULT. RESULT is address of the block of
7927 memory returned by __builtin_apply. */
7928 case BUILT_IN_RETURN:
7929 if (arglist
7930 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7931 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7932 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7933 NULL_RTX, VOIDmode, 0));
7934 return const0_rtx;
7935
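
/* Editor's illustration, standalone (the 64 is an arbitrary
   worst-case argument-block size chosen for exposition): how the
   three builtins handled above combine to forward a call with the
   incoming argument registers intact.  */

static int target_fn (int a, int b) { return a + b; }

static int forward (int a, int b)
{
  void *args = __builtin_apply_args ();
  void *ret  = __builtin_apply ((void (*) ()) target_fn, args, 64);

  __builtin_return (ret);      /* does not fall through */
}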
7936 case BUILT_IN_SAVEREGS:
7937 /* Don't do __builtin_saveregs more than once in a function.
7938 Save the result of the first call and reuse it. */
7939 if (saveregs_value != 0)
7940 return saveregs_value;
7941 {
7942 /* When this function is called, it means that registers must be
7943 saved on entry to this function. So we migrate the
7944 call to the first insn of this function. */
7945 rtx temp;
7946 rtx seq;
ca695ac9
JB
7947
7948 /* Now really call the function. `expand_call' does not call
7949 expand_builtin, so there is no danger of infinite recursion here. */
7950 start_sequence ();
7951
7952#ifdef EXPAND_BUILTIN_SAVEREGS
7953 /* Do whatever the machine needs done in this case. */
7954 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7955#else
7956 /* The register where the function returns its value
7957 is likely to have something else in it, such as an argument.
7958 So preserve that register around the call. */
d0c76654 7959
ca695ac9
JB
7960 if (value_mode != VOIDmode)
7961 {
d0c76654
RK
7962 rtx valreg = hard_libcall_value (value_mode);
7963 rtx saved_valreg = gen_reg_rtx (value_mode);
7964
ca695ac9 7965 emit_move_insn (saved_valreg, valreg);
d0c76654
RK
7966 temp = expand_call (exp, target, ignore);
7967 emit_move_insn (valreg, saved_valreg);
ca695ac9 7968 }
d0c76654
RK
7969 else
7970 /* Generate the call, putting the value in a pseudo. */
7971 temp = expand_call (exp, target, ignore);
ca695ac9
JB
7972#endif
7973
7974 seq = get_insns ();
7975 end_sequence ();
7976
7977 saveregs_value = temp;
7978
7979 /* Put the sequence after the NOTE that starts the function.
7980 If this is inside a SEQUENCE, make the outer-level insn
7981 chain current, so the code is placed at the start of the
7982 function. */
7983 push_topmost_sequence ();
7984 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7985 pop_topmost_sequence ();
7986 return temp;
7987 }
7988
7989 /* __builtin_args_info (N) returns word N of the arg space info
7990 for the current function. The number and meanings of words
7991 is controlled by the definition of CUMULATIVE_ARGS. */
7992 case BUILT_IN_ARGS_INFO:
7993 {
7994 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7995 int i;
7996 int *word_ptr = (int *) &current_function_args_info;
7997 tree type, elts, result;
7998
7999 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8000 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8001 __FILE__, __LINE__);
8002
8003 if (arglist != 0)
8004 {
8005 tree arg = TREE_VALUE (arglist);
8006 if (TREE_CODE (arg) != INTEGER_CST)
8007 error ("argument of `__builtin_args_info' must be constant");
8008 else
8009 {
8010 int wordnum = TREE_INT_CST_LOW (arg);
8011
8012 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8013 error ("argument of `__builtin_args_info' out of range");
8014 else
8015 return GEN_INT (word_ptr[wordnum]);
8016 }
8017 }
8018 else
8019 error ("missing argument in `__builtin_args_info'");
8020
8021 return const0_rtx;
8022
8023#if 0
8024 for (i = 0; i < nwords; i++)
8025 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8026
8027 type = build_array_type (integer_type_node,
8028 build_index_type (build_int_2 (nwords, 0)));
8029 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8030 TREE_CONSTANT (result) = 1;
8031 TREE_STATIC (result) = 1;
8032 result = build (INDIRECT_REF, build_pointer_type (type), result);
8033 TREE_CONSTANT (result) = 1;
8034 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8035#endif
8036 }
8037
17bbab26 8038 /* Return the address of the first anonymous stack arg. */
ca695ac9
JB
8039 case BUILT_IN_NEXT_ARG:
8040 {
8041 tree fntype = TREE_TYPE (current_function_decl);
c4dfe0fc 8042
33162beb
DE
8043 if ((TYPE_ARG_TYPES (fntype) == 0
8044 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8045 == void_type_node))
8046 && ! current_function_varargs)
ca695ac9
JB
8047 {
8048 error ("`va_start' used in function with fixed args");
8049 return const0_rtx;
8050 }
c4dfe0fc 8051
e4493c04
RK
8052 if (arglist)
8053 {
8054 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8055 tree arg = TREE_VALUE (arglist);
8056
8057 /* Strip off all nops for the sake of the comparison. This
6692a31f
RK
8058 is not quite the same as STRIP_NOPS. It does more.
 8058	       is not quite the same as STRIP_NOPS.  It does more.
 8060	       We must also strip off INDIRECT_REF for C++ reference
 8060	       parameters.  */
e4493c04
RK
8061 while (TREE_CODE (arg) == NOP_EXPR
8062 || TREE_CODE (arg) == CONVERT_EXPR
6692a31f
RK
8063 || TREE_CODE (arg) == NON_LVALUE_EXPR
8064 || TREE_CODE (arg) == INDIRECT_REF)
e4493c04
RK
8065 arg = TREE_OPERAND (arg, 0);
8066 if (arg != last_parm)
8067 warning ("second parameter of `va_start' not last named argument");
8068 }
5b4ff0de 8069 else if (! current_function_varargs)
e4493c04
RK
8070 /* Evidently an out of date version of <stdarg.h>; can't validate
8071 va_start's second argument, but can still work as intended. */
8072 warning ("`__builtin_next_arg' called without an argument");
ca695ac9
JB
8073 }
8074
8075 return expand_binop (Pmode, add_optab,
8076 current_function_internal_arg_pointer,
8077 current_function_arg_offset_rtx,
8078 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8079
8080 case BUILT_IN_CLASSIFY_TYPE:
8081 if (arglist != 0)
8082 {
8083 tree type = TREE_TYPE (TREE_VALUE (arglist));
8084 enum tree_code code = TREE_CODE (type);
8085 if (code == VOID_TYPE)
8086 return GEN_INT (void_type_class);
8087 if (code == INTEGER_TYPE)
8088 return GEN_INT (integer_type_class);
8089 if (code == CHAR_TYPE)
8090 return GEN_INT (char_type_class);
8091 if (code == ENUMERAL_TYPE)
8092 return GEN_INT (enumeral_type_class);
8093 if (code == BOOLEAN_TYPE)
8094 return GEN_INT (boolean_type_class);
8095 if (code == POINTER_TYPE)
8096 return GEN_INT (pointer_type_class);
8097 if (code == REFERENCE_TYPE)
8098 return GEN_INT (reference_type_class);
8099 if (code == OFFSET_TYPE)
8100 return GEN_INT (offset_type_class);
8101 if (code == REAL_TYPE)
8102 return GEN_INT (real_type_class);
8103 if (code == COMPLEX_TYPE)
8104 return GEN_INT (complex_type_class);
8105 if (code == FUNCTION_TYPE)
8106 return GEN_INT (function_type_class);
8107 if (code == METHOD_TYPE)
8108 return GEN_INT (method_type_class);
8109 if (code == RECORD_TYPE)
8110 return GEN_INT (record_type_class);
8111 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8112 return GEN_INT (union_type_class);
8113 if (code == ARRAY_TYPE)
4042d440
PB
8114 {
8115 if (TYPE_STRING_FLAG (type))
8116 return GEN_INT (string_type_class);
8117 else
8118 return GEN_INT (array_type_class);
8119 }
ca695ac9
JB
8120 if (code == SET_TYPE)
8121 return GEN_INT (set_type_class);
8122 if (code == FILE_TYPE)
8123 return GEN_INT (file_type_class);
8124 if (code == LANG_TYPE)
8125 return GEN_INT (lang_type_class);
8126 }
8127 return GEN_INT (no_type_class);
8128
8129 case BUILT_IN_CONSTANT_P:
8130 if (arglist == 0)
8131 return const0_rtx;
8132 else
33cf5823
RK
8133 {
8134 tree arg = TREE_VALUE (arglist);
8135
8136 STRIP_NOPS (arg);
8137 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8138 || (TREE_CODE (arg) == ADDR_EXPR
8139 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8140 ? const1_rtx : const0_rtx);
8141 }
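
/* Editor's illustration, standalone: the two forms the fold above
   recognizes as constant.  */

int k1 = __builtin_constant_p (42);      /* 1: a constant-class node */
int k2 = __builtin_constant_p ("abc");   /* 1: ADDR_EXPR of a STRING_CST */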
ca695ac9
JB
8142
8143 case BUILT_IN_FRAME_ADDRESS:
8144 /* The argument must be a nonnegative integer constant.
8145 It counts the number of frames to scan up the stack.
8146 The value is the address of that frame. */
8147 case BUILT_IN_RETURN_ADDRESS:
8148 /* The argument must be a nonnegative integer constant.
8149 It counts the number of frames to scan up the stack.
8150 The value is the return address saved in that frame. */
8151 if (arglist == 0)
8152 /* Warning about missing arg was already issued. */
8153 return const0_rtx;
8154 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8155 {
8156 error ("invalid arg to `__builtin_return_address'");
8157 return const0_rtx;
8158 }
153c149b 8159 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
ca695ac9
JB
8160 {
8161 error ("invalid arg to `__builtin_return_address'");
8162 return const0_rtx;
8163 }
8164 else
8165 {
2bbf216f
RK
8166 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8167 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8168 hard_frame_pointer_rtx);
ca695ac9
JB
8169
8170 /* For __builtin_frame_address, return what we've got. */
8171 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8172 return tem;
8173
2bbf216f
RK
8174 if (GET_CODE (tem) != REG)
8175 tem = copy_to_reg (tem);
8176 return tem;
ca695ac9
JB
8177 }
8178
8179 case BUILT_IN_ALLOCA:
8180 if (arglist == 0
8181 /* Arg could be non-integer if user redeclared this fcn wrong. */
8182 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 8183 break;
1ee86d15 8184
ca695ac9
JB
8185 /* Compute the argument. */
8186 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8187
8188 /* Allocate the desired space. */
1ee86d15 8189 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
ca695ac9
JB
8190
8191 case BUILT_IN_FFS:
8192 /* If not optimizing, call the library function. */
98aad286 8193 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8194 break;
8195
8196 if (arglist == 0
8197 /* Arg could be non-integer if user redeclared this fcn wrong. */
8198 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 8199 break;
ca695ac9
JB
8200
8201 /* Compute the argument. */
8202 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8203 /* Compute ffs, into TARGET if possible.
8204 Set TARGET to wherever the result comes back. */
8205 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8206 ffs_optab, op0, target, 1);
8207 if (target == 0)
8208 abort ();
8209 return target;
8210
8211 case BUILT_IN_STRLEN:
8212 /* If not optimizing, call the library function. */
98aad286 8213 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8214 break;
8215
8216 if (arglist == 0
8217 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8218 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7b073ca6 8219 break;
ca695ac9
JB
8220 else
8221 {
8222 tree src = TREE_VALUE (arglist);
8223 tree len = c_strlen (src);
8224
8225 int align
8226 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8227
8228 rtx result, src_rtx, char_rtx;
8229 enum machine_mode insn_mode = value_mode, char_mode;
8230 enum insn_code icode;
8231
8232 /* If the length is known, just return it. */
8233 if (len != 0)
8234 return expand_expr (len, target, mode, 0);
8235
8236 /* If SRC is not a pointer type, don't do this operation inline. */
8237 if (align == 0)
8238 break;
8239
8240 /* Call a function if we can't compute strlen in the right mode. */
8241
8242 while (insn_mode != VOIDmode)
8243 {
8244 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8245 if (icode != CODE_FOR_nothing)
8246 break;
bbf6f052 8247
ca695ac9
JB
8248 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8249 }
8250 if (insn_mode == VOIDmode)
8251 break;
bbf6f052 8252
ca695ac9
JB
8253 /* Make a place to write the result of the instruction. */
8254 result = target;
8255 if (! (result != 0
8256 && GET_CODE (result) == REG
8257 && GET_MODE (result) == insn_mode
8258 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8259 result = gen_reg_rtx (insn_mode);
bbf6f052 8260
ca695ac9
JB
8261 /* Make sure the operands are acceptable to the predicates. */
8262
8263 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8264 result = gen_reg_rtx (insn_mode);
8265
8266 src_rtx = memory_address (BLKmode,
88f63c77 8267 expand_expr (src, NULL_RTX, ptr_mode,
ca695ac9
JB
8268 EXPAND_NORMAL));
8269 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8270 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8271
8272 char_rtx = const0_rtx;
8273 char_mode = insn_operand_mode[(int)icode][2];
8274 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8275 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8276
8277 emit_insn (GEN_FCN (icode) (result,
8278 gen_rtx (MEM, BLKmode, src_rtx),
8279 char_rtx, GEN_INT (align)));
8280
8281 /* Return the value in the proper mode for this function. */
8282 if (GET_MODE (result) == value_mode)
8283 return result;
8284 else if (target != 0)
8285 {
8286 convert_move (target, result, 0);
8287 return target;
8288 }
8289 else
8290 return convert_to_mode (value_mode, result, 0);
8291 }
8292
8293 case BUILT_IN_STRCPY:
e87b4f3f 8294 /* If not optimizing, call the library function. */
98aad286 8295 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
e87b4f3f
RS
8296 break;
8297
8298 if (arglist == 0
ca695ac9
JB
8299 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8300 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8301 || TREE_CHAIN (arglist) == 0
8302 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 8303 break;
ca695ac9 8304 else
db0e6d01 8305 {
ca695ac9 8306 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
e7c33f54 8307
ca695ac9
JB
8308 if (len == 0)
8309 break;
e7c33f54 8310
ca695ac9 8311 len = size_binop (PLUS_EXPR, len, integer_one_node);
e7c33f54 8312
ca695ac9 8313 chainon (arglist, build_tree_list (NULL_TREE, len));
1bbddf11
JVA
8314 }
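      /* A minimal sketch of the transformation above: when c_strlen can
         compute the source length at compile time,

           __builtin_strcpy (d, "ab");

         gains a third argument, len = 2 + 1 = 3 (the string plus its
         terminator), and then drops into the BUILT_IN_MEMCPY case below
         exactly as if __builtin_memcpy (d, "ab", 3) had been written.  */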
8315
ca695ac9
JB
 8316 /* Drops in: control falls through to the memcpy case below. */
8317 case BUILT_IN_MEMCPY:
8318 /* If not optimizing, call the library function. */
98aad286 8319 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9 8320 break;
e7c33f54 8321
ca695ac9
JB
8322 if (arglist == 0
8323 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8324 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8325 || TREE_CHAIN (arglist) == 0
8326 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8327 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8328 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 8329 break;
ca695ac9 8330 else
e7c33f54 8331 {
ca695ac9
JB
8332 tree dest = TREE_VALUE (arglist);
8333 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8334 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e9cf6a97 8335 tree type;
e87b4f3f 8336
ca695ac9
JB
8337 int src_align
8338 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8339 int dest_align
8340 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8341 rtx dest_rtx, dest_mem, src_mem;
60bac6ea 8342
ca695ac9
JB
8343 /* If either SRC or DEST is not a pointer type, don't do
8344 this operation in-line. */
8345 if (src_align == 0 || dest_align == 0)
8346 {
8347 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8348 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8349 break;
8350 }
8351
88f63c77 8352 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
ca695ac9
JB
8353 dest_mem = gen_rtx (MEM, BLKmode,
8354 memory_address (BLKmode, dest_rtx));
e9cf6a97 8355 /* There could be a void* cast on top of the object. */
5480a90c
RK
8356 while (TREE_CODE (dest) == NOP_EXPR)
8357 dest = TREE_OPERAND (dest, 0);
8358 type = TREE_TYPE (TREE_TYPE (dest));
e9cf6a97 8359 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
ca695ac9
JB
8360 src_mem = gen_rtx (MEM, BLKmode,
8361 memory_address (BLKmode,
8362 expand_expr (src, NULL_RTX,
88f63c77
RK
8363 ptr_mode,
8364 EXPAND_SUM)));
e9cf6a97 8365 /* There could be a void* cast on top of the object. */
5480a90c
RK
8366 while (TREE_CODE (src) == NOP_EXPR)
8367 src = TREE_OPERAND (src, 0);
8368 type = TREE_TYPE (TREE_TYPE (src));
e9cf6a97 8369 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
ca695ac9
JB
8370
 8371 /* Copy the block in the widest units the alignment permits. */
8372 emit_block_move (dest_mem, src_mem,
8373 expand_expr (len, NULL_RTX, VOIDmode, 0),
8374 MIN (src_align, dest_align));
85c53d24 8375 return force_operand (dest_rtx, NULL_RTX);
ca695ac9
JB
8376 }
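      /* A worked example of the alignment choice above, assuming 4-byte
         words: if DEST is known to be 4-byte aligned but SRC only 1-byte
         aligned, MIN (src_align, dest_align) is 1 and emit_block_move must
         copy in byte-sized pieces or punt to a library call; when both
         pointers are 4-byte aligned it can move SImode words instead.  */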
8377
 8378/* These comparison functions need an instruction that produces an
 8379 actual result value. An ordinary compare that just sets the
 8380 condition codes is not enough. */
8381#ifdef HAVE_cmpstrsi
8382 case BUILT_IN_STRCMP:
8383 /* If not optimizing, call the library function. */
98aad286 8384 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8385 break;
8386
8387 if (arglist == 0
8388 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8389 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8390 || TREE_CHAIN (arglist) == 0
8391 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 8392 break;
ca695ac9
JB
8393 else if (!HAVE_cmpstrsi)
8394 break;
8395 {
8396 tree arg1 = TREE_VALUE (arglist);
8397 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8398 tree offset;
8399 tree len, len2;
8400
8401 len = c_strlen (arg1);
8402 if (len)
8403 len = size_binop (PLUS_EXPR, integer_one_node, len);
8404 len2 = c_strlen (arg2);
8405 if (len2)
8406 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8407
8408 /* If we don't have a constant length for the first, use the length
8409 of the second, if we know it. We don't require a constant for
8410 this case; some cost analysis could be done if both are available
8411 but neither is constant. For now, assume they're equally cheap.
8412
8413 If both strings have constant lengths, use the smaller. This
 8414 could arise if optimization results in strcmp being called with
8415 two fixed strings, or if the code was machine-generated. We should
8416 add some code to the `memcmp' handler below to deal with such
8417 situations, someday. */
8418 if (!len || TREE_CODE (len) != INTEGER_CST)
8419 {
8420 if (len2)
8421 len = len2;
8422 else if (len == 0)
8423 break;
8424 }
8425 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8426 {
8427 if (tree_int_cst_lt (len2, len))
8428 len = len2;
8429 }
8430
8431 chainon (arglist, build_tree_list (NULL_TREE, len));
8432 }
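      /* A short example of the length selection above:

           __builtin_strcmp ("abcd", s)

         has len = 4 + 1 = 5 and len2 unknown, so 5 is appended as the
         third argument; the comparison can stop after at most five bytes
         because the constant string ends there.  With two constant
         operands, the smaller of the two lengths is used instead.  */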
8433
 8434 /* Drops in: control falls through to the memcmp case below. */
8435 case BUILT_IN_MEMCMP:
8436 /* If not optimizing, call the library function. */
98aad286 8437 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8438 break;
8439
8440 if (arglist == 0
8441 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8442 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8443 || TREE_CHAIN (arglist) == 0
8444 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8445 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8446 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 8447 break;
ca695ac9
JB
8448 else if (!HAVE_cmpstrsi)
8449 break;
8450 {
8451 tree arg1 = TREE_VALUE (arglist);
8452 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8453 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8454 rtx result;
8455
8456 int arg1_align
8457 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8458 int arg2_align
8459 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8460 enum machine_mode insn_mode
8461 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
60bac6ea 8462
ca695ac9
JB
8463 /* If we don't have POINTER_TYPE, call the function. */
8464 if (arg1_align == 0 || arg2_align == 0)
8465 {
8466 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8467 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8468 break;
8469 }
60bac6ea 8470
ca695ac9
JB
8471 /* Make a place to write the result of the instruction. */
8472 result = target;
8473 if (! (result != 0
8474 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8475 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8476 result = gen_reg_rtx (insn_mode);
60bac6ea 8477
ca695ac9
JB
8478 emit_insn (gen_cmpstrsi (result,
8479 gen_rtx (MEM, BLKmode,
88f63c77
RK
8480 expand_expr (arg1, NULL_RTX,
8481 ptr_mode,
ca695ac9
JB
8482 EXPAND_NORMAL)),
8483 gen_rtx (MEM, BLKmode,
88f63c77
RK
8484 expand_expr (arg2, NULL_RTX,
8485 ptr_mode,
ca695ac9
JB
8486 EXPAND_NORMAL)),
8487 expand_expr (len, NULL_RTX, VOIDmode, 0),
8488 GEN_INT (MIN (arg1_align, arg2_align))));
60bac6ea 8489
ca695ac9
JB
8490 /* Return the value in the proper mode for this function. */
8491 mode = TYPE_MODE (TREE_TYPE (exp));
8492 if (GET_MODE (result) == mode)
8493 return result;
8494 else if (target != 0)
8495 {
8496 convert_move (target, result, 0);
8497 return target;
60bac6ea 8498 }
ca695ac9
JB
8499 else
8500 return convert_to_mode (mode, result, 0);
8501 }
60bac6ea 8502#else
ca695ac9
JB
8503 case BUILT_IN_STRCMP:
8504 case BUILT_IN_MEMCMP:
8505 break;
60bac6ea
RS
8506#endif
8507
4ed67205
RK
8508 /* __builtin_setjmp is passed a pointer to an array of five words
8509 (not all will be used on all machines). It operates similarly to
8510 the C library function of the same name, but is more efficient.
8511 Much of the code below (and for longjmp) is copied from the handling
8512 of non-local gotos.
8513
 8514 NOTE: This is intended for use by GNAT and will only work when
 8515 used the way GNAT uses it. This code will likely NOT survive to
8515 the method used by it. This code will likely NOT survive to
8516 the GCC 2.8.0 release. */
8517 case BUILT_IN_SETJMP:
8518 if (arglist == 0
8519 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8520 break;
8521
8522 {
8523 rtx buf_addr
8524 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist), subtarget,
8525 VOIDmode, 0));
8526 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8527 enum machine_mode sa_mode = Pmode;
8528 rtx stack_save;
a8a8cbb7 8529 int i;
4ed67205
RK
8530
8531 if (target == 0 || GET_CODE (target) != REG
8532 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8533 target = gen_reg_rtx (value_mode);
8534
8535 emit_queue ();
8536
0dddb42d 8537 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
4ed67205
RK
8538 current_function_calls_setjmp = 1;
8539
8540 /* We store the frame pointer and the address of lab1 in the buffer
8541 and use the rest of it for the stack save area, which is
8542 machine-dependent. */
8543 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8544 virtual_stack_vars_rtx);
8545 emit_move_insn
8546 (validize_mem (gen_rtx (MEM, Pmode,
8547 plus_constant (buf_addr,
8548 GET_MODE_SIZE (Pmode)))),
8549 gen_rtx (LABEL_REF, Pmode, lab1));
8550
8551#ifdef HAVE_save_stack_nonlocal
8552 if (HAVE_save_stack_nonlocal)
8553 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8554#endif
8555
8556 stack_save = gen_rtx (MEM, sa_mode,
8557 plus_constant (buf_addr,
8558 2 * GET_MODE_SIZE (Pmode)));
8559 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8560
8561 /* Set TARGET to zero and branch around the other case. */
8562 emit_move_insn (target, const0_rtx);
8563 emit_jump_insn (gen_jump (lab2));
8564 emit_barrier ();
8565 emit_label (lab1);
8566
a8a8cbb7 8567 /* Note that setjmp clobbers FP when we get here, so we have to
0dddb42d 8568 make sure it's marked as used by this function. */
a8a8cbb7
RK
8569 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8570
4ed67205
RK
 8571 /* Now put in the code to restore the frame pointer and, if needed,
 8572 the argument pointer. The code below is from expand_end_bindings
8573 in stmt.c; see detailed documentation there. */
8574#ifdef HAVE_nonlocal_goto
8575 if (! HAVE_nonlocal_goto)
8576#endif
8577 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8578
a8a8cbb7
RK
8579 current_function_has_nonlocal_goto = 1;
8580
4ed67205
RK
8581#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8582 if (fixed_regs[ARG_POINTER_REGNUM])
8583 {
8584#ifdef ELIMINABLE_REGS
8585 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
4ed67205
RK
8586
8587 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8588 if (elim_regs[i].from == ARG_POINTER_REGNUM
8589 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8590 break;
8591
 8592 if (i == sizeof elim_regs / sizeof elim_regs[0])
8593#endif
8594 {
8595 /* Now restore our arg pointer from the address at which it
8596 was saved in our stack frame.
 8597 If there hasn't been space allocated for it yet, make
8598 some now. */
8599 if (arg_pointer_save_area == 0)
8600 arg_pointer_save_area
8601 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8602 emit_move_insn (virtual_incoming_args_rtx,
8603 copy_to_reg (arg_pointer_save_area));
8604 }
8605 }
8606#endif
8607
8608 /* The result to return is in the static chain pointer. */
8609 if (GET_MODE (static_chain_rtx) == GET_MODE (target))
8610 emit_move_insn (target, static_chain_rtx);
8611 else
8612 convert_move (target, static_chain_rtx, 0);
8613
8614 emit_label (lab2);
8615 return target;
8616 }
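      /* Layout of the five-word buffer used above, assuming a 32-bit Pmode
         (a sketch; how much of the tail is really used is machine-dependent):

           word 0       saved frame pointer (virtual_stack_vars_rtx)
           word 1       address of LAB1, the receiver label
           words 2..4   stack save area, written in SA_MODE

         __builtin_longjmp below reads the same fields back at the same
         offsets, so the two expansions must agree on this layout.  */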
8617
8618 /* __builtin_longjmp is passed a pointer to an array of five words
8619 and a value to return. It's similar to the C library longjmp
8620 function but works with __builtin_setjmp above. */
8621 case BUILT_IN_LONGJMP:
8622 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8623 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8624 break;
8625
8626 {
8627 rtx buf_addr
8628 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist), NULL_RTX,
8629 VOIDmode, 0));
8630 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8631 rtx lab = gen_rtx (MEM, Pmode,
8632 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8633 enum machine_mode sa_mode
8634#ifdef HAVE_save_stack_nonlocal
8635 = (HAVE_save_stack_nonlocal
8636 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8637 : Pmode);
8638#else
8639 = Pmode;
8640#endif
8641 rtx stack = gen_rtx (MEM, sa_mode,
8642 plus_constant (buf_addr,
8643 2 * GET_MODE_SIZE (Pmode)));
8644 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), NULL_RTX,
8645 VOIDmode, 0);
8646
8647 /* Pick up FP, label, and SP from the block and jump. This code is
8648 from expand_goto in stmt.c; see there for detailed comments. */
 8649#ifdef HAVE_nonlocal_goto
8650 if (HAVE_nonlocal_goto)
8651 emit_insn (gen_nonlocal_goto (fp, lab, stack, value));
8652 else
8653#endif
8654 {
8655 emit_move_insn (hard_frame_pointer_rtx, fp);
8656 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8657
 8658 /* Put the return value in the static chain register. */
8659 emit_move_insn (static_chain_rtx, value);
8660 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8661 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
8662 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
8663 emit_indirect_jump (copy_to_reg (lab));
8664 }
8665
8666 return const0_rtx;
8667 }
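      /* A minimal usage sketch for the pair, subject to the GNAT-only
         caveat above; do_work and handle_unwind are placeholder names:

           void *buf[5];

           if (__builtin_setjmp (buf) == 0)
             do_work ();
           else
             handle_unwind ();

         A later __builtin_longjmp (buf, 1) transfers to LAB1 with 1 in
         static_chain_rtx, so the __builtin_setjmp call appears to return
         a second time with a nonzero value.  */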
8668
ca695ac9
JB
 8669 default: /* Just do a library call if this is an unknown builtin. */
8670 error ("built-in function `%s' not currently supported",
8671 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8672 }
e87b4f3f 8673
ca695ac9
JB
8674 /* The switch statement above can drop through to cause the function
8675 to be called normally. */
e7c33f54 8676
ca695ac9
JB
8677 return expand_call (exp, target, ignore);
8678}
8679\f
8680/* Built-in functions to perform an untyped call and return. */
0006469d 8681
ca695ac9
JB
8682/* For each register that may be used for calling a function, this
8683 gives a mode used to copy the register's value. VOIDmode indicates
8684 the register is not used for calling a function. If the machine
8685 has register windows, this gives only the outbound registers.
8686 INCOMING_REGNO gives the corresponding inbound register. */
8687static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 8688
ca695ac9
JB
8689/* For each register that may be used for returning values, this gives
8690 a mode used to copy the register's value. VOIDmode indicates the
8691 register is not used for returning values. If the machine has
8692 register windows, this gives only the outbound registers.
8693 INCOMING_REGNO gives the corresponding inbound register. */
8694static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 8695
ca695ac9
JB
8696/* For each register that may be used for calling a function, this
8697 gives the offset of that register into the block returned by
9faa82d8 8698 __builtin_apply_args. 0 indicates that the register is not
ca695ac9
JB
8699 used for calling a function. */
8700static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
0006469d 8701
ca695ac9
JB
8702/* Return the offset of register REGNO into the block returned by
8703 __builtin_apply_args. This is not declared static, since it is
8704 needed in objc-act.c. */
0006469d 8705
ca695ac9
JB
8706int
8707apply_args_register_offset (regno)
8708 int regno;
8709{
8710 apply_args_size ();
0006469d 8711
ca695ac9
JB
 8712 /* Arguments are always put in outgoing registers (in the argument
 8713 block) on machines where incoming and outgoing registers differ. */
8714#ifdef OUTGOING_REGNO
 8715 regno = OUTGOING_REGNO (regno);
8716#endif
8717 return apply_args_reg_offset[regno];
8718}
0006469d 8719
ca695ac9
JB
8720/* Return the size required for the block returned by __builtin_apply_args,
8721 and initialize apply_args_mode. */
0006469d 8722
ca695ac9
JB
8723static int
8724apply_args_size ()
8725{
8726 static int size = -1;
8727 int align, regno;
8728 enum machine_mode mode;
bbf6f052 8729
ca695ac9
JB
8730 /* The values computed by this function never change. */
8731 if (size < 0)
8732 {
8733 /* The first value is the incoming arg-pointer. */
8734 size = GET_MODE_SIZE (Pmode);
bbf6f052 8735
ca695ac9
JB
8736 /* The second value is the structure value address unless this is
8737 passed as an "invisible" first argument. */
8738 if (struct_value_rtx)
8739 size += GET_MODE_SIZE (Pmode);
8740
8741 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8742 if (FUNCTION_ARG_REGNO_P (regno))
bbf6f052 8743 {
ca695ac9
JB
8744 /* Search for the proper mode for copying this register's
8745 value. I'm not sure this is right, but it works so far. */
8746 enum machine_mode best_mode = VOIDmode;
8747
8748 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8749 mode != VOIDmode;
8750 mode = GET_MODE_WIDER_MODE (mode))
8751 if (HARD_REGNO_MODE_OK (regno, mode)
8752 && HARD_REGNO_NREGS (regno, mode) == 1)
8753 best_mode = mode;
8754
8755 if (best_mode == VOIDmode)
8756 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8757 mode != VOIDmode;
8758 mode = GET_MODE_WIDER_MODE (mode))
8759 if (HARD_REGNO_MODE_OK (regno, mode)
8760 && (mov_optab->handlers[(int) mode].insn_code
8761 != CODE_FOR_nothing))
8762 best_mode = mode;
8763
8764 mode = best_mode;
8765 if (mode == VOIDmode)
8766 abort ();
8767
8768 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8769 if (size % align != 0)
8770 size = CEIL (size, align) * align;
8771 apply_args_reg_offset[regno] = size;
8772 size += GET_MODE_SIZE (mode);
8773 apply_args_mode[regno] = mode;
8774 }
8775 else
8776 {
8777 apply_args_mode[regno] = VOIDmode;
8778 apply_args_reg_offset[regno] = 0;
bbf6f052 8779 }
ca695ac9
JB
8780 }
8781 return size;
8782}
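/* A worked example of the rounding arithmetic above, assuming a 32-bit
   Pmode and a register whose best mode needs 8-byte alignment: SIZE
   starts at 4 for the incoming arg-pointer; ALIGN is 8 and 4 % 8 != 0,
   so SIZE becomes CEIL (4, 8) * 8 = 8, which is recorded as that
   register's offset, and SIZE then grows to 8 + 8 = 16.  */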
bbf6f052 8783
ca695ac9
JB
8784/* Return the size required for the block returned by __builtin_apply,
8785 and initialize apply_result_mode. */
bbf6f052 8786
ca695ac9
JB
8787static int
8788apply_result_size ()
8789{
8790 static int size = -1;
8791 int align, regno;
8792 enum machine_mode mode;
bbf6f052 8793
ca695ac9
JB
8794 /* The values computed by this function never change. */
8795 if (size < 0)
8796 {
8797 size = 0;
bbf6f052 8798
ca695ac9
JB
8799 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8800 if (FUNCTION_VALUE_REGNO_P (regno))
8801 {
8802 /* Search for the proper mode for copying this register's
8803 value. I'm not sure this is right, but it works so far. */
8804 enum machine_mode best_mode = VOIDmode;
bbf6f052 8805
ca695ac9
JB
8806 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8807 mode != TImode;
8808 mode = GET_MODE_WIDER_MODE (mode))
8809 if (HARD_REGNO_MODE_OK (regno, mode))
8810 best_mode = mode;
bbf6f052 8811
ca695ac9
JB
8812 if (best_mode == VOIDmode)
8813 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8814 mode != VOIDmode;
8815 mode = GET_MODE_WIDER_MODE (mode))
8816 if (HARD_REGNO_MODE_OK (regno, mode)
8817 && (mov_optab->handlers[(int) mode].insn_code
8818 != CODE_FOR_nothing))
8819 best_mode = mode;
bbf6f052 8820
ca695ac9
JB
8821 mode = best_mode;
8822 if (mode == VOIDmode)
8823 abort ();
bbf6f052 8824
ca695ac9
JB
8825 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8826 if (size % align != 0)
8827 size = CEIL (size, align) * align;
8828 size += GET_MODE_SIZE (mode);
8829 apply_result_mode[regno] = mode;
bbf6f052
RK
8830 }
8831 else
ca695ac9 8832 apply_result_mode[regno] = VOIDmode;
bbf6f052 8833
ca695ac9
JB
8834 /* Allow targets that use untyped_call and untyped_return to override
8835 the size so that machine-specific information can be stored here. */
8836#ifdef APPLY_RESULT_SIZE
8837 size = APPLY_RESULT_SIZE;
8838#endif
8839 }
8840 return size;
8841}
bbf6f052 8842
ca695ac9
JB
8843#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8844/* Create a vector describing the result block RESULT. If SAVEP is true,
8845 the result block is used to save the values; otherwise it is used to
8846 restore the values. */
bbf6f052 8847
ca695ac9
JB
8848static rtx
8849result_vector (savep, result)
8850 int savep;
8851 rtx result;
8852{
8853 int regno, size, align, nelts;
8854 enum machine_mode mode;
8855 rtx reg, mem;
8856 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8857
8858 size = nelts = 0;
8859 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8860 if ((mode = apply_result_mode[regno]) != VOIDmode)
8861 {
8862 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8863 if (size % align != 0)
8864 size = CEIL (size, align) * align;
18992995 8865 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
ca695ac9
JB
8866 mem = change_address (result, mode,
8867 plus_constant (XEXP (result, 0), size));
8868 savevec[nelts++] = (savep
8869 ? gen_rtx (SET, VOIDmode, mem, reg)
8870 : gen_rtx (SET, VOIDmode, reg, mem));
8871 size += GET_MODE_SIZE (mode);
bbf6f052 8872 }
ca695ac9
JB
8873 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8874}
8875#endif /* HAVE_untyped_call or HAVE_untyped_return */
bbf6f052 8876
ca695ac9
JB
8877/* Save the state required to perform an untyped call with the same
8878 arguments as were passed to the current function. */
8879
8880static rtx
8881expand_builtin_apply_args ()
8882{
8883 rtx registers;
8884 int size, align, regno;
8885 enum machine_mode mode;
8886
8887 /* Create a block where the arg-pointer, structure value address,
8888 and argument registers can be saved. */
8889 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8890
8891 /* Walk past the arg-pointer and structure value address. */
8892 size = GET_MODE_SIZE (Pmode);
8893 if (struct_value_rtx)
8894 size += GET_MODE_SIZE (Pmode);
8895
c816db88
RK
8896 /* Save each register used in calling a function to the block. */
8897 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
ca695ac9 8898 if ((mode = apply_args_mode[regno]) != VOIDmode)
bbf6f052 8899 {
ee33823f
RK
8900 rtx tem;
8901
ca695ac9
JB
8902 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8903 if (size % align != 0)
8904 size = CEIL (size, align) * align;
ee33823f
RK
8905
8906 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8907
8908#ifdef STACK_REGS
8909 /* For reg-stack.c's stack register household.
8910 Compare with a similar piece of code in function.c. */
8911
8912 emit_insn (gen_rtx (USE, mode, tem));
8913#endif
8914
ca695ac9
JB
8915 emit_move_insn (change_address (registers, mode,
8916 plus_constant (XEXP (registers, 0),
8917 size)),
ee33823f 8918 tem);
ca695ac9 8919 size += GET_MODE_SIZE (mode);
bbf6f052
RK
8920 }
8921
ca695ac9
JB
8922 /* Save the arg pointer to the block. */
8923 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8924 copy_to_reg (virtual_incoming_args_rtx));
8925 size = GET_MODE_SIZE (Pmode);
bbf6f052 8926
ca695ac9
JB
8927 /* Save the structure value address unless this is passed as an
8928 "invisible" first argument. */
8929 if (struct_value_incoming_rtx)
8930 {
8931 emit_move_insn (change_address (registers, Pmode,
8932 plus_constant (XEXP (registers, 0),
8933 size)),
8934 copy_to_reg (struct_value_incoming_rtx));
8935 size += GET_MODE_SIZE (Pmode);
8936 }
8937
8938 /* Return the address of the block. */
8939 return copy_addr_to_reg (XEXP (registers, 0));
8940}
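/* Shape of the block built above, in a hypothetical configuration with
   a 32-bit Pmode, no struct_value_rtx, and two SImode argument
   registers A0 and A1:

     offset 0   saved incoming arg-pointer
     offset 4   A0  (apply_args_reg_offset for A0 is 4)
     offset 8   A1  (apply_args_reg_offset for A1 is 8)

   expand_builtin_apply below restores registers from the same offsets.  */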
8941
8942/* Perform an untyped call and save the state required to perform an
8943 untyped return of whatever value was returned by the given function. */
8944
8945static rtx
8946expand_builtin_apply (function, arguments, argsize)
8947 rtx function, arguments, argsize;
8948{
8949 int size, align, regno;
8950 enum machine_mode mode;
8951 rtx incoming_args, result, reg, dest, call_insn;
8952 rtx old_stack_level = 0;
b3f8cf4a 8953 rtx call_fusage = 0;
bbf6f052 8954
ca695ac9
JB
8955 /* Create a block where the return registers can be saved. */
8956 result = assign_stack_local (BLKmode, apply_result_size (), -1);
bbf6f052 8957
ca695ac9 8958 /* ??? The argsize value should be adjusted here. */
bbf6f052 8959
ca695ac9
JB
8960 /* Fetch the arg pointer from the ARGUMENTS block. */
8961 incoming_args = gen_reg_rtx (Pmode);
8962 emit_move_insn (incoming_args,
8963 gen_rtx (MEM, Pmode, arguments));
8964#ifndef STACK_GROWS_DOWNWARD
8965 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8966 incoming_args, 0, OPTAB_LIB_WIDEN);
46b68a37
JW
8967#endif
8968
ca695ac9
JB
8969 /* Perform postincrements before actually calling the function. */
8970 emit_queue ();
46b68a37 8971
ca695ac9
JB
8972 /* Push a new argument block and copy the arguments. */
8973 do_pending_stack_adjust ();
8974 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
bbf6f052 8975
ca695ac9
JB
8976 /* Push a block of memory onto the stack to store the memory arguments.
8977 Save the address in a register, and copy the memory arguments. ??? I
 8978 haven't figured out how the calling convention macros affect this,
 8979 but it's likely that the source and/or destination addresses in
 8980 the block copy will need updating in machine-specific ways. */
8981 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8982 emit_block_move (gen_rtx (MEM, BLKmode, dest),
8983 gen_rtx (MEM, BLKmode, incoming_args),
8984 argsize,
8985 PARM_BOUNDARY / BITS_PER_UNIT);
bbf6f052 8986
ca695ac9
JB
8987 /* Refer to the argument block. */
8988 apply_args_size ();
8989 arguments = gen_rtx (MEM, BLKmode, arguments);
8990
8991 /* Walk past the arg-pointer and structure value address. */
8992 size = GET_MODE_SIZE (Pmode);
8993 if (struct_value_rtx)
8994 size += GET_MODE_SIZE (Pmode);
8995
8996 /* Restore each of the registers previously saved. Make USE insns
c816db88
RK
8997 for each of these registers for use in making the call. */
8998 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
ca695ac9
JB
8999 if ((mode = apply_args_mode[regno]) != VOIDmode)
9000 {
9001 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9002 if (size % align != 0)
9003 size = CEIL (size, align) * align;
9004 reg = gen_rtx (REG, mode, regno);
9005 emit_move_insn (reg,
9006 change_address (arguments, mode,
9007 plus_constant (XEXP (arguments, 0),
9008 size)));
9009
b3f8cf4a 9010 use_reg (&call_fusage, reg);
ca695ac9
JB
9011 size += GET_MODE_SIZE (mode);
9012 }
9013
9014 /* Restore the structure value address unless this is passed as an
9015 "invisible" first argument. */
9016 size = GET_MODE_SIZE (Pmode);
9017 if (struct_value_rtx)
9018 {
9019 rtx value = gen_reg_rtx (Pmode);
9020 emit_move_insn (value,
9021 change_address (arguments, Pmode,
9022 plus_constant (XEXP (arguments, 0),
9023 size)));
9024 emit_move_insn (struct_value_rtx, value);
9025 if (GET_CODE (struct_value_rtx) == REG)
b3f8cf4a 9026 use_reg (&call_fusage, struct_value_rtx);
ca695ac9
JB
9027 size += GET_MODE_SIZE (Pmode);
9028 }
bbf6f052 9029
ca695ac9 9030 /* All arguments and registers used for the call are set up by now! */
b3f8cf4a 9031 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
bbf6f052 9032
ca695ac9
JB
9033 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9034 and we don't want to load it into a register as an optimization,
9035 because prepare_call_address already did it if it should be done. */
9036 if (GET_CODE (function) != SYMBOL_REF)
9037 function = memory_address (FUNCTION_MODE, function);
bbf6f052 9038
ca695ac9
JB
9039 /* Generate the actual call instruction and save the return value. */
9040#ifdef HAVE_untyped_call
9041 if (HAVE_untyped_call)
9042 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9043 result, result_vector (1, result)));
9044 else
9045#endif
9046#ifdef HAVE_call_value
9047 if (HAVE_call_value)
9048 {
9049 rtx valreg = 0;
bbf6f052 9050
ca695ac9
JB
9051 /* Locate the unique return register. It is not possible to
9052 express a call that sets more than one return register using
9053 call_value; use untyped_call for that. In fact, untyped_call
9054 only needs to save the return registers in the given block. */
9055 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9056 if ((mode = apply_result_mode[regno]) != VOIDmode)
9057 {
9058 if (valreg)
9059 abort (); /* HAVE_untyped_call required. */
9060 valreg = gen_rtx (REG, mode, regno);
9061 }
bbf6f052 9062
ca695ac9
JB
9063 emit_call_insn (gen_call_value (valreg,
9064 gen_rtx (MEM, FUNCTION_MODE, function),
9065 const0_rtx, NULL_RTX, const0_rtx));
bbf6f052 9066
ca695ac9
JB
9067 emit_move_insn (change_address (result, GET_MODE (valreg),
9068 XEXP (result, 0)),
9069 valreg);
9070 }
9071 else
9072#endif
9073 abort ();
bbf6f052 9074
b3f8cf4a 9075 /* Find the CALL insn we just emitted. */
ca695ac9
JB
9076 for (call_insn = get_last_insn ();
9077 call_insn && GET_CODE (call_insn) != CALL_INSN;
9078 call_insn = PREV_INSN (call_insn))
9079 ;
bbf6f052 9080
ca695ac9
JB
9081 if (! call_insn)
9082 abort ();
bbf6f052 9083
6d100794
RK
9084 /* Put the register usage information on the CALL. If there is already
9085 some usage information, put ours at the end. */
9086 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9087 {
9088 rtx link;
9089
9090 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9091 link = XEXP (link, 1))
9092 ;
9093
9094 XEXP (link, 1) = call_fusage;
9095 }
9096 else
9097 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
e7c33f54 9098
ca695ac9
JB
9099 /* Restore the stack. */
9100 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
e7c33f54 9101
ca695ac9
JB
9102 /* Return the address of the result block. */
9103 return copy_addr_to_reg (XEXP (result, 0));
9104}
e7c33f54 9105
ca695ac9 9106/* Perform an untyped return. */
e7c33f54 9107
ca695ac9
JB
9108static void
9109expand_builtin_return (result)
9110 rtx result;
9111{
9112 int size, align, regno;
9113 enum machine_mode mode;
9114 rtx reg;
b3f8cf4a 9115 rtx call_fusage = 0;
e7c33f54 9116
ca695ac9
JB
9117 apply_result_size ();
9118 result = gen_rtx (MEM, BLKmode, result);
e7c33f54 9119
ca695ac9
JB
9120#ifdef HAVE_untyped_return
9121 if (HAVE_untyped_return)
9122 {
9123 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9124 emit_barrier ();
9125 return;
9126 }
9127#endif
e7c33f54 9128
ca695ac9
JB
9129 /* Restore the return value and note that each value is used. */
9130 size = 0;
9131 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9132 if ((mode = apply_result_mode[regno]) != VOIDmode)
9133 {
9134 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9135 if (size % align != 0)
9136 size = CEIL (size, align) * align;
9137 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9138 emit_move_insn (reg,
9139 change_address (result, mode,
9140 plus_constant (XEXP (result, 0),
9141 size)));
e7c33f54 9142
b3f8cf4a 9143 push_to_sequence (call_fusage);
ca695ac9 9144 emit_insn (gen_rtx (USE, VOIDmode, reg));
b3f8cf4a 9145 call_fusage = get_insns ();
ca695ac9
JB
9146 end_sequence ();
9147 size += GET_MODE_SIZE (mode);
9148 }
e7c33f54 9149
ca695ac9 9150 /* Put the USE insns before the return. */
b3f8cf4a 9151 emit_insns (call_fusage);
e7c33f54 9152
ca695ac9
JB
 9153 /* Return whatever value was restored by jumping directly to the end
9154 of the function. */
9155 expand_null_return ();
9156}
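/* The three builtins above exist to support transparent forwarding
   functions.  A minimal sketch, assuming 64 bytes is a safe upper bound
   for the pushed arguments on the target (work is a placeholder):

     void work ();

     void forwarder ()
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply (work, args, 64);
       __builtin_return (result);
     }

   The forwarder passes its own register and stack arguments through
   unchanged and returns whatever work returned, without knowing either
   function's signature.  */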
9157\f
9158/* Expand code for a post- or pre- increment or decrement
9159 and return the RTX for the result.
9160 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
e7c33f54 9161
ca695ac9
JB
9162static rtx
9163expand_increment (exp, post)
9164 register tree exp;
9165 int post;
9166{
9167 register rtx op0, op1;
9168 register rtx temp, value;
9169 register tree incremented = TREE_OPERAND (exp, 0);
9170 optab this_optab = add_optab;
9171 int icode;
9172 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9173 int op0_is_copy = 0;
9174 int single_insn = 0;
a97f5a86
RS
9175 /* 1 means we can't store into OP0 directly,
9176 because it is a subreg narrower than a word,
9177 and we don't dare clobber the rest of the word. */
9178 int bad_subreg = 0;
e7c33f54 9179
ca695ac9 9180 if (output_bytecode)
c02bd5d9
JB
9181 {
9182 bc_expand_expr (exp);
9183 return NULL_RTX;
9184 }
e7c33f54 9185
ca695ac9
JB
9186 /* Stabilize any component ref that might need to be
9187 evaluated more than once below. */
9188 if (!post
9189 || TREE_CODE (incremented) == BIT_FIELD_REF
9190 || (TREE_CODE (incremented) == COMPONENT_REF
9191 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9192 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9193 incremented = stabilize_reference (incremented);
9194 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9195 ones into save exprs so that they don't accidentally get evaluated
9196 more than once by the code below. */
9197 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9198 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9199 incremented = save_expr (incremented);
bbf6f052 9200
ca695ac9
JB
9201 /* Compute the operands as RTX.
9202 Note whether OP0 is the actual lvalue or a copy of it:
9203 I believe it is a copy iff it is a register or subreg
9204 and insns were generated in computing it. */
bbf6f052 9205
ca695ac9
JB
9206 temp = get_last_insn ();
9207 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
bbf6f052 9208
ca695ac9 9209 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9faa82d8 9210 in place but instead must do sign- or zero-extension during assignment,
ca695ac9
JB
9211 so we copy it into a new register and let the code below use it as
9212 a copy.
bbf6f052 9213
ca695ac9
JB
 9214 Note that we can safely modify this SUBREG since it is known not to be
9215 shared (it was made by the expand_expr call above). */
bbf6f052 9216
ca695ac9 9217 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
3e073e72
RK
9218 {
9219 if (post)
9220 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9221 else
9222 bad_subreg = 1;
9223 }
a97f5a86
RS
9224 else if (GET_CODE (op0) == SUBREG
9225 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
79777b79
RK
9226 {
9227 /* We cannot increment this SUBREG in place. If we are
9228 post-incrementing, get a copy of the old value. Otherwise,
9229 just mark that we cannot increment in place. */
9230 if (post)
9231 op0 = copy_to_reg (op0);
9232 else
9233 bad_subreg = 1;
9234 }
bbf6f052 9235
ca695ac9
JB
9236 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9237 && temp != get_last_insn ());
9238 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052 9239
ca695ac9
JB
9240 /* Decide whether incrementing or decrementing. */
9241 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9242 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9243 this_optab = sub_optab;
bbf6f052 9244
ca695ac9
JB
9245 /* Convert decrement by a constant into a negative increment. */
9246 if (this_optab == sub_optab
9247 && GET_CODE (op1) == CONST_INT)
9248 {
9249 op1 = GEN_INT (- INTVAL (op1));
9250 this_optab = add_optab;
9251 }
bbf6f052 9252
ca695ac9
JB
9253 /* For a preincrement, see if we can do this with a single instruction. */
9254 if (!post)
9255 {
9256 icode = (int) this_optab->handlers[(int) mode].insn_code;
9257 if (icode != (int) CODE_FOR_nothing
9258 /* Make sure that OP0 is valid for operands 0 and 1
9259 of the insn we want to queue. */
9260 && (*insn_operand_predicate[icode][0]) (op0, mode)
9261 && (*insn_operand_predicate[icode][1]) (op0, mode)
9262 && (*insn_operand_predicate[icode][2]) (op1, mode))
9263 single_insn = 1;
9264 }
bbf6f052 9265
ca695ac9
JB
9266 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9267 then we cannot just increment OP0. We must therefore contrive to
9268 increment the original value. Then, for postincrement, we can return
9269 OP0 since it is a copy of the old value. For preincrement, expand here
a97f5a86
RS
9270 unless we can do it with a single insn.
9271
9272 Likewise if storing directly into OP0 would clobber high bits
9273 we need to preserve (bad_subreg). */
9274 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
ca695ac9
JB
9275 {
9276 /* This is the easiest way to increment the value wherever it is.
9277 Problems with multiple evaluation of INCREMENTED are prevented
9278 because either (1) it is a component_ref or preincrement,
9279 in which case it was stabilized above, or (2) it is an array_ref
9280 with constant index in an array in a register, which is
9281 safe to reevaluate. */
9282 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9283 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9284 ? MINUS_EXPR : PLUS_EXPR),
9285 TREE_TYPE (exp),
9286 incremented,
9287 TREE_OPERAND (exp, 1));
e9cdf6e4
RK
9288
9289 while (TREE_CODE (incremented) == NOP_EXPR
9290 || TREE_CODE (incremented) == CONVERT_EXPR)
9291 {
9292 newexp = convert (TREE_TYPE (incremented), newexp);
9293 incremented = TREE_OPERAND (incremented, 0);
9294 }
9295
ca695ac9
JB
9296 temp = expand_assignment (incremented, newexp, ! post, 0);
9297 return post ? op0 : temp;
9298 }
bbf6f052 9299
ca695ac9
JB
9300 if (post)
9301 {
9302 /* We have a true reference to the value in OP0.
9303 If there is an insn to add or subtract in this mode, queue it.
9304 Queueing the increment insn avoids the register shuffling
9305 that often results if we must increment now and first save
9306 the old value for subsequent use. */
bbf6f052 9307
ca695ac9
JB
9308#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9309 op0 = stabilize (op0);
9310#endif
bbf6f052 9311
ca695ac9
JB
9312 icode = (int) this_optab->handlers[(int) mode].insn_code;
9313 if (icode != (int) CODE_FOR_nothing
9314 /* Make sure that OP0 is valid for operands 0 and 1
9315 of the insn we want to queue. */
9316 && (*insn_operand_predicate[icode][0]) (op0, mode)
9317 && (*insn_operand_predicate[icode][1]) (op0, mode))
9318 {
9319 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9320 op1 = force_reg (mode, op1);
bbf6f052 9321
ca695ac9
JB
9322 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9323 }
9324 }
bbf6f052 9325
ca695ac9
JB
9326 /* Preincrement, or we can't increment with one simple insn. */
9327 if (post)
9328 /* Save a copy of the value before inc or dec, to return it later. */
9329 temp = value = copy_to_reg (op0);
9330 else
9331 /* Arrange to return the incremented value. */
 9332 /* Copy the rtx because expand_binop will protect it from the queue,
9333 and the results of that would be invalid for us to return
9334 if our caller does emit_queue before using our result. */
9335 temp = copy_rtx (value = op0);
bbf6f052 9336
ca695ac9
JB
9337 /* Increment however we can. */
9338 op1 = expand_binop (mode, this_optab, value, op1, op0,
9339 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9340 /* Make sure the value is stored into OP0. */
9341 if (op1 != op0)
9342 emit_move_insn (op0, op1);
bbf6f052 9343
ca695ac9
JB
9344 return temp;
9345}
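/* An example of the post-increment strategies above, for an int I held
   in a register and the statement J = I++: when the add insn's predicates
   accept I directly, the increment is queued and an rtx standing for the
   old value is returned, so the store to J uses the pre-increment value
   and the add itself is emitted at the next emit_queue; otherwise the old
   value is first copied into a fresh register and the add is emitted
   immediately.  */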
9346\f
9347/* Expand all function calls contained within EXP, innermost ones first.
9348 But don't look within expressions that have sequence points.
9349 For each CALL_EXPR, record the rtx for its value
9350 in the CALL_EXPR_RTL field. */
bbf6f052 9351
ca695ac9
JB
9352static void
9353preexpand_calls (exp)
9354 tree exp;
9355{
9356 register int nops, i;
9357 int type = TREE_CODE_CLASS (TREE_CODE (exp));
bbf6f052 9358
ca695ac9
JB
9359 if (! do_preexpand_calls)
9360 return;
bbf6f052 9361
ca695ac9 9362 /* Only expressions and references can contain calls. */
bbf6f052 9363
ca695ac9
JB
9364 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9365 return;
bbf6f052 9366
ca695ac9
JB
9367 switch (TREE_CODE (exp))
9368 {
9369 case CALL_EXPR:
9370 /* Do nothing if already expanded. */
9371 if (CALL_EXPR_RTL (exp) != 0)
9372 return;
bbf6f052 9373
ca695ac9
JB
9374 /* Do nothing to built-in functions. */
9375 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
9376 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6676e72f
RK
9377 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9378 /* Do nothing if the call returns a variable-sized object. */
9379 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST)
ca695ac9
JB
9380 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9381 return;
bbf6f052 9382
ca695ac9
JB
9383 case COMPOUND_EXPR:
9384 case COND_EXPR:
9385 case TRUTH_ANDIF_EXPR:
9386 case TRUTH_ORIF_EXPR:
9387 /* If we find one of these, then we can be sure
9388 the adjust will be done for it (since it makes jumps).
9389 Do it now, so that if this is inside an argument
9390 of a function, we don't get the stack adjustment
9391 after some other args have already been pushed. */
9392 do_pending_stack_adjust ();
9393 return;
bbf6f052 9394
ca695ac9
JB
9395 case BLOCK:
9396 case RTL_EXPR:
9397 case WITH_CLEANUP_EXPR:
402c7311 9398 case CLEANUP_POINT_EXPR:
ca695ac9 9399 return;
bbf6f052 9400
ca695ac9
JB
9401 case SAVE_EXPR:
9402 if (SAVE_EXPR_RTL (exp) != 0)
9403 return;
9404 }
bbf6f052 9405
ca695ac9
JB
9406 nops = tree_code_length[(int) TREE_CODE (exp)];
9407 for (i = 0; i < nops; i++)
9408 if (TREE_OPERAND (exp, i) != 0)
9409 {
9410 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9411 if (type == 'e' || type == '<' || type == '1' || type == '2'
9412 || type == 'r')
9413 preexpand_calls (TREE_OPERAND (exp, i));
9414 }
bbf6f052
RK
9415}
9416\f
ca695ac9
JB
9417/* At the start of a function, record that we have no previously-pushed
9418 arguments waiting to be popped. */
0006469d 9419
ca695ac9
JB
9420void
9421init_pending_stack_adjust ()
9422{
9423 pending_stack_adjust = 0;
9424}
fb2ca25a 9425
ca695ac9
JB
 9426 /* When exiting from a function, if safe, clear out any pending stack adjust
9427 so the adjustment won't get done. */
904762c8 9428
ca695ac9
JB
9429void
9430clear_pending_stack_adjust ()
fb2ca25a 9431{
ca695ac9 9432#ifdef EXIT_IGNORE_STACK
b7c2e1e2
RK
9433 if (optimize > 0
9434 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
ca695ac9
JB
9435 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9436 && ! flag_inline_functions)
9437 pending_stack_adjust = 0;
fb2ca25a 9438#endif
fb2ca25a
KKT
9439}
9440
ca695ac9
JB
9441/* Pop any previously-pushed arguments that have not been popped yet. */
9442
9443void
9444do_pending_stack_adjust ()
9445{
9446 if (inhibit_defer_pop == 0)
9447 {
9448 if (pending_stack_adjust != 0)
9449 adjust_stack (GEN_INT (pending_stack_adjust));
9450 pending_stack_adjust = 0;
9451 }
9452}
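/* A worked example of the deferral above: two consecutive calls that each
   pushed 12 bytes of arguments leave pending_stack_adjust at 24, and one
   adjust_stack of 24 then pops both argument blocks at once instead of a
   separate 12-byte pop after each call.  */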
9453
5dab5552
MS
 9454 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9455 Returns the cleanups to be performed. */
9456
9457static tree
9458defer_cleanups_to (old_cleanups)
9459 tree old_cleanups;
9460{
9461 tree new_cleanups = NULL_TREE;
9462 tree cleanups = cleanups_this_call;
9463 tree last = NULL_TREE;
9464
9465 while (cleanups_this_call != old_cleanups)
9466 {
61d6b1cc 9467 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
4ea8537b 9468 last = cleanups_this_call;
5dab5552
MS
9469 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9470 }
9471
9472 if (last)
9473 {
9474 /* Remove the list from the chain of cleanups. */
9475 TREE_CHAIN (last) = NULL_TREE;
9476
 9477 /* Reverse them so that we can build them in the right order. */
9478 cleanups = nreverse (cleanups);
9479
9ba73d38
MS
9480 /* All cleanups must be on the function_obstack. */
9481 push_obstacks_nochange ();
9482 resume_temporary_allocation ();
9483
5dab5552
MS
9484 while (cleanups)
9485 {
9486 if (new_cleanups)
9487 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9488 TREE_VALUE (cleanups), new_cleanups);
9489 else
9490 new_cleanups = TREE_VALUE (cleanups);
9491
9492 cleanups = TREE_CHAIN (cleanups);
9493 }
9ba73d38
MS
9494
9495 pop_obstacks ();
5dab5552
MS
9496 }
9497
9498 return new_cleanups;
9499}
9500
ca695ac9
JB
9501/* Expand all cleanups up to OLD_CLEANUPS.
9502 Needed here, and also for language-dependent calls. */
904762c8 9503
ca695ac9
JB
9504void
9505expand_cleanups_to (old_cleanups)
9506 tree old_cleanups;
0006469d 9507{
ca695ac9 9508 while (cleanups_this_call != old_cleanups)
0006469d 9509 {
61d6b1cc 9510 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
d3158f1a 9511 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
ca695ac9
JB
9512 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9513 }
9514}
9515\f
9516/* Expand conditional expressions. */
0006469d 9517
ca695ac9
JB
9518/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9519 LABEL is an rtx of code CODE_LABEL, in this function and all the
9520 functions here. */
0006469d 9521
ca695ac9
JB
9522void
9523jumpifnot (exp, label)
9524 tree exp;
9525 rtx label;
9526{
9527 do_jump (exp, label, NULL_RTX);
9528}
0006469d 9529
ca695ac9 9530/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
0006469d 9531
ca695ac9
JB
9532void
9533jumpif (exp, label)
9534 tree exp;
9535 rtx label;
9536{
9537 do_jump (exp, NULL_RTX, label);
9538}
0006469d 9539
ca695ac9
JB
9540/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9541 the result is zero, or IF_TRUE_LABEL if the result is one.
9542 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9543 meaning fall through in that case.
0006469d 9544
ca695ac9
JB
9545 do_jump always does any pending stack adjust except when it does not
9546 actually perform a jump. An example where there is no jump
9547 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
0006469d 9548
ca695ac9
JB
9549 This function is responsible for optimizing cases such as
9550 &&, || and comparison operators in EXP. */
904762c8 9551
ca695ac9
JB
9552void
9553do_jump (exp, if_false_label, if_true_label)
9554 tree exp;
9555 rtx if_false_label, if_true_label;
0006469d 9556{
ca695ac9
JB
9557 register enum tree_code code = TREE_CODE (exp);
9558 /* Some cases need to create a label to jump to
9559 in order to properly fall through.
9560 These cases set DROP_THROUGH_LABEL nonzero. */
9561 rtx drop_through_label = 0;
9562 rtx temp;
9563 rtx comparison = 0;
9564 int i;
9565 tree type;
2f6e6d22 9566 enum machine_mode mode;
0006469d 9567
ca695ac9 9568 emit_queue ();
0006469d 9569
ca695ac9
JB
9570 switch (code)
9571 {
9572 case ERROR_MARK:
9573 break;
0006469d 9574
ca695ac9
JB
9575 case INTEGER_CST:
9576 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9577 if (temp)
9578 emit_jump (temp);
9579 break;
0006469d 9580
ca695ac9
JB
9581#if 0
9582 /* This is not true with #pragma weak */
9583 case ADDR_EXPR:
9584 /* The address of something can never be zero. */
9585 if (if_true_label)
9586 emit_jump (if_true_label);
9587 break;
9588#endif
0006469d 9589
ca695ac9
JB
9590 case NOP_EXPR:
9591 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9592 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9593 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9594 goto normal;
9595 case CONVERT_EXPR:
9596 /* If we are narrowing the operand, we have to do the compare in the
9597 narrower mode. */
9598 if ((TYPE_PRECISION (TREE_TYPE (exp))
9599 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9600 goto normal;
9601 case NON_LVALUE_EXPR:
9602 case REFERENCE_EXPR:
9603 case ABS_EXPR:
9604 case NEGATE_EXPR:
9605 case LROTATE_EXPR:
9606 case RROTATE_EXPR:
9607 /* These cannot change zero->non-zero or vice versa. */
9608 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9609 break;
0006469d 9610
ca695ac9
JB
9611#if 0
9612 /* This is never less insns than evaluating the PLUS_EXPR followed by
9613 a test and can be longer if the test is eliminated. */
9614 case PLUS_EXPR:
9615 /* Reduce to minus. */
9616 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9617 TREE_OPERAND (exp, 0),
9618 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9619 TREE_OPERAND (exp, 1))));
9620 /* Process as MINUS. */
0006469d 9621#endif
0006469d 9622
ca695ac9
JB
9623 case MINUS_EXPR:
9624 /* Non-zero iff operands of minus differ. */
9625 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9626 TREE_OPERAND (exp, 0),
9627 TREE_OPERAND (exp, 1)),
9628 NE, NE);
9629 break;
904762c8 9630
ca695ac9
JB
9631 case BIT_AND_EXPR:
9632 /* If we are AND'ing with a small constant, do this comparison in the
9633 smallest type that fits. If the machine doesn't have comparisons
9634 that small, it will be converted back to the wider comparison.
9635 This helps if we are testing the sign bit of a narrower object.
9636 combine can't do this for us because it can't know whether a
9637 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
0006469d 9638
ca695ac9
JB
9639 if (! SLOW_BYTE_ACCESS
9640 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9641 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9642 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
2f6e6d22
RK
9643 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9644 && (type = type_for_mode (mode, 1)) != 0
ca695ac9
JB
9645 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9646 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9647 != CODE_FOR_nothing))
9648 {
9649 do_jump (convert (type, exp), if_false_label, if_true_label);
9650 break;
9651 }
9652 goto normal;
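      /* An example of the narrowing above, assuming the target can compare
         in QImode: for a 32-bit int X, the test X & 0x80 gives
         i = floor_log2 (0x80) = 7, mode_for_size (8, MODE_INT, 0) yields
         QImode, and the jump becomes an 8-bit test of the low byte; this
         is how the sign bit of a char-sized value is tested without a
         full-word compare.  */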
904762c8 9653
ca695ac9
JB
9654 case TRUTH_NOT_EXPR:
9655 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9656 break;
0006469d 9657
ca695ac9 9658 case TRUTH_ANDIF_EXPR:
7ee055f4
MS
9659 {
9660 rtx seq1, seq2;
9661 tree cleanups, old_cleanups;
9662
9663 if (if_false_label == 0)
9664 if_false_label = drop_through_label = gen_label_rtx ();
9665 start_sequence ();
9666 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9667 seq1 = get_insns ();
9668 end_sequence ();
9669
9670 old_cleanups = cleanups_this_call;
9671 start_sequence ();
9672 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9673 seq2 = get_insns ();
9674 end_sequence ();
9675
9676 cleanups = defer_cleanups_to (old_cleanups);
9677 if (cleanups)
9678 {
9679 rtx flag = gen_reg_rtx (word_mode);
9680 tree new_cleanups;
9681 tree cond;
9682
9683 /* Flag cleanups as not needed. */
9684 emit_move_insn (flag, const0_rtx);
9685 emit_insns (seq1);
9686
9687 /* Flag cleanups as needed. */
9688 emit_move_insn (flag, const1_rtx);
9689 emit_insns (seq2);
9690
9ba73d38
MS
9691 /* All cleanups must be on the function_obstack. */
9692 push_obstacks_nochange ();
9693 resume_temporary_allocation ();
9694
7ee055f4
MS
 9695 /* Convert flag, which is an rtx, into a tree. */
9696 cond = make_node (RTL_EXPR);
9697 TREE_TYPE (cond) = integer_type_node;
9698 RTL_EXPR_RTL (cond) = flag;
9699 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
01842234 9700 cond = save_expr (cond);
7ee055f4
MS
9701
9702 new_cleanups = build (COND_EXPR, void_type_node,
9703 truthvalue_conversion (cond),
9704 cleanups, integer_zero_node);
9705 new_cleanups = fold (new_cleanups);
9706
9ba73d38
MS
9707 pop_obstacks ();
9708
7ee055f4
MS
9709 /* Now add in the conditionalized cleanups. */
9710 cleanups_this_call
9711 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9712 (*interim_eh_hook) (NULL_TREE);
9713 }
9714 else
9715 {
9716 emit_insns (seq1);
9717 emit_insns (seq2);
9718 }
9719 }
ca695ac9 9720 break;
0006469d 9721
ca695ac9 9722 case TRUTH_ORIF_EXPR:
7ee055f4
MS
9723 {
9724 rtx seq1, seq2;
9725 tree cleanups, old_cleanups;
9726
9727 if (if_true_label == 0)
9728 if_true_label = drop_through_label = gen_label_rtx ();
9729 start_sequence ();
9730 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9731 seq1 = get_insns ();
9732 end_sequence ();
9733
9734 old_cleanups = cleanups_this_call;
9735 start_sequence ();
9736 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9737 seq2 = get_insns ();
9738 end_sequence ();
9739
9740 cleanups = defer_cleanups_to (old_cleanups);
9741 if (cleanups)
9742 {
9743 rtx flag = gen_reg_rtx (word_mode);
9744 tree new_cleanups;
9745 tree cond;
9746
9747 /* Flag cleanups as not needed. */
9748 emit_move_insn (flag, const0_rtx);
9749 emit_insns (seq1);
9750
9751 /* Flag cleanups as needed. */
9752 emit_move_insn (flag, const1_rtx);
9753 emit_insns (seq2);
9754
9ba73d38
MS
9755 /* All cleanups must be on the function_obstack. */
9756 push_obstacks_nochange ();
9757 resume_temporary_allocation ();
9758
7ee055f4
MS
 9759 /* Convert flag, which is an rtx, into a tree. */
9760 cond = make_node (RTL_EXPR);
9761 TREE_TYPE (cond) = integer_type_node;
9762 RTL_EXPR_RTL (cond) = flag;
9763 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
01842234 9764 cond = save_expr (cond);
7ee055f4
MS
9765
9766 new_cleanups = build (COND_EXPR, void_type_node,
9767 truthvalue_conversion (cond),
9768 cleanups, integer_zero_node);
9769 new_cleanups = fold (new_cleanups);
9770
9ba73d38
MS
9771 pop_obstacks ();
9772
7ee055f4
MS
9773 /* Now add in the conditionalized cleanups. */
9774 cleanups_this_call
9775 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9776 (*interim_eh_hook) (NULL_TREE);
9777 }
9778 else
9779 {
9780 emit_insns (seq1);
9781 emit_insns (seq2);
9782 }
9783 }
ca695ac9 9784 break;
0006469d 9785
ca695ac9 9786 case COMPOUND_EXPR:
0088fcb1 9787 push_temp_slots ();
ca695ac9 9788 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
d80f96e9 9789 preserve_temp_slots (NULL_RTX);
ca695ac9 9790 free_temp_slots ();
0088fcb1 9791 pop_temp_slots ();
ca695ac9
JB
9792 emit_queue ();
9793 do_pending_stack_adjust ();
9794 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9795 break;
0006469d 9796
ca695ac9
JB
9797 case COMPONENT_REF:
9798 case BIT_FIELD_REF:
9799 case ARRAY_REF:
9800 {
9801 int bitsize, bitpos, unsignedp;
9802 enum machine_mode mode;
9803 tree type;
9804 tree offset;
9805 int volatilep = 0;
0006469d 9806
ca695ac9
JB
9807 /* Get description of this reference. We don't actually care
9808 about the underlying object here. */
9809 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9810 &mode, &unsignedp, &volatilep);
0006469d 9811
ca695ac9
JB
9812 type = type_for_size (bitsize, unsignedp);
9813 if (! SLOW_BYTE_ACCESS
9814 && type != 0 && bitsize >= 0
9815 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9816 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9817 != CODE_FOR_nothing))
9818 {
9819 do_jump (convert (type, exp), if_false_label, if_true_label);
9820 break;
9821 }
9822 goto normal;
9823 }
0006469d 9824
ca695ac9
JB
9825 case COND_EXPR:
9826 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9827 if (integer_onep (TREE_OPERAND (exp, 1))
9828 && integer_zerop (TREE_OPERAND (exp, 2)))
9829 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
904762c8 9830
ca695ac9
JB
9831 else if (integer_zerop (TREE_OPERAND (exp, 1))
9832 && integer_onep (TREE_OPERAND (exp, 2)))
9833 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0006469d 9834
ca695ac9
JB
9835 else
9836 {
9837 register rtx label1 = gen_label_rtx ();
9838 drop_through_label = gen_label_rtx ();
9839 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9840 /* Now the THEN-expression. */
9841 do_jump (TREE_OPERAND (exp, 1),
9842 if_false_label ? if_false_label : drop_through_label,
9843 if_true_label ? if_true_label : drop_through_label);
9844 /* In case the do_jump just above never jumps. */
9845 do_pending_stack_adjust ();
9846 emit_label (label1);
9847 /* Now the ELSE-expression. */
9848 do_jump (TREE_OPERAND (exp, 2),
9849 if_false_label ? if_false_label : drop_through_label,
9850 if_true_label ? if_true_label : drop_through_label);
9851 }
9852 break;
0006469d 9853
ca695ac9 9854 case EQ_EXPR:
0e8c9172
RK
9855 {
9856 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9857
9858 if (integer_zerop (TREE_OPERAND (exp, 1)))
9859 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9860 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
201012cb 9861 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
0e8c9172
RK
9862 do_jump
9863 (fold
9864 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9865 fold (build (EQ_EXPR, TREE_TYPE (exp),
c8465d86
RK
9866 fold (build1 (REALPART_EXPR,
9867 TREE_TYPE (inner_type),
0e8c9172 9868 TREE_OPERAND (exp, 0))),
c8465d86
RK
9869 fold (build1 (REALPART_EXPR,
9870 TREE_TYPE (inner_type),
0e8c9172
RK
9871 TREE_OPERAND (exp, 1))))),
9872 fold (build (EQ_EXPR, TREE_TYPE (exp),
c8465d86
RK
9873 fold (build1 (IMAGPART_EXPR,
9874 TREE_TYPE (inner_type),
0e8c9172 9875 TREE_OPERAND (exp, 0))),
c8465d86
RK
9876 fold (build1 (IMAGPART_EXPR,
9877 TREE_TYPE (inner_type),
0e8c9172
RK
9878 TREE_OPERAND (exp, 1))))))),
9879 if_false_label, if_true_label);
9880 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9881 && !can_compare_p (TYPE_MODE (inner_type)))
9882 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9883 else
9884 comparison = compare (exp, EQ, EQ);
9885 break;
9886 }
0006469d 9887
ca695ac9 9888 case NE_EXPR:
0e8c9172
RK
9889 {
9890 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9891
9892 if (integer_zerop (TREE_OPERAND (exp, 1)))
9893 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9894 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
201012cb 9895 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
0e8c9172
RK
9896 do_jump
9897 (fold
9898 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9899 fold (build (NE_EXPR, TREE_TYPE (exp),
c8465d86
RK
9900 fold (build1 (REALPART_EXPR,
9901 TREE_TYPE (inner_type),
0e8c9172 9902 TREE_OPERAND (exp, 0))),
c8465d86
RK
9903 fold (build1 (REALPART_EXPR,
9904 TREE_TYPE (inner_type),
0e8c9172
RK
9905 TREE_OPERAND (exp, 1))))),
9906 fold (build (NE_EXPR, TREE_TYPE (exp),
c8465d86
RK
9907 fold (build1 (IMAGPART_EXPR,
9908 TREE_TYPE (inner_type),
0e8c9172 9909 TREE_OPERAND (exp, 0))),
c8465d86
RK
9910 fold (build1 (IMAGPART_EXPR,
9911 TREE_TYPE (inner_type),
0e8c9172
RK
9912 TREE_OPERAND (exp, 1))))))),
9913 if_false_label, if_true_label);
9914 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9915 && !can_compare_p (TYPE_MODE (inner_type)))
9916 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9917 else
9918 comparison = compare (exp, NE, NE);
9919 break;
9920 }
0006469d 9921
ca695ac9
JB
9922 case LT_EXPR:
9923 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9924 == MODE_INT)
9925 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9926 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9927 else
9928 comparison = compare (exp, LT, LTU);
9929 break;
0006469d 9930
ca695ac9
JB
9931 case LE_EXPR:
9932 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9933 == MODE_INT)
9934 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9935 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9936 else
9937 comparison = compare (exp, LE, LEU);
9938 break;
0006469d 9939
ca695ac9
JB
9940 case GT_EXPR:
9941 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9942 == MODE_INT)
9943 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9944 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9945 else
9946 comparison = compare (exp, GT, GTU);
9947 break;
0006469d 9948
ca695ac9
JB
9949 case GE_EXPR:
9950 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9951 == MODE_INT)
9952 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9953 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9954 else
9955 comparison = compare (exp, GE, GEU);
9956 break;
0006469d 9957
ca695ac9
JB
9958 default:
9959 normal:
9960 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9961#if 0
9962 /* This is no longer needed and causes poor code, since it makes
9963 comparisons and tests from non-SI objects have different code
9964 sequences. */
9965 /* Copy to register to avoid generating bad insns by cse
9966 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9967 if (!cse_not_expected && GET_CODE (temp) == MEM)
9968 temp = copy_to_reg (temp);
9969#endif
9970 do_pending_stack_adjust ();
9971 if (GET_CODE (temp) == CONST_INT)
9972 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9973 else if (GET_CODE (temp) == LABEL_REF)
9974 comparison = const_true_rtx;
9975 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9976 && !can_compare_p (GET_MODE (temp)))
9977 /* Note swapping the labels gives us not-equal. */
9978 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9979 else if (GET_MODE (temp) != VOIDmode)
9980 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9981 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9982 GET_MODE (temp), NULL_RTX, 0);
9983 else
9984 abort ();
9985 }
0006469d 9986
ca695ac9
JB
9987 /* Do any postincrements in the expression that was tested. */
9988 emit_queue ();
0006469d 9989
ca695ac9
JB
9990 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9991 straight into a conditional jump instruction as the jump condition.
9992 Otherwise, all the work has been done already. */
0006469d 9993
ca695ac9 9994 if (comparison == const_true_rtx)
0006469d 9995 {
ca695ac9
JB
9996 if (if_true_label)
9997 emit_jump (if_true_label);
0006469d 9998 }
ca695ac9
JB
9999 else if (comparison == const0_rtx)
10000 {
10001 if (if_false_label)
10002 emit_jump (if_false_label);
10003 }
10004 else if (comparison)
10005 do_jump_for_compare (comparison, if_false_label, if_true_label);
0006469d 10006
ca695ac9 10007 if (drop_through_label)
0006469d 10008 {
ca695ac9
JB
10009 /* If do_jump produces code that might be jumped around,
10010 do any stack adjusts from that code, before the place
10011 where control merges in. */
10012 do_pending_stack_adjust ();
10013 emit_label (drop_through_label);
10014 }
10015}
10016\f
10017/* Given a comparison expression EXP for values too wide to be compared
10018 with one insn, test the comparison and jump to the appropriate label.
10019 The code of EXP is ignored; we always test GT if SWAP is 0,
10020 and LT if SWAP is 1. */
0006469d 10021
ca695ac9
JB
10022static void
10023do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10024 tree exp;
10025 int swap;
10026 rtx if_false_label, if_true_label;
10027{
10028 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10029 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10030 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10031 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10032 rtx drop_through_label = 0;
10033 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10034 int i;
0006469d 10035
ca695ac9
JB
10036 if (! if_true_label || ! if_false_label)
10037 drop_through_label = gen_label_rtx ();
10038 if (! if_true_label)
10039 if_true_label = drop_through_label;
10040 if (! if_false_label)
10041 if_false_label = drop_through_label;
0006469d 10042
ca695ac9
JB
10043 /* Compare a word at a time, high order first. */
10044 for (i = 0; i < nwords; i++)
10045 {
10046 rtx comp;
10047 rtx op0_word, op1_word;
0006469d 10048
ca695ac9
JB
10049 if (WORDS_BIG_ENDIAN)
10050 {
10051 op0_word = operand_subword_force (op0, i, mode);
10052 op1_word = operand_subword_force (op1, i, mode);
10053 }
10054 else
10055 {
10056 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10057 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10058 }
0006469d 10059
ca695ac9
JB
10060 /* All but the high-order word must be compared as unsigned. */
10061 comp = compare_from_rtx (op0_word, op1_word,
10062 (unsignedp || i > 0) ? GTU : GT,
10063 unsignedp, word_mode, NULL_RTX, 0);
10064 if (comp == const_true_rtx)
10065 emit_jump (if_true_label);
10066 else if (comp != const0_rtx)
10067 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 10068
ca695ac9
JB
10069 /* Consider lower words only if these are equal. */
10070 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10071 NULL_RTX, 0);
10072 if (comp == const_true_rtx)
10073 emit_jump (if_false_label);
10074 else if (comp != const0_rtx)
10075 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10076 }
0006469d 10077
ca695ac9
JB
10078 if (if_false_label)
10079 emit_jump (if_false_label);
10080 if (drop_through_label)
10081 emit_label (drop_through_label);
0006469d
TW
10082}
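/* Illustrative sketch, not part of expr.c: the loop above is the usual
   word-at-a-time scheme for comparing values wider than one compare insn,
   highest-order word first.  A hypothetical all-unsigned C analogue (the
   real code uses a signed compare for the high-order word of signed
   operands; A[0] is taken as the most significant word here):  */

static int
multiword_gtu (const unsigned long *a, const unsigned long *b, int nwords)
{
  int i;
  for (i = 0; i < nwords; i++)
    {
      if (a[i] > b[i])
	return 1;		/* jump to if_true_label */
      if (a[i] != b[i])
	return 0;		/* jump to if_false_label */
    }
  return 0;			/* all words equal: not greater */
}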
10083
ca695ac9
JB
10084/* Compare OP0 with OP1, word at a time, in mode MODE.
10085 UNSIGNEDP says to do unsigned comparison.
10086 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
904762c8 10087
2e5ec6cf 10088void
ca695ac9
JB
10089do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10090 enum machine_mode mode;
10091 int unsignedp;
10092 rtx op0, op1;
10093 rtx if_false_label, if_true_label;
0006469d 10094{
ca695ac9
JB
10095 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10096 rtx drop_through_label = 0;
10097 int i;
0006469d 10098
ca695ac9
JB
10099 if (! if_true_label || ! if_false_label)
10100 drop_through_label = gen_label_rtx ();
10101 if (! if_true_label)
10102 if_true_label = drop_through_label;
10103 if (! if_false_label)
10104 if_false_label = drop_through_label;
0006469d 10105
ca695ac9
JB
10106 /* Compare a word at a time, high order first. */
10107 for (i = 0; i < nwords; i++)
0006469d 10108 {
ca695ac9
JB
10109 rtx comp;
10110 rtx op0_word, op1_word;
0006469d 10111
ca695ac9
JB
10112 if (WORDS_BIG_ENDIAN)
10113 {
10114 op0_word = operand_subword_force (op0, i, mode);
10115 op1_word = operand_subword_force (op1, i, mode);
10116 }
10117 else
10118 {
10119 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10120 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10121 }
0006469d 10122
ca695ac9
JB
10123 /* All but the high-order word must be compared as unsigned. */
10124 comp = compare_from_rtx (op0_word, op1_word,
10125 (unsignedp || i > 0) ? GTU : GT,
10126 unsignedp, word_mode, NULL_RTX, 0);
10127 if (comp == const_true_rtx)
10128 emit_jump (if_true_label);
10129 else if (comp != const0_rtx)
10130 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 10131
ca695ac9
JB
10132 /* Consider lower words only if these are equal. */
10133 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10134 NULL_RTX, 0);
10135 if (comp == const_true_rtx)
10136 emit_jump (if_false_label);
10137 else if (comp != const0_rtx)
10138 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10139 }
0006469d 10140
ca695ac9
JB
10141 if (if_false_label)
10142 emit_jump (if_false_label);
10143 if (drop_through_label)
10144 emit_label (drop_through_label);
0006469d 10145}
bbf6f052 10146
ca695ac9
JB
10147/* Given an EQ_EXPR expression EXP for values too wide to be compared
10148 with one insn, test the comparison and jump to the appropriate label. */
10149
10150static void
10151do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10152 tree exp;
10153 rtx if_false_label, if_true_label;
bbf6f052 10154{
ca695ac9
JB
10155 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10156 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10157 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10158 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10159 int i;
10160 rtx drop_through_label = 0;
bbf6f052 10161
ca695ac9
JB
10162 if (! if_false_label)
10163 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10164
ca695ac9
JB
10165 for (i = 0; i < nwords; i++)
10166 {
10167 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10168 operand_subword_force (op1, i, mode),
10169 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10170 word_mode, NULL_RTX, 0);
10171 if (comp == const_true_rtx)
10172 emit_jump (if_false_label);
10173 else if (comp != const0_rtx)
10174 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10175 }
1499e0a8 10176
ca695ac9
JB
10177 if (if_true_label)
10178 emit_jump (if_true_label);
10179 if (drop_through_label)
10180 emit_label (drop_through_label);
10181}
10182\f
10183/* Jump according to whether OP0 is 0.
10184 We assume that OP0 has an integer mode that is too wide
10185 for the available compare insns. */
1499e0a8 10186
ca695ac9
JB
10187static void
10188do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10189 rtx op0;
10190 rtx if_false_label, if_true_label;
10191{
10192 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10193 int i;
10194 rtx drop_through_label = 0;
1499e0a8 10195
ca695ac9
JB
10196 if (! if_false_label)
10197 drop_through_label = if_false_label = gen_label_rtx ();
1499e0a8 10198
ca695ac9
JB
10199 for (i = 0; i < nwords; i++)
10200 {
10201 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10202 GET_MODE (op0)),
10203 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10204 if (comp == const_true_rtx)
10205 emit_jump (if_false_label);
10206 else if (comp != const0_rtx)
10207 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10208 }
1499e0a8 10209
ca695ac9
JB
10210 if (if_true_label)
10211 emit_jump (if_true_label);
10212 if (drop_through_label)
10213 emit_label (drop_through_label);
10214}
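/* Illustrative sketch, not part of expr.c: the zero test above visits
   one word at a time and jumps to the false label on the first nonzero
   word; only a fully zero value falls through to the true label.
   Hypothetical C analogue:  */

static int
multiword_is_zero (const unsigned long *op0, int nwords)
{
  int i;
  for (i = 0; i < nwords; i++)
    if (op0[i] != 0)
      return 0;			/* do_jump_for_compare -> if_false_label */
  return 1;			/* fell through every word's test */
}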
bbf6f052 10215
ca695ac9
JB
10216/* Given a comparison expression in rtl form, output conditional branches to
10217 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
bbf6f052 10218
ca695ac9
JB
10219static void
10220do_jump_for_compare (comparison, if_false_label, if_true_label)
10221 rtx comparison, if_false_label, if_true_label;
10222{
10223 if (if_true_label)
a358cee0 10224 {
ca695ac9
JB
10225 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10226 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10227 else
10228 abort ();
a358cee0 10229
ca695ac9
JB
10230 if (if_false_label)
10231 emit_jump (if_false_label);
c980ac49 10232 }
ca695ac9 10233 else if (if_false_label)
bbf6f052 10234 {
ca695ac9 10235 rtx insn;
f12f485a 10236 rtx prev = get_last_insn ();
ca695ac9 10237 rtx branch = 0;
bbf6f052 10238
ca695ac9
JB
10239 /* Output the branch with the opposite condition. Then try to invert
10240 what is generated. If more than one insn is a branch, or if the
10241 branch is not the last insn written, abort. If we can't invert
10242 the branch, make a true label, redirect this jump to that,
10243 emit a jump to the false label and define the true label. */
bbf6f052 10244
ca695ac9 10245 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
34661f5c 10246 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
ca695ac9
JB
10247 else
10248 abort ();
bbf6f052 10249
41dfd40c
RK
10250 /* Here we get the first insn that was just emitted. It used to be the
10251 case that, on some machines, emitting the branch would discard
10252 the previous compare insn and emit a replacement. This isn't
10253 done anymore, but abort if we see that PREV is deleted. */
10254
ca695ac9 10255 if (prev == 0)
ca695ac9 10256 insn = get_insns ();
41dfd40c
RK
10257 else if (INSN_DELETED_P (prev))
10258 abort ();
ca695ac9 10259 else
41dfd40c 10260 insn = NEXT_INSN (prev);
bbf6f052 10261
34661f5c 10262 for (; insn; insn = NEXT_INSN (insn))
ca695ac9
JB
10263 if (GET_CODE (insn) == JUMP_INSN)
10264 {
10265 if (branch)
10266 abort ();
10267 branch = insn;
10268 }
10269
10270 if (branch != get_last_insn ())
10271 abort ();
10272
127e4d19 10273 JUMP_LABEL (branch) = if_false_label;
ca695ac9
JB
10274 if (! invert_jump (branch, if_false_label))
10275 {
10276 if_true_label = gen_label_rtx ();
10277 redirect_jump (branch, if_true_label);
10278 emit_jump (if_false_label);
10279 emit_label (if_true_label);
bbf6f052
RK
10280 }
10281 }
ca695ac9
JB
10282}
10283\f
10284/* Generate code for a comparison expression EXP
10285 (including code to compute the values to be compared)
10286 and set (CC0) according to the result.
10287 SIGNED_CODE should be the rtx operation for this comparison for
10288 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10289
10290 We force a stack adjustment unless there are currently
10291 things pushed on the stack that aren't yet used. */
10292
10293static rtx
10294compare (exp, signed_code, unsigned_code)
10295 register tree exp;
10296 enum rtx_code signed_code, unsigned_code;
10297{
10298 register rtx op0
10299 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10300 register rtx op1
10301 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10302 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10303 register enum machine_mode mode = TYPE_MODE (type);
10304 int unsignedp = TREE_UNSIGNED (type);
10305 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
bbf6f052 10306
ca695ac9
JB
10307 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10308 ((mode == BLKmode)
10309 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10310 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10311}
bbf6f052 10312
ca695ac9
JB
10313/* Like compare but expects the values to compare as two rtx's.
10314 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 10315
ca695ac9
JB
10316 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10317 compared.
bbf6f052 10318
ca695ac9
JB
10319 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10320 size of MODE should be used. */
bbf6f052 10321
ca695ac9
JB
10322rtx
10323compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10324 register rtx op0, op1;
10325 enum rtx_code code;
10326 int unsignedp;
10327 enum machine_mode mode;
10328 rtx size;
10329 int align;
10330{
10331 rtx tem;
bbf6f052 10332
ca695ac9
JB
10333 /* If one operand is constant, make it the second one. Only do this
10334 if the other operand is not constant as well. */
bbf6f052 10335
ca695ac9
JB
10336 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10337 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10338 {
10339 tem = op0;
10340 op0 = op1;
10341 op1 = tem;
10342 code = swap_condition (code);
10343 }
bbf6f052 10344
ca695ac9 10345 if (flag_force_mem)
bbf6f052 10346 {
ca695ac9
JB
10347 op0 = force_not_mem (op0);
10348 op1 = force_not_mem (op1);
10349 }
bbf6f052 10350
ca695ac9 10351 do_pending_stack_adjust ();
bbf6f052 10352
ca695ac9
JB
10353 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10354 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10355 return tem;
bbf6f052 10356
ca695ac9
JB
10357#if 0
10358 /* There's no need to do this now that combine.c can eliminate lots of
10359 sign extensions. This can be less efficient in certain cases on other
10360 machines. */
bbf6f052 10361
ca695ac9
JB
10362 /* If this is a signed equality comparison, we can do it as an
10363 unsigned comparison since zero-extension is cheaper than sign
10364 extension and comparisons with zero are done as unsigned. This is
10365 the case even on machines that can do fast sign extension, since
10366 zero-extension is easier to combine with other operations than
10367 sign-extension is. If we are comparing against a constant, we must
10368 convert it to what it would look like unsigned. */
10369 if ((code == EQ || code == NE) && ! unsignedp
10370 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10371 {
10372 if (GET_CODE (op1) == CONST_INT
10373 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10374 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10375 unsignedp = 1;
bbf6f052 10376 }
ca695ac9
JB
10377#endif
10378
10379 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
bbf6f052 10380
ca695ac9 10381 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
bbf6f052
RK
10382}
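/* Illustrative sketch, not part of expr.c: when compare_from_rtx moves a
   constant into the second operand position, the condition code must be
   swapped as well, so that "5 < x" is tested as "x > 5".  A hypothetical
   analogue of swap_condition:  */

enum demo_cmp { DEMO_LT, DEMO_GT, DEMO_LE, DEMO_GE, DEMO_EQ, DEMO_NE };

static enum demo_cmp
demo_swap_condition (enum demo_cmp code)
{
  switch (code)
    {
    case DEMO_LT: return DEMO_GT;
    case DEMO_GT: return DEMO_LT;
    case DEMO_LE: return DEMO_GE;
    case DEMO_GE: return DEMO_LE;
    default:      return code;	/* EQ and NE are symmetric */
    }
}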
10383\f
ca695ac9
JB
10384/* Generate code to calculate EXP using a store-flag instruction
10385 and return an rtx for the result. EXP is either a comparison
10386 or a TRUTH_NOT_EXPR whose operand is a comparison.
bbf6f052 10387
ca695ac9 10388 If TARGET is nonzero, store the result there if convenient.
bbf6f052 10389
ca695ac9
JB
10390 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10391 cheap.
bbf6f052 10392
ca695ac9
JB
10393 Return zero if there is no suitable set-flag instruction
10394 available on this machine.
bbf6f052 10395
ca695ac9
JB
10396 Once expand_expr has been called on the arguments of the comparison,
10397 we are committed to doing the store flag, since it is not safe to
10398 re-evaluate the expression. We emit the store-flag insn by calling
10399 emit_store_flag, but only expand the arguments if we have a reason
10400 to believe that emit_store_flag will be successful. If we think that
10401 it will, but it isn't, we have to simulate the store-flag with a
10402 set/jump/set sequence. */
bbf6f052 10403
ca695ac9
JB
10404static rtx
10405do_store_flag (exp, target, mode, only_cheap)
10406 tree exp;
10407 rtx target;
10408 enum machine_mode mode;
10409 int only_cheap;
bbf6f052 10410{
ca695ac9
JB
10411 enum rtx_code code;
10412 tree arg0, arg1, type;
10413 tree tem;
10414 enum machine_mode operand_mode;
10415 int invert = 0;
10416 int unsignedp;
10417 rtx op0, op1;
10418 enum insn_code icode;
10419 rtx subtarget = target;
10420 rtx result, label, pattern, jump_pat;
bbf6f052 10421
ca695ac9
JB
10422 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10423 result at the end. We can't simply invert the test since it would
10424 have already been inverted if it were valid. This case occurs for
10425 some floating-point comparisons. */
10426
10427 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10428 invert = 1, exp = TREE_OPERAND (exp, 0);
10429
10430 arg0 = TREE_OPERAND (exp, 0);
10431 arg1 = TREE_OPERAND (exp, 1);
10432 type = TREE_TYPE (arg0);
10433 operand_mode = TYPE_MODE (type);
10434 unsignedp = TREE_UNSIGNED (type);
10435
10436 /* We won't bother with BLKmode store-flag operations because it would mean
10437 passing a lot of information to emit_store_flag. */
10438 if (operand_mode == BLKmode)
10439 return 0;
10440
10441 STRIP_NOPS (arg0);
10442 STRIP_NOPS (arg1);
10443
10444 /* Get the rtx comparison code to use. We know that EXP is a comparison
10445 operation of some type. Some comparisons against 1 and -1 can be
10446 converted to comparisons with zero. Do so here so that the tests
10447 below will be aware that we have a comparison with zero. These
10448 tests will not catch constants in the first operand, but constants
10449 are rarely passed as the first operand. */
10450
10451 switch (TREE_CODE (exp))
10452 {
10453 case EQ_EXPR:
10454 code = EQ;
10455 break;
10456 case NE_EXPR:
10457 code = NE;
10458 break;
10459 case LT_EXPR:
10460 if (integer_onep (arg1))
10461 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10462 else
10463 code = unsignedp ? LTU : LT;
10464 break;
10465 case LE_EXPR:
10466 if (! unsignedp && integer_all_onesp (arg1))
10467 arg1 = integer_zero_node, code = LT;
10468 else
10469 code = unsignedp ? LEU : LE;
10470 break;
10471 case GT_EXPR:
10472 if (! unsignedp && integer_all_onesp (arg1))
10473 arg1 = integer_zero_node, code = GE;
10474 else
10475 code = unsignedp ? GTU : GT;
10476 break;
10477 case GE_EXPR:
10478 if (integer_onep (arg1))
10479 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10480 else
10481 code = unsignedp ? GEU : GE;
10482 break;
10483 default:
10484 abort ();
10485 }
bbf6f052 10486
ca695ac9
JB
10487 /* Put a constant second. */
10488 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
bbf6f052 10489 {
ca695ac9
JB
10490 tem = arg0; arg0 = arg1; arg1 = tem;
10491 code = swap_condition (code);
bbf6f052 10492 }
bbf6f052 10493
ca695ac9
JB
10494 /* If this is an equality or inequality test of a single bit, we can
10495 do this by shifting the bit being tested to the low-order bit and
10496 masking the result with the constant 1. If the condition was EQ,
10497 we xor it with 1. This does not require an scc insn and is faster
10498 than an scc insn even if we have it. */
bbf6f052 10499
ca695ac9
JB
10500 if ((code == NE || code == EQ)
10501 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10502 && integer_pow2p (TREE_OPERAND (arg0, 1))
10503 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10504 {
10505 tree inner = TREE_OPERAND (arg0, 0);
10506 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10507 NULL_RTX, VOIDmode, 0)));
10508 int ops_unsignedp;
bbf6f052 10509
ca695ac9
JB
10510 /* If INNER is a right shift of a constant and it plus BITNUM does
10511 not overflow, adjust BITNUM and INNER. */
bbf6f052 10512
ca695ac9
JB
10513 if (TREE_CODE (inner) == RSHIFT_EXPR
10514 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10515 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10516 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10517 < TYPE_PRECISION (type)))
10518 {
10519 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10520 inner = TREE_OPERAND (inner, 0);
10521 }
bbf6f052 10522
ca695ac9
JB
10523 /* If we are going to be able to omit the AND below, we must do our
10524 operations as unsigned. If we must use the AND, we have a choice.
10525 Normally unsigned is faster, but for some machines signed is. */
10526 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
ad92c826
RK
10527#ifdef LOAD_EXTEND_OP
10528 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
ca695ac9
JB
10529#else
10530 : 1
10531#endif
10532 );
bbf6f052 10533
ca695ac9
JB
10534 if (subtarget == 0 || GET_CODE (subtarget) != REG
10535 || GET_MODE (subtarget) != operand_mode
10536 || ! safe_from_p (subtarget, inner))
10537 subtarget = 0;
e7c33f54 10538
ca695ac9 10539 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 10540
ca695ac9
JB
10541 if (bitnum != 0)
10542 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
0c316b20 10543 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 10544
ca695ac9
JB
10545 if (GET_MODE (op0) != mode)
10546 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 10547
ca695ac9 10548 if ((code == EQ && ! invert) || (code == NE && invert))
0c316b20 10549 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
ca695ac9 10550 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 10551
ca695ac9
JB
10552 /* Put the AND last so it can combine with more things. */
10553 if (bitnum != TYPE_PRECISION (type) - 1)
0c316b20 10554 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 10555
ca695ac9
JB
10556 return op0;
10557 }
bbf6f052 10558
ca695ac9
JB
10559 /* Now see if we are likely to be able to do this. Return if not. */
10560 if (! can_compare_p (operand_mode))
10561 return 0;
10562 icode = setcc_gen_code[(int) code];
10563 if (icode == CODE_FOR_nothing
10564 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10565 {
10566 /* We can only do this if it is one of the special cases that
10567 can be handled without an scc insn. */
10568 if ((code == LT && integer_zerop (arg1))
10569 || (! only_cheap && code == GE && integer_zerop (arg1)))
10570 ;
10571 else if (BRANCH_COST >= 0
10572 && ! only_cheap && (code == NE || code == EQ)
10573 && TREE_CODE (type) != REAL_TYPE
10574 && ((abs_optab->handlers[(int) operand_mode].insn_code
10575 != CODE_FOR_nothing)
10576 || (ffs_optab->handlers[(int) operand_mode].insn_code
10577 != CODE_FOR_nothing)))
10578 ;
10579 else
10580 return 0;
10581 }
10582
10583 preexpand_calls (exp);
10584 if (subtarget == 0 || GET_CODE (subtarget) != REG
10585 || GET_MODE (subtarget) != operand_mode
10586 || ! safe_from_p (subtarget, arg1))
10587 subtarget = 0;
bbf6f052 10588
ca695ac9
JB
10589 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10590 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
bbf6f052 10591
ca695ac9
JB
10592 if (target == 0)
10593 target = gen_reg_rtx (mode);
bbf6f052 10594
ca695ac9
JB
10595 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10596 because, if emit_store_flag does anything, it will succeed and
10597 OP0 and OP1 will not be used subsequently. */
bbf6f052 10598
ca695ac9
JB
10599 result = emit_store_flag (target, code,
10600 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10601 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10602 operand_mode, unsignedp, 1);
bbf6f052 10603
ca695ac9
JB
10604 if (result)
10605 {
10606 if (invert)
10607 result = expand_binop (mode, xor_optab, result, const1_rtx,
10608 result, 0, OPTAB_LIB_WIDEN);
10609 return result;
10610 }
bbf6f052 10611
ca695ac9
JB
10612 /* If this failed, we have to do this with set/compare/jump/set code. */
10613 if (target == 0 || GET_CODE (target) != REG
10614 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10615 target = gen_reg_rtx (GET_MODE (target));
bbf6f052 10616
ca695ac9
JB
10617 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10618 result = compare_from_rtx (op0, op1, code, unsignedp,
10619 operand_mode, NULL_RTX, 0);
10620 if (GET_CODE (result) == CONST_INT)
10621 return (((result == const0_rtx && ! invert)
10622 || (result != const0_rtx && invert))
10623 ? const0_rtx : const1_rtx);
bbf6f052 10624
ca695ac9
JB
10625 label = gen_label_rtx ();
10626 if (bcc_gen_fctn[(int) code] == 0)
10627 abort ();
bbf6f052 10628
ca695ac9
JB
10629 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10630 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10631 emit_label (label);
bbf6f052 10632
ca695ac9
JB
10633 return target;
10634}
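/* Illustrative sketch, not part of expr.c: the single-bit case in
   do_store_flag rewrites "(x & (1 << n)) != 0" as "(x >> n) & 1", and
   xors with 1 for the EQ form, so no scc insn is needed.  Hypothetical
   C analogues:  */

static unsigned int
demo_bit_ne (unsigned int x, int bitnum)
{
  return (x >> bitnum) & 1;		/* 1 iff the tested bit is set */
}

static unsigned int
demo_bit_eq (unsigned int x, int bitnum)
{
  return ((x >> bitnum) & 1) ^ 1;	/* EQ form: invert with an XOR */
}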
10635\f
10636/* Generate a tablejump instruction (used for switch statements). */
bbf6f052 10637
ca695ac9 10638#ifdef HAVE_tablejump
bbf6f052 10639
ca695ac9
JB
10640/* INDEX is the value being switched on, with the lowest value
10641 in the table already subtracted.
10642 MODE is its expected mode (needed if INDEX is constant).
10643 RANGE is the length of the jump table.
10644 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
bbf6f052 10645
ca695ac9
JB
10646 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10647 index value is out of range. */
bbf6f052 10648
ca695ac9
JB
10649void
10650do_tablejump (index, mode, range, table_label, default_label)
10651 rtx index, range, table_label, default_label;
10652 enum machine_mode mode;
10653{
10654 register rtx temp, vector;
bbf6f052 10655
ca695ac9
JB
10656 /* Do an unsigned comparison (in the proper mode) between the index
10657 expression and the value which represents the length of the range.
10658 Since we just finished subtracting the lower bound of the range
10659 from the index expression, this comparison allows us to simultaneously
10660 check that the original index expression value is both greater than
10661 or equal to the minimum value of the range and less than or equal to
10662 the maximum value of the range. */
bbf6f052 10663
bf500664
RK
10664 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
10665 emit_jump_insn (gen_bgtu (default_label));
bbf6f052 10666
ca695ac9
JB
10667 /* If index is in range, it must fit in Pmode.
10668 Convert to Pmode so we can index with it. */
10669 if (mode != Pmode)
10670 index = convert_to_mode (Pmode, index, 1);
bbf6f052 10671
ca695ac9
JB
10672 /* Don't let a MEM slip through, because then the INDEX that comes
10673 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10674 and break_out_memory_refs will go to work on it and mess it up. */
10675#ifdef PIC_CASE_VECTOR_ADDRESS
10676 if (flag_pic && GET_CODE (index) != REG)
10677 index = copy_to_mode_reg (Pmode, index);
10678#endif
bbf6f052 10679
ca695ac9
JB
10680 /* If flag_force_addr were to affect this address
10681 it could interfere with the tricky assumptions made
10682 about addresses that contain label-refs,
10683 which may be valid only very near the tablejump itself. */
10684 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10685 GET_MODE_SIZE, because this indicates how large insns are. The other
10686 uses should all be Pmode, because they are addresses. This code
10687 could fail if addresses and insns are not the same size. */
10688 index = gen_rtx (PLUS, Pmode,
10689 gen_rtx (MULT, Pmode, index,
10690 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10691 gen_rtx (LABEL_REF, Pmode, table_label));
10692#ifdef PIC_CASE_VECTOR_ADDRESS
10693 if (flag_pic)
10694 index = PIC_CASE_VECTOR_ADDRESS (index);
10695 else
10696#endif
10697 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10698 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10699 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
10700 RTX_UNCHANGING_P (vector) = 1;
10701 convert_move (temp, vector, 0);
bbf6f052 10702
ca695ac9 10703 emit_jump_insn (gen_tablejump (temp, table_label));
bbf6f052 10704
ca695ac9
JB
10705#ifndef CASE_VECTOR_PC_RELATIVE
10706 /* If we are generating PIC code or if the table is PC-relative, the
10707 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10708 if (! flag_pic)
10709 emit_barrier ();
bbf6f052 10710#endif
ca695ac9 10711}
bbf6f052 10712
ca695ac9 10713#endif /* HAVE_tablejump */
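/* Illustrative sketch, not part of expr.c: the single GTU comparison in
   do_tablejump checks both ends of the case range at once.  Once the
   lower bound has been subtracted, an index below the minimum wraps
   around to a huge unsigned value, so one unsigned test suffices.
   Hypothetical dispatcher:  */

static int
demo_switch_index (long val, long low, unsigned long range)
{
  unsigned long index = (unsigned long) (val - low);

  if (index > range)
    return -1;			/* out of range: default_label */
  return (int) index;		/* in range: index into the jump table */
}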
bbf6f052 10714
bbf6f052 10715
ca695ac9
JB
10716/* Emit a suitable bytecode to load a value from memory, assuming a pointer
10717 to that value is on the top of the stack. The resulting type is TYPE, and
10718 the source declaration is DECL. */
bbf6f052 10719
ca695ac9
JB
10720void
10721bc_load_memory (type, decl)
10722 tree type, decl;
10723{
10724 enum bytecode_opcode opcode;
10725
10726
10727 /* Bit fields are special. We only know about signed and
10728 unsigned ints, and enums. The latter are treated as
10729 signed integers. */
10730
10731 if (DECL_BIT_FIELD (decl))
10732 if (TREE_CODE (type) == ENUMERAL_TYPE
10733 || TREE_CODE (type) == INTEGER_TYPE)
10734 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
10735 else
10736 abort ();
10737 else
10738 /* See corresponding comment in bc_store_memory(). */
10739 if (TYPE_MODE (type) == BLKmode
10740 || TYPE_MODE (type) == VOIDmode)
10741 return;
10742 else
6bd6178d 10743 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
bbf6f052 10744
ca695ac9
JB
10745 if (opcode == neverneverland)
10746 abort ();
10747
10748 bc_emit_bytecode (opcode);
10749
10750#ifdef DEBUG_PRINT_CODE
10751 fputc ('\n', stderr);
10752#endif
bbf6f052 10753}
bbf6f052 10754
bbf6f052 10755
ca695ac9
JB
10756/* Store the contents of the second stack slot to the address in the
10757 top stack slot. DECL is the declaration of the destination and is used
10758 to determine whether we're dealing with a bitfield. */
bbf6f052 10759
ca695ac9
JB
10760void
10761bc_store_memory (type, decl)
10762 tree type, decl;
10763{
10764 enum bytecode_opcode opcode;
10765
10766
10767 if (DECL_BIT_FIELD (decl))
f81497d9 10768 {
ca695ac9
JB
10769 if (TREE_CODE (type) == ENUMERAL_TYPE
10770 || TREE_CODE (type) == INTEGER_TYPE)
10771 opcode = sstoreBI;
f81497d9 10772 else
ca695ac9 10773 abort ();
f81497d9 10774 }
ca695ac9
JB
10775 else
10776 if (TYPE_MODE (type) == BLKmode)
10777 {
10778 /* Copy structure. This expands to a block copy instruction, storeBLK.
10779 In addition to the arguments expected by the other store instructions,
10780 it also expects a type size (SImode) on top of the stack, which is the
10781 structure size in size units (usually bytes). The first two arguments
10782 are already on the stack, so we just put the size on level 1. In some
10783 other languages the size may be variable, which is why we don't encode
10784 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
10785
10786 bc_expand_expr (TYPE_SIZE (type));
10787 opcode = storeBLK;
10788 }
10789 else
6bd6178d 10790 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
f81497d9 10791
ca695ac9
JB
10792 if (opcode == neverneverland)
10793 abort ();
10794
10795 bc_emit_bytecode (opcode);
10796
10797#ifdef DEBUG_PRINT_CODE
10798 fputc ('\n', stderr);
10799#endif
f81497d9
RS
10800}
10801
f81497d9 10802
ca695ac9
JB
10803/* Allocate local stack space sufficient to hold a value of the given
10804 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
10805 integral power of 2. A special case is locals of type VOID, which
10806 have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
10807 remapped into the corresponding attribute of SI. */
10808
10809rtx
10810bc_allocate_local (size, alignment)
10811 int size, alignment;
f81497d9 10812{
ca695ac9
JB
10813 rtx retval;
10814 int byte_alignment;
f81497d9 10815
ca695ac9
JB
10816 if (size < 0)
10817 abort ();
f81497d9 10818
ca695ac9
JB
10819 /* Normalize size and alignment */
10820 if (!size)
10821 size = UNITS_PER_WORD;
bbf6f052 10822
ca695ac9
JB
10823 if (alignment < BITS_PER_UNIT)
10824 byte_alignment = 1 << (INT_ALIGN - 1);
10825 else
10826 /* Align */
10827 byte_alignment = alignment / BITS_PER_UNIT;
bbf6f052 10828
ca695ac9
JB
10829 if (local_vars_size & (byte_alignment - 1))
10830 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
bbf6f052 10831
ca695ac9
JB
10832 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10833 local_vars_size += size;
bbf6f052 10834
ca695ac9 10835 return retval;
bbf6f052
RK
10836}
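/* Illustrative sketch, not part of expr.c: the rounding in
   bc_allocate_local is the standard align-up step for a power-of-two
   alignment.  Hypothetical helper showing the same computation:  */

static int
demo_align_up (int offset, int byte_alignment)
{
  int rem = offset & (byte_alignment - 1);

  if (rem)
    offset += byte_alignment - rem;
  /* Equivalently: (offset + byte_alignment - 1) & -byte_alignment.  */
  return offset;
}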
10837
bbf6f052 10838
ca695ac9
JB
10839/* Allocate variable-sized local array. Variable-sized arrays are
10840 actually pointers to the address in memory where they are stored. */
10841
10842rtx
10843bc_allocate_variable_array (size)
10844 tree size;
bbf6f052 10845{
ca695ac9
JB
10846 rtx retval;
10847 const int ptralign = (1 << (PTR_ALIGN - 1));
bbf6f052 10848
ca695ac9
JB
10849 /* Align pointer */
10850 if (local_vars_size & ptralign)
10851 local_vars_size += ptralign - (local_vars_size & ptralign);
bbf6f052 10852
ca695ac9
JB
10853 /* Note down the local space needed: a pointer to the block. Also
10854 return a dummy rtx. */
bbf6f052 10855
ca695ac9
JB
10856 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10857 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
10858 return retval;
bbf6f052 10859}
bbf6f052 10860
bbf6f052 10861
ca695ac9
JB
10862/* Push the machine address for the given external variable offset. */
10863void
10864bc_load_externaddr (externaddr)
10865 rtx externaddr;
10866{
10867 bc_emit_bytecode (constP);
e7a42772
JB
10868 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
10869 BYTECODE_BC_LABEL (externaddr)->offset);
bbf6f052 10870
ca695ac9
JB
10871#ifdef DEBUG_PRINT_CODE
10872 fputc ('\n', stderr);
10873#endif
bbf6f052
RK
10874}
10875
bbf6f052 10876
ca695ac9
JB
10877/* Like above, but expects an IDENTIFIER. */
10878void
10879bc_load_externaddr_id (id, offset)
10880 tree id;
10881 int offset;
10882{
10883 if (!IDENTIFIER_POINTER (id))
10884 abort ();
bbf6f052 10885
ca695ac9 10886 bc_emit_bytecode (constP);
3d8e9bc2 10887 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
bbf6f052 10888
ca695ac9
JB
10889#ifdef DEBUG_PRINT_CODE
10890 fputc ('\n', stderr);
10891#endif
10892}
bbf6f052 10893
bbf6f052 10894
ca695ac9
JB
10895/* Push the machine address for the given local variable offset. */
10896void
10897bc_load_localaddr (localaddr)
10898 rtx localaddr;
10899{
e7a42772 10900 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
bbf6f052 10901}
bbf6f052 10902
bbf6f052 10903
ca695ac9
JB
10904/* Push the machine address for the given parameter offset.
10905 NOTE: offset is in bits. */
10906void
10907bc_load_parmaddr (parmaddr)
10908 rtx parmaddr;
bbf6f052 10909{
e7a42772
JB
10910 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
10911 / BITS_PER_UNIT));
ca695ac9 10912}
bbf6f052 10913
ca695ac9
JB
10914
10915/* Convert a[i] into *(a + i). */
10916tree
10917bc_canonicalize_array_ref (exp)
10918 tree exp;
10919{
10920 tree type = TREE_TYPE (exp);
10921 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
10922 TREE_OPERAND (exp, 0));
10923 tree index = TREE_OPERAND (exp, 1);
10924
10925
10926 /* Convert the integer argument to a type the same size as a pointer
10927 so the multiply won't overflow spuriously. */
10928
10929 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
10930 index = convert (type_for_size (POINTER_SIZE, 0), index);
10931
10932 /* The array address isn't volatile even if the array is.
10933 (Of course this isn't terribly relevant since the bytecode
10934 translator treats nearly everything as volatile anyway.) */
10935 TREE_THIS_VOLATILE (array_adr) = 0;
10936
10937 return build1 (INDIRECT_REF, type,
10938 fold (build (PLUS_EXPR,
10939 TYPE_POINTER_TO (type),
10940 array_adr,
10941 fold (build (MULT_EXPR,
10942 TYPE_POINTER_TO (type),
10943 index,
10944 size_in_bytes (type))))));
bbf6f052
RK
10945}
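/* Illustrative sketch, not part of expr.c: the rewrite in
   bc_canonicalize_array_ref is the C equivalence a[i] == *(a + i), with
   the index scaled by the element size when done in byte terms.
   Hypothetical demo:  */

static int
demo_array_ref (int *a, long i)
{
  /* Same element as a[i], spelled out with explicit byte arithmetic.  */
  return *(int *) ((char *) a + i * (long) sizeof (int));
}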
10946
bbf6f052 10947
ca695ac9
JB
10948/* Load the address of the component referenced by the given
10949 COMPONENT_REF expression.
bbf6f052 10950
ca695ac9 10951 Returns innermost lvalue. */
bbf6f052 10952
ca695ac9
JB
10953tree
10954bc_expand_component_address (exp)
10955 tree exp;
bbf6f052 10956{
ca695ac9
JB
10957 tree tem, chain;
10958 enum machine_mode mode;
10959 int bitpos = 0;
10960 HOST_WIDE_INT SIval;
a7c5971a 10961
bbf6f052 10962
ca695ac9
JB
10963 tem = TREE_OPERAND (exp, 1);
10964 mode = DECL_MODE (tem);
bbf6f052 10965
ca695ac9
JB
10966
10967 /* Compute cumulative bit offset for nested component refs
10968 and array refs, and find the ultimate containing object. */
10969
10970 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
bbf6f052 10971 {
ca695ac9
JB
10972 if (TREE_CODE (tem) == COMPONENT_REF)
10973 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
10974 else
10975 if (TREE_CODE (tem) == ARRAY_REF
10976 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10977 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
bbf6f052 10978
ca695ac9
JB
10979 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
10980 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
10981 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
10982 else
10983 break;
10984 }
bbf6f052 10985
c02bd5d9 10986 bc_expand_expr (tem);
bbf6f052 10987
cd1b4b44 10988
ca695ac9
JB
10989 /* For bitfields also push their offset and size */
10990 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
10991 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
10992 else
10993 if ((SIval = bitpos / BITS_PER_UNIT) != 0)
10994 bc_emit_instruction (addconstPSI, SIval);
bbf6f052 10995
ca695ac9 10996 return (TREE_OPERAND (exp, 1));
bbf6f052 10997}
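/* Illustrative sketch, not part of expr.c: the loop above accumulates
   the offsets of nested member references into one total, the same sum
   one would compute by hand with offsetof.  Hypothetical demo in byte
   terms:  */

#include <stddef.h>

struct demo_inner { int a, b; };
struct demo_outer { char pad; struct demo_inner in; };

/* Offset of outer.in.b: offset of "in" within the outer structure
   plus the offset of "b" within the inner one.  */
static size_t
demo_cumulative_offset (void)
{
  return offsetof (struct demo_outer, in)
	 + offsetof (struct demo_inner, b);
}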
e7c33f54 10998
bbf6f052 10999
ca695ac9
JB
11000/* Emit code to push two SI constants */
11001void
11002bc_push_offset_and_size (offset, size)
11003 HOST_WIDE_INT offset, size;
11004{
11005 bc_emit_instruction (constSI, offset);
11006 bc_emit_instruction (constSI, size);
11007}
bbf6f052 11008
bbf6f052 11009
ca695ac9
JB
11010/* Emit byte code to push the address of the given lvalue expression to
11011 the stack. If it's a bit field, we also push offset and size info.
bbf6f052 11012
ca695ac9
JB
11013 Returns innermost component, which allows us to determine not only
11014 its type, but also whether it's a bitfield. */
11015
11016tree
11017bc_expand_address (exp)
bbf6f052 11018 tree exp;
bbf6f052 11019{
ca695ac9
JB
11020 /* Safeguard */
11021 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11022 return (exp);
bbf6f052 11023
e7c33f54 11024
ca695ac9
JB
11025 switch (TREE_CODE (exp))
11026 {
11027 case ARRAY_REF:
e7c33f54 11028
ca695ac9 11029 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
e7c33f54 11030
ca695ac9 11031 case COMPONENT_REF:
bbf6f052 11032
ca695ac9 11033 return (bc_expand_component_address (exp));
bbf6f052 11034
ca695ac9 11035 case INDIRECT_REF:
bbf6f052 11036
ca695ac9
JB
11037 bc_expand_expr (TREE_OPERAND (exp, 0));
11038
11039 /* For variable-sized types: retrieve pointer. Sometimes the
11040 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11041 also make sure we have an operand, just in case... */
11042
11043 if (TREE_OPERAND (exp, 0)
11044 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11045 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11046 bc_emit_instruction (loadP);
11047
11048 /* If packed, also return offset and size */
11049 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11050
11051 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11052 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11053
11054 return (TREE_OPERAND (exp, 0));
11055
11056 case FUNCTION_DECL:
11057
e7a42772
JB
11058 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11059 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
bbf6f052 11060 break;
ca695ac9
JB
11061
11062 case PARM_DECL:
11063
11064 bc_load_parmaddr (DECL_RTL (exp));
11065
11066 /* For variable-sized types: retrieve pointer */
11067 if (TYPE_SIZE (TREE_TYPE (exp))
11068 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11069 bc_emit_instruction (loadP);
11070
11071 /* If packed, also return offset and size */
11072 if (DECL_BIT_FIELD (exp))
11073 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11074 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11075
bbf6f052 11076 break;
ca695ac9
JB
11077
11078 case RESULT_DECL:
11079
11080 bc_emit_instruction (returnP);
bbf6f052 11081 break;
ca695ac9
JB
11082
11083 case VAR_DECL:
11084
11085#if 0
e7a42772 11086 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
11087 bc_load_externaddr (DECL_RTL (exp));
11088#endif
11089
11090 if (DECL_EXTERNAL (exp))
e7a42772 11091 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
eb862a37 11092 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
bbf6f052 11093 else
ca695ac9
JB
11094 bc_load_localaddr (DECL_RTL (exp));
11095
11096 /* For variable-sized types: retrieve pointer */
11097 if (TYPE_SIZE (TREE_TYPE (exp))
11098 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11099 bc_emit_instruction (loadP);
11100
11101 /* If packed, also return offset and size */
11102 if (DECL_BIT_FIELD (exp))
11103 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11104 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11105
bbf6f052 11106 break;
ca695ac9
JB
11107
11108 case STRING_CST:
11109 {
11110 rtx r;
11111
11112 bc_emit_bytecode (constP);
11113 r = output_constant_def (exp);
e7a42772 11114 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
ca695ac9
JB
11115
11116#ifdef DEBUG_PRINT_CODE
11117 fputc ('\n', stderr);
11118#endif
11119 }
bbf6f052 11120 break;
ca695ac9 11121
bbf6f052 11122 default:
bbf6f052 11123
ca695ac9
JB
11124 abort();
11125 break;
bbf6f052
RK
11126 }
11127
ca695ac9
JB
11128 /* Most lvalues don't have components. */
11129 return (exp);
11130}
bbf6f052 11131
ca695ac9
JB
11132
11133/* Emit a type code to be used by the runtime support in handling
11134 parameter passing. The type code consists of the machine mode
11135 plus the minimal alignment shifted left 8 bits. */
11136
11137tree
11138bc_runtime_type_code (type)
11139 tree type;
11140{
11141 int val;
11142
11143 switch (TREE_CODE (type))
bbf6f052 11144 {
ca695ac9
JB
11145 case VOID_TYPE:
11146 case INTEGER_TYPE:
11147 case REAL_TYPE:
11148 case COMPLEX_TYPE:
11149 case ENUMERAL_TYPE:
11150 case POINTER_TYPE:
11151 case RECORD_TYPE:
11152
6bd6178d 11153 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
ca695ac9
JB
11154 break;
11155
11156 case ERROR_MARK:
11157
11158 val = 0;
11159 break;
11160
11161 default:
af508edd 11162
ca695ac9
JB
11163 abort ();
11164 }
11165 return build_int_2 (val, 0);
11166}
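/* Illustrative sketch, not part of expr.c: bc_runtime_type_code packs
   the machine mode into the low 8 bits and the minimal alignment above
   them (assuming the mode number fits in 8 bits).  Hypothetical
   encode/decode pair:  */

static int
demo_encode_type_code (int mode, int align)
{
  return mode | (align << 8);
}

static int
demo_type_code_mode (int code)
{
  return code & 0xff;		/* recover the machine mode */
}

static int
demo_type_code_align (int code)
{
  return code >> 8;		/* recover the minimal alignment */
}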
af508edd 11167
af508edd 11168
ca695ac9
JB
11169/* Generate constructor label */
11170char *
11171bc_gen_constr_label ()
11172{
11173 static int label_counter;
11174 static char label[20];
bbf6f052 11175
ca695ac9 11176 sprintf (label, "*LR%d", label_counter++);
bbf6f052 11177
ca695ac9
JB
11178 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11179}
bbf6f052 11180
bbf6f052 11181
ca695ac9
JB
11182/* Evaluate constructor CONSTR and return pointer to it on level one. We
11183 expand the constructor data as static data, and push a pointer to it.
11184 The pointer is put in the pointer table and is retrieved by a constP
11185 bytecode instruction. We then loop and store each constructor member in
11186 the corresponding component. Finally, we return the original pointer on
11187 the stack. */
af508edd 11188
ca695ac9
JB
11189void
11190bc_expand_constructor (constr)
11191 tree constr;
11192{
11193 char *l;
11194 HOST_WIDE_INT ptroffs;
11195 rtx constr_rtx;
bbf6f052 11196
ca695ac9
JB
11197
11198 /* Literal constructors are handled as constants, whereas
11199 non-literals are evaluated and stored element by element
11200 into the data segment. */
11201
11202 /* Allocate space in the proper segment and push a pointer to that
11203 space on the stack. */
bbf6f052 11204
ca695ac9 11205 l = bc_gen_constr_label ();
bbf6f052 11206
ca695ac9 11207 if (TREE_CONSTANT (constr))
bbf6f052 11208 {
ca695ac9
JB
11209 text_section ();
11210
11211 bc_emit_const_labeldef (l);
11212 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
bbf6f052 11213 }
ca695ac9
JB
11214 else
11215 {
11216 data_section ();
bbf6f052 11217
ca695ac9
JB
11218 bc_emit_data_labeldef (l);
11219 bc_output_data_constructor (constr);
11220 }
bbf6f052 11221
ca695ac9
JB
11222
11223 /* Add reference to pointer table and recall pointer to stack;
11224 this code is common for both types of constructors: literals
11225 and non-literals. */
bbf6f052 11226
de7d9320
JB
11227 ptroffs = bc_define_pointer (l);
11228 bc_emit_instruction (constP, ptroffs);
d39985fa 11229
ca695ac9
JB
11230 /* This is all that has to be done if it's a literal. */
11231 if (TREE_CONSTANT (constr))
11232 return;
bbf6f052 11233
ca695ac9
JB
11234
11235 /* At this point, we have the pointer to the structure on top of the stack.
11236 Generate sequences of store_memory calls for the constructor. */
11237
11238 /* constructor type is structure */
11239 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
e7c33f54 11240 {
ca695ac9
JB
11241 register tree elt;
11242
11243 /* If the constructor has fewer fields than the structure,
11244 clear the whole structure first. */
11245
11246 if (list_length (CONSTRUCTOR_ELTS (constr))
11247 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11248 {
6d6e61ce 11249 bc_emit_instruction (duplicate);
ca695ac9
JB
11250 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11251 bc_emit_instruction (clearBLK);
11252 }
11253
11254 /* Store each element of the constructor into the corresponding
11255 field of TARGET. */
11256
11257 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11258 {
11259 register tree field = TREE_PURPOSE (elt);
11260 register enum machine_mode mode;
11261 int bitsize;
11262 int bitpos;
11263 int unsignedp;
11264
11265 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11266 mode = DECL_MODE (field);
11267 unsignedp = TREE_UNSIGNED (field);
11268
11269 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11270
11271 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11272 /* The alignment of TARGET is
11273 at least what its type requires. */
11274 VOIDmode, 0,
11275 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11276 int_size_in_bytes (TREE_TYPE (constr)));
11277 }
e7c33f54 11278 }
ca695ac9
JB
11279 else
11280
11281 /* Constructor type is array */
11282 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11283 {
11284 register tree elt;
11285 register int i;
11286 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11287 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11288 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11289 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11290
11291 /* If the constructor has fewer fields than the structure,
11292 clear the whole structure first. */
11293
11294 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11295 {
6d6e61ce 11296 bc_emit_instruction (duplicate);
ca695ac9
JB
11297 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11298 bc_emit_instruction (clearBLK);
11299 }
11300
11301
11302 /* Store each element of the constructor into the corresponding
11303 element of TARGET, determined by counting the elements. */
11304
11305 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11306 elt;
11307 elt = TREE_CHAIN (elt), i++)
11308 {
11309 register enum machine_mode mode;
11310 int bitsize;
11311 int bitpos;
11312 int unsignedp;
11313
11314 mode = TYPE_MODE (elttype);
11315 bitsize = GET_MODE_BITSIZE (mode);
11316 unsignedp = TREE_UNSIGNED (elttype);
11317
11318 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11319 /* * TYPE_SIZE_UNIT (elttype) */ );
11320
11321 bc_store_field (elt, bitsize, bitpos, mode,
11322 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11323 /* The alignment of TARGET is
11324 at least what its type requires. */
11325 VOIDmode, 0,
11326 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11327 int_size_in_bytes (TREE_TYPE (constr)));
11328 }
11329
11330 }
11331}
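/* Illustrative sketch, not part of expr.c: the "clear first" logic above
   mirrors C initializer semantics.  When a constructor names fewer
   members than the type holds, the rest must read as zero, so the whole
   object is cleared before the named members are stored.  Hypothetical
   equivalent of the emitted bytecode:  */

#include <string.h>

struct demo_point { int x, y, z; };

static struct demo_point
demo_partial_constructor (void)
{
  struct demo_point p;

  memset (&p, 0, sizeof p);	/* clearBLK: zero the whole object */
  p.x = 1;			/* then store each named member */
  p.y = 2;
  return p;			/* p.z reads as 0 */
}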
bbf6f052 11332
bbf6f052 11333
ca695ac9
JB
11334/* Store the value of EXP (an expression tree) into member FIELD of
11335 structure at address on stack, which has type TYPE, mode MODE and
11336 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11337 structure.
bbf6f052 11338
ca695ac9
JB
11339 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11340 TOTAL_SIZE is its size in bytes, or -1 if variable. */
bbf6f052 11341
ca695ac9
JB
11342void
11343bc_store_field (field, bitsize, bitpos, mode, exp, type,
11344 value_mode, unsignedp, align, total_size)
11345 int bitsize, bitpos;
11346 enum machine_mode mode;
11347 tree field, exp, type;
11348 enum machine_mode value_mode;
11349 int unsignedp;
11350 int align;
11351 int total_size;
11352{
bbf6f052 11353
ca695ac9
JB
11354 /* Expand expression and copy pointer */
11355 bc_expand_expr (exp);
11356 bc_emit_instruction (over);
bbf6f052 11357
bbf6f052 11358
ca695ac9
JB
11359 /* If the component is a bit field, we cannot use addressing to access
11360 it. Use bit-field techniques to store in it. */
bbf6f052 11361
ca695ac9
JB
11362 if (DECL_BIT_FIELD (field))
11363 {
11364 bc_store_bit_field (bitpos, bitsize, unsignedp);
11365 return;
11366 }
11367 else
11368 /* Not bit field */
11369 {
11370 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11371
11372 /* Advance pointer to the desired member */
11373 if (offset)
11374 bc_emit_instruction (addconstPSI, offset);
11375
11376 /* Store */
11377 bc_store_memory (type, field);
11378 }
11379}
bbf6f052 11380
ca695ac9
JB
11381
11382/* Store SI/SU in bitfield */
bbf6f052 11383void
ca695ac9
JB
11384bc_store_bit_field (offset, size, unsignedp)
11385 int offset, size, unsignedp;
bbf6f052 11386{
ca695ac9
JB
11387 /* Push bitfield offset and size */
11388 bc_push_offset_and_size (offset, size);
bbf6f052 11389
ca695ac9
JB
11390 /* Store */
11391 bc_emit_instruction (sstoreBI);
11392}
e87b4f3f 11393
88d3b7f0 11394
ca695ac9
JB
11395/* Load SI/SU from bitfield */
11396void
11397bc_load_bit_field (offset, size, unsignedp)
11398 int offset, size, unsignedp;
11399{
11400 /* Push bitfield offset and size */
11401 bc_push_offset_and_size (offset, size);
88d3b7f0 11402
ca695ac9
JB
11403 /* Load: sign-extend if signed, else zero-extend */
11404 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11405}
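/* Illustrative sketch, not part of expr.c: what the zxloadBI/sxloadBI
   bytecodes do for a field of SIZE bits starting at bit OFFSET of a
   word, written with shifts.  Hypothetical helpers; assume
   0 < size < 32 and offset + size <= 32:  */

static unsigned long
demo_zero_extend_field (unsigned long word, int offset, int size)
{
  return (word >> offset) & ((1UL << size) - 1);
}

static long
demo_sign_extend_field (unsigned long word, int offset, int size)
{
  long v = (long) ((word >> offset) & ((1UL << size) - 1));
  long sign = (long) (1UL << (size - 1));

  return (v ^ sign) - sign;	/* propagate the field's sign bit */
}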
709f5be1 11406
bbf6f052 11407
ca695ac9
JB
11408/* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11409 (adjust stack pointer upwards), negative means add that number of
11410 levels (adjust the stack pointer downwards). Only positive values
11411 normally make sense. */
bbf6f052 11412
ca695ac9
JB
11413void
11414bc_adjust_stack (nlevels)
11415 int nlevels;
11416{
11417 switch (nlevels)
11418 {
11419 case 0:
11420 break;
11421
11422 case 2:
11423 bc_emit_instruction (drop);
11424
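	  /* Fall through: the second drop below finishes the two-level case.  */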
11425 case 1:
11426 bc_emit_instruction (drop);
11427 break;
11428
11429 default:
11430
11431 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11432 stack_depth -= nlevels;
11433 }
11434
a68c7608
RS
11435#if defined (VALIDATE_STACK_FOR_BC)
11436 VALIDATE_STACK_FOR_BC ();
bbf6f052
RK
11437#endif
11438}