/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"


#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;


static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (), enum machine_mode,
				       struct move_by_pieces *));
static void store_constructor	PROTO((tree, rtx));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree));
static int fixed_type_p		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int));
rtx bc_expand_increment		PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code	PROTO((tree));
rtx bc_allocate_local		PROTO((int, int));
void bc_store_memory		PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address		PROTO((tree));
void bc_expand_constructor	PROTO((tree));
void bc_adjust_stack		PROTO((int));
tree bc_canonicalize_array_ref	PROTO((tree));
void bc_load_memory		PROTO((tree, tree));
void bc_load_externaddr		PROTO((rtx));
void bc_load_externaddr_id	PROTO((tree, int));
void bc_load_localaddr		PROTO((rtx));
void bc_load_parmaddr		PROTO((rtx));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to	PROTO((tree));
extern void (*interim_eh_hook)	PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
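
/* Illustrative note, not part of the original source: with MOVE_RATIO
   of 2, a 16-byte copy that would take four SImode moves (4 >= 2) is
   handed to a block-move insn instead; with MOVE_RATIO of 15 the same
   copy is expanded as the four explicit moves, since 4 < 15.  The
   insn count is computed by move_by_pieces_ninsns below.  */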

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */
void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
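
/* Illustrative sketch, not taken from modemap.def: a hypothetical entry

     DEF_MODEMAP (SImode, ..., constSI, loadSI, storeSI)

   would expand, via the macro above, to

     mode_to_const_map[(int) SImode] = constSI;
     mode_to_load_map[(int) SImode] = loadSI;
     mode_to_store_map[(int) SImode] = storeSI;

   The real opcode names are whatever modemap.def defines.  */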

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
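
/* Illustrative sketch, not part of the original source: a caller
   expanding V++ might do, in outline,

     rtx q = enqueue_insn (v, gen_add2_insn (v, const1_rtx));
     ... use protect_from_queue (q, 0) wherever the pre-increment
     value of V is needed ...
     emit_queue ();

   with V a register rtx.  This is only an illustration; the real
   callers are expand_expr and friends below.  */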

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
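
/* Illustrative note, not part of the original source: converting an
   SImode value to DImode on a 32-bit target, when no direct or
   via-word extend insn exists, takes the multiword path above: the
   low word is copied with emit_move_insn and the high word is filled
   with zero (unsigned) or with a copy of the sign bit made by an
   arithmetic right shift of 31 (signed).  */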

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
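
/* Illustrative note, not part of the original source: a caller needing
   an SImode version of an HImode value might write

     rtx si = convert_to_mode (SImode, hi_val, 0);

   which returns HI_VAL itself (or a low part of it) when that is safe,
   and otherwise emits the conversion into a fresh pseudo via
   convert_move.  HI_VAL is a stand-in name.  */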

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
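
/* Illustrative example, not part of the original source: on a 32-bit
   target with MOVE_MAX == 4, a 7-byte copy with 4-byte alignment is
   done as one SImode move, one HImode move, then one QImode move;
   each pass of the loop above uses the widest integer mode still
   smaller than max_size.  */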

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
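
/* Illustrative note, not part of the original source: a call such as

     emit_block_move (dst, src, GEN_INT (64), 4);

   with DST and SRC standing for BLKmode MEMs first considers
   move_by_pieces (taken only if its insn count is below MOVE_RATIO),
   then any movstrM pattern the target defines, and finally falls back
   to the memcpy (or bcopy) library call.  */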

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
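
/* Illustrative note, not part of the original source: moving a DImode
   value into two consecutive hard registers starting at register 4 on
   a 32-bit target would be

     move_block_to_reg (4, x, 2, DImode);

   which emits a load-multiple insn when available and word-by-word
   moves otherwise.  */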

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
1784
94b25f81
RK
1785/* Add a USE expression for REG to the (possibly empty) list pointed
1786 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
1787
1788void
b3f8cf4a
RK
1789use_reg (call_fusage, reg)
1790 rtx *call_fusage, reg;
1791{
0304dfbb
DE
1792 if (GET_CODE (reg) != REG
1793 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
b3f8cf4a
RK
1794 abort();
1795
1796 *call_fusage
1797 = gen_rtx (EXPR_LIST, VOIDmode,
0304dfbb 1798 gen_rtx (USE, VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
1799}
1800
94b25f81
RK
1801/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1802 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
1803
1804void
0304dfbb
DE
1805use_regs (call_fusage, regno, nregs)
1806 rtx *call_fusage;
bbf6f052
RK
1807 int regno;
1808 int nregs;
1809{
0304dfbb 1810 int i;
bbf6f052 1811
0304dfbb
DE
1812 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1813 abort ();
1814
1815 for (i = 0; i < nregs; i++)
1816 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
bbf6f052
RK
1817}
1818\f
1819/* Write zeros through the storage of OBJECT.
1820 If OBJECT has BLKmode, SIZE is its length in bytes. */
1821
1822void
1823clear_storage (object, size)
1824 rtx object;
1825 int size;
1826{
1827 if (GET_MODE (object) == BLKmode)
1828 {
1829#ifdef TARGET_MEM_FUNCTIONS
1830 emit_library_call (memset_libfunc, 0,
1831 VOIDmode, 3,
1832 XEXP (object, 0), Pmode, const0_rtx, ptr_mode,
1833 GEN_INT (size), ptr_mode);
1834#else
1835 emit_library_call (bzero_libfunc, 0,
1836 VOIDmode, 2,
1837 XEXP (object, 0), Pmode,
1838 GEN_INT (size), ptr_mode);
1839#endif
1840 }
1841 else
1842 emit_move_insn (object, const0_rtx);
1843}
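/* Illustrative note, not part of the original source: for a BLKmode
   OBJECT the library call emitted above behaves like one of

     memset (&object, 0, size);     with TARGET_MEM_FUNCTIONS
     bzero (&object, size);         otherwise

   while a non-BLKmode OBJECT gets a single move of const0_rtx.  */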
1844
1845/* Generate code to copy Y into X.
1846 Both Y and X must have the same mode, except that
1847 Y can be a constant with VOIDmode.
1848 This mode cannot be BLKmode; use emit_block_move for that.
1849
1850 Return the last instruction emitted. */
1851
1852rtx
1853emit_move_insn (x, y)
1854 rtx x, y;
1855{
1856 enum machine_mode mode = GET_MODE (x);
1857
1858 x = protect_from_queue (x, 1);
1859 y = protect_from_queue (y, 0);
1860
1861 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1862 abort ();
1863
1864 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1865 y = force_const_mem (mode, y);
1866
1867 /* If X or Y are memory references, verify that their addresses are valid
1868 for the machine. */
1869 if (GET_CODE (x) == MEM
1870 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1871 && ! push_operand (x, GET_MODE (x)))
1872 || (flag_force_addr
1873 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1874 x = change_address (x, VOIDmode, XEXP (x, 0));
1875
1876 if (GET_CODE (y) == MEM
1877 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1878 || (flag_force_addr
1879 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1880 y = change_address (y, VOIDmode, XEXP (y, 0));
1881
1882 if (mode == BLKmode)
1883 abort ();
1884
1885 return emit_move_insn_1 (x, y);
1886}
1887
1888/* Low level part of emit_move_insn.
1889 Called just like emit_move_insn, but assumes X and Y
1890 are basically valid. */
1891
1892rtx
1893emit_move_insn_1 (x, y)
1894 rtx x, y;
1895{
1896 enum machine_mode mode = GET_MODE (x);
1897 enum machine_mode submode;
1898 enum mode_class class = GET_MODE_CLASS (mode);
1899 int i;
1900
1901 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1902 return
1903 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1904
1905 /* Expand complex moves by moving real part and imag part, if possible. */
1906 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1907 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
1908 * BITS_PER_UNIT),
1909 (class == MODE_COMPLEX_INT
1910 ? MODE_INT : MODE_FLOAT),
1911 0))
1912 && (mov_optab->handlers[(int) submode].insn_code
1913 != CODE_FOR_nothing))
1914 {
1915 /* Don't split destination if it is a stack push. */
1916 int stack = push_operand (x, GET_MODE (x));
1917 rtx insns;
1918
1919 /* If this is a stack push, push the high part first, so it
1920 ends up in argument order.
1921
1922 In that case, change_address is used only to convert
1923 the mode, not to change the address. */
1924 if (stack)
1925 {
1926 /* Note that the real part always precedes the imag part in memory
1927 regardless of the machine's endianness. */
1928#ifdef STACK_GROWS_DOWNWARD
1929 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1930 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1931 gen_imagpart (submode, y)));
1932 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1933 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1934 gen_realpart (submode, y)));
1935#else
1936 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1937 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1938 gen_realpart (submode, y)));
1939 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1940 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1941 gen_imagpart (submode, y)));
1942#endif
1943 }
1944 else
1945 {
1946 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1947 (gen_realpart (submode, x), gen_realpart (submode, y)));
1948 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1949 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
1950 }
1951
1952 return get_last_insn ();
1953 }
1954
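/* Illustrative sketch, not part of the original source: for an SCmode
   move with no movsc pattern but a working movsf pattern, SUBMODE is
   SFmode and the non-push case above is equivalent to the two part
   moves below.  */
#if 0
  emit_move_insn (gen_realpart (SFmode, x), gen_realpart (SFmode, y));
  emit_move_insn (gen_imagpart (SFmode, x), gen_imagpart (SFmode, y));
#endif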
1955 /* This will handle any multi-word mode that lacks a move_insn pattern.
1956 However, you will get better code if you define such patterns,
1957 even if they must turn into multiple assembler instructions. */
1958 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1959 {
1960 rtx last_insn = 0;
1961 rtx insns;
1962
1963#ifdef PUSH_ROUNDING
1964
1965 /* If X is a push on the stack, do the push now and replace
1966 X with a reference to the stack pointer. */
1967 if (push_operand (x, GET_MODE (x)))
1968 {
1969 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
1970 x = change_address (x, VOIDmode, stack_pointer_rtx);
1971 }
1972#endif
1973
1974 for (i = 0;
1975 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1976 i++)
1977 {
1978 rtx xpart = operand_subword (x, i, 1, mode);
1979 rtx ypart = operand_subword (y, i, 1, mode);
1980
1981 /* If we can't get a part of Y, put Y into memory if it is a
1982 constant. Otherwise, force it into a register. If we still
1983 can't get a part of Y, abort. */
1984 if (ypart == 0 && CONSTANT_P (y))
1985 {
1986 y = force_const_mem (mode, y);
1987 ypart = operand_subword (y, i, 1, mode);
1988 }
1989 else if (ypart == 0)
1990 ypart = operand_subword_force (y, i, mode);
1991
1992 if (xpart == 0 || ypart == 0)
1993 abort ();
1994
1995 last_insn = emit_move_insn (xpart, ypart);
1996 }
1997
1998 return last_insn;
1999 }
2000 else
2001 abort ();
2002}
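/* Illustrative sketch, not part of the original source: on a 32-bit
   target a DImode move with no movdi pattern is split by the word loop
   above into the two moves below.  */
#if 0
  emit_move_insn (operand_subword (x, 0, 1, DImode),
                  operand_subword (y, 0, 1, DImode));
  emit_move_insn (operand_subword (x, 1, 1, DImode),
                  operand_subword (y, 1, 1, DImode));
#endif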
2003\f
2004/* Pushing data onto the stack. */
2005
2006/* Push a block of length SIZE (perhaps variable)
2007 and return an rtx to address the beginning of the block.
2008 Note that it is not possible for the value returned to be a QUEUED.
2009 The value may be virtual_outgoing_args_rtx.
2010
2011 EXTRA is the number of bytes of padding to push in addition to SIZE.
2012 BELOW nonzero means this padding comes at low addresses;
2013 otherwise, the padding comes at high addresses. */
2014
2015rtx
2016push_block (size, extra, below)
2017 rtx size;
2018 int extra, below;
2019{
2020 register rtx temp;
2021
2022 size = convert_modes (Pmode, ptr_mode, size, 1);
2023 if (CONSTANT_P (size))
2024 anti_adjust_stack (plus_constant (size, extra));
2025 else if (GET_CODE (size) == REG && extra == 0)
2026 anti_adjust_stack (size);
2027 else
2028 {
2029 rtx temp = copy_to_mode_reg (Pmode, size);
2030 if (extra != 0)
2031 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2032 temp, 0, OPTAB_LIB_WIDEN);
2033 anti_adjust_stack (temp);
2034 }
2035
2036#ifdef STACK_GROWS_DOWNWARD
2037 temp = virtual_outgoing_args_rtx;
2038 if (extra != 0 && below)
2039 temp = plus_constant (temp, extra);
2040#else
2041 if (GET_CODE (size) == CONST_INT)
2042 temp = plus_constant (virtual_outgoing_args_rtx,
2043 - INTVAL (size) - (below ? 0 : extra));
2044 else if (extra != 0 && !below)
2045 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2046 negate_rtx (Pmode, plus_constant (size, extra)));
2047 else
2048 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2049 negate_rtx (Pmode, size));
2050#endif
2051
2052 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2053}
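/* Illustrative note, not part of the original source: with a constant
   SIZE of 16, EXTRA == 0 and a downward-growing stack, push_block emits
   a single anti_adjust_stack (GEN_INT (16)) and returns an address based
   on virtual_outgoing_args_rtx; the 16 bytes just allocated begin at
   that address.  */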
2054
2055rtx
2056gen_push_operand ()
2057{
2058 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2059}
2060
2061/* Generate code to push X onto the stack, assuming it has mode MODE and
2062 type TYPE.
2063 MODE is redundant except when X is a CONST_INT (since they don't
2064 carry mode info).
2065 SIZE is an rtx for the size of data to be copied (in bytes),
2066 needed only if X is BLKmode.
2067
2068 ALIGN (in bytes) is maximum alignment we can assume.
2069
2070 If PARTIAL and REG are both nonzero, then copy that many of the first
2071 words of X into registers starting with REG, and push the rest of X.
2072 The amount of space pushed is decreased by PARTIAL words,
2073 rounded *down* to a multiple of PARM_BOUNDARY.
2074 REG must be a hard register in this case.
2075 If REG is zero but PARTIAL is not, take all other actions for an
2076 argument partially in registers, but do not actually load any
2077 registers.
2078
2079 EXTRA is the amount in bytes of extra space to leave next to this arg.
2080 This is ignored if an argument block has already been allocated.
2081
2082 On a machine that lacks real push insns, ARGS_ADDR is the address of
2083 the bottom of the argument block for this call. We use indexing off there
2084 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2085 argument block has not been preallocated.
2086
2087 ARGS_SO_FAR is the size of args previously pushed for this call. */
2088
2089void
2090emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2091 args_addr, args_so_far)
2092 register rtx x;
2093 enum machine_mode mode;
2094 tree type;
2095 rtx size;
2096 int align;
2097 int partial;
2098 rtx reg;
2099 int extra;
2100 rtx args_addr;
2101 rtx args_so_far;
2102{
2103 rtx xinner;
2104 enum direction stack_direction
2105#ifdef STACK_GROWS_DOWNWARD
2106 = downward;
2107#else
2108 = upward;
2109#endif
2110
2111 /* Decide where to pad the argument: `downward' for below,
2112 `upward' for above, or `none' for don't pad it.
2113 Default is below for small data on big-endian machines; else above. */
2114 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2115
2116 /* Invert direction if stack is post-update. */
2117 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2118 if (where_pad != none)
2119 where_pad = (where_pad == downward ? upward : downward);
2120
2121 xinner = x = protect_from_queue (x, 0);
2122
2123 if (mode == BLKmode)
2124 {
2125 /* Copy a block into the stack, entirely or partially. */
2126
2127 register rtx temp;
2128 int used = partial * UNITS_PER_WORD;
2129 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2130 int skip;
2131
2132 if (size == 0)
2133 abort ();
2134
2135 used -= offset;
2136
2137 /* USED is now the # of bytes we need not copy to the stack
2138 because registers will take care of them. */
2139
2140 if (partial != 0)
2141 xinner = change_address (xinner, BLKmode,
2142 plus_constant (XEXP (xinner, 0), used));
2143
2144 /* If the partial register-part of the arg counts in its stack size,
2145 skip the part of stack space corresponding to the registers.
2146 Otherwise, start copying to the beginning of the stack space,
2147 by setting SKIP to 0. */
2148#ifndef REG_PARM_STACK_SPACE
2149 skip = 0;
2150#else
2151 skip = used;
2152#endif
2153
2154#ifdef PUSH_ROUNDING
2155 /* Do it with several push insns if that doesn't take lots of insns
2156 and if there is no difficulty with push insns that skip bytes
2157 on the stack for alignment purposes. */
2158 if (args_addr == 0
2159 && GET_CODE (size) == CONST_INT
2160 && skip == 0
2161 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2162 < MOVE_RATIO)
2163 /* Here we avoid the case of a structure whose weak alignment
2164 forces many pushes of a small amount of data,
2165 and such small pushes do rounding that causes trouble. */
2166 && ((! SLOW_UNALIGNED_ACCESS)
2167 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2168 || PUSH_ROUNDING (align) == align)
2169 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2170 {
2171 /* Push padding now if padding above and stack grows down,
2172 or if padding below and stack grows up.
2173 But if space already allocated, this has already been done. */
2174 if (extra && args_addr == 0
2175 && where_pad != none && where_pad != stack_direction)
2176 anti_adjust_stack (GEN_INT (extra));
2177
2178 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2179 INTVAL (size) - used, align);
2180 }
2181 else
2182#endif /* PUSH_ROUNDING */
2183 {
2184 /* Otherwise make space on the stack and copy the data
2185 to the address of that space. */
2186
2187 /* Deduct words put into registers from the size we must copy. */
2188 if (partial != 0)
2189 {
2190 if (GET_CODE (size) == CONST_INT)
2191 size = GEN_INT (INTVAL (size) - used);
2192 else
2193 size = expand_binop (GET_MODE (size), sub_optab, size,
2194 GEN_INT (used), NULL_RTX, 0,
2195 OPTAB_LIB_WIDEN);
2196 }
2197
2198 /* Get the address of the stack space.
2199 In this case, we do not deal with EXTRA separately.
2200 A single stack adjust will do. */
2201 if (! args_addr)
2202 {
2203 temp = push_block (size, extra, where_pad == downward);
2204 extra = 0;
2205 }
2206 else if (GET_CODE (args_so_far) == CONST_INT)
2207 temp = memory_address (BLKmode,
2208 plus_constant (args_addr,
2209 skip + INTVAL (args_so_far)));
2210 else
2211 temp = memory_address (BLKmode,
2212 plus_constant (gen_rtx (PLUS, Pmode,
2213 args_addr, args_so_far),
2214 skip));
2215
2216 /* TEMP is the address of the block. Copy the data there. */
2217 if (GET_CODE (size) == CONST_INT
2218 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2219 < MOVE_RATIO))
2220 {
2221 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2222 INTVAL (size), align);
2223 goto ret;
2224 }
2225 /* Try the most limited insn first, because there's no point
2226 including more than one in the machine description unless
2227 the more limited one has some advantage. */
2228#ifdef HAVE_movstrqi
2229 if (HAVE_movstrqi
2230 && GET_CODE (size) == CONST_INT
2231 && ((unsigned) INTVAL (size)
2232 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2233 {
2234 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2235 xinner, size, GEN_INT (align));
2236 if (pat != 0)
2237 {
2238 emit_insn (pat);
2239 goto ret;
2240 }
2241 }
2242#endif
2243#ifdef HAVE_movstrhi
2244 if (HAVE_movstrhi
2245 && GET_CODE (size) == CONST_INT
2246 && ((unsigned) INTVAL (size)
2247 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2248 {
2249 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2250 xinner, size, GEN_INT (align));
2251 if (pat != 0)
2252 {
2253 emit_insn (pat);
2254 goto ret;
2255 }
2256 }
2257#endif
2258#ifdef HAVE_movstrsi
2259 if (HAVE_movstrsi)
2260 {
2261 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2262 xinner, size, GEN_INT (align));
2263 if (pat != 0)
2264 {
2265 emit_insn (pat);
2266 goto ret;
2267 }
2268 }
2269#endif
2270#ifdef HAVE_movstrdi
2271 if (HAVE_movstrdi)
2272 {
2273 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2274 xinner, size, GEN_INT (align));
2275 if (pat != 0)
2276 {
2277 emit_insn (pat);
2278 goto ret;
2279 }
2280 }
2281#endif
2282
2283#ifndef ACCUMULATE_OUTGOING_ARGS
2284 /* If the source is referenced relative to the stack pointer,
2285 copy it to another register to stabilize it. We do not need
2286 to do this if we know that we won't be changing sp. */
2287
2288 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2289 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2290 temp = copy_to_reg (temp);
2291#endif
2292
2293 /* Make inhibit_defer_pop nonzero around the library call
2294 to force it to pop the bcopy-arguments right away. */
2295 NO_DEFER_POP;
2296#ifdef TARGET_MEM_FUNCTIONS
2297 emit_library_call (memcpy_libfunc, 0,
2298 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2299 convert_to_mode (TYPE_MODE (sizetype),
2300 size, TREE_UNSIGNED (sizetype)),
2301 TYPE_MODE (sizetype));
2302#else
2303 emit_library_call (bcopy_libfunc, 0,
2304 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2305 convert_to_mode (TYPE_MODE (sizetype),
2306 size, TREE_UNSIGNED (sizetype)),
2307 TYPE_MODE (sizetype));
2308#endif
2309 OK_DEFER_POP;
2310 }
2311 }
2312 else if (partial > 0)
2313 {
2314 /* Scalar partly in registers. */
2315
2316 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2317 int i;
2318 int not_stack;
2319 /* # words of start of argument
2320 that we must make space for but need not store. */
2321 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2322 int args_offset = INTVAL (args_so_far);
2323 int skip;
2324
2325 /* Push padding now if padding above and stack grows down,
2326 or if padding below and stack grows up.
2327 But if space already allocated, this has already been done. */
2328 if (extra && args_addr == 0
2329 && where_pad != none && where_pad != stack_direction)
2330 anti_adjust_stack (GEN_INT (extra));
2331
2332 /* If we make space by pushing it, we might as well push
2333 the real data. Otherwise, we can leave OFFSET nonzero
2334 and leave the space uninitialized. */
2335 if (args_addr == 0)
2336 offset = 0;
2337
2338 /* Now NOT_STACK gets the number of words that we don't need to
2339 allocate on the stack. */
2340 not_stack = partial - offset;
2341
2342 /* If the partial register-part of the arg counts in its stack size,
2343 skip the part of stack space corresponding to the registers.
2344 Otherwise, start copying to the beginning of the stack space,
2345 by setting SKIP to 0. */
2346#ifndef REG_PARM_STACK_SPACE
2347 skip = 0;
2348#else
2349 skip = not_stack;
2350#endif
2351
2352 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2353 x = validize_mem (force_const_mem (mode, x));
2354
2355 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2356 SUBREGs of such registers are not allowed. */
2357 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2358 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2359 x = copy_to_reg (x);
2360
2361 /* Loop over all the words allocated on the stack for this arg. */
2362 /* We can do it by words, because any scalar bigger than a word
2363 has a size a multiple of a word. */
2364#ifndef PUSH_ARGS_REVERSED
2365 for (i = not_stack; i < size; i++)
2366#else
2367 for (i = size - 1; i >= not_stack; i--)
2368#endif
2369 if (i >= not_stack + offset)
2370 emit_push_insn (operand_subword_force (x, i, mode),
2371 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2372 0, args_addr,
2373 GEN_INT (args_offset + ((i - not_stack + skip)
2374 * UNITS_PER_WORD)));
2375 }
2376 else
2377 {
2378 rtx addr;
2379
2380 /* Push padding now if padding above and stack grows down,
2381 or if padding below and stack grows up.
2382 But if space already allocated, this has already been done. */
2383 if (extra && args_addr == 0
2384 && where_pad != none && where_pad != stack_direction)
2385 anti_adjust_stack (GEN_INT (extra));
2386
2387#ifdef PUSH_ROUNDING
2388 if (args_addr == 0)
2389 addr = gen_push_operand ();
2390 else
2391#endif
2392 if (GET_CODE (args_so_far) == CONST_INT)
2393 addr
2394 = memory_address (mode,
2395 plus_constant (args_addr, INTVAL (args_so_far)));
2396 else
2397 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2398 args_so_far));
2399
2400 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2401 }
2402
2403 ret:
2404 /* If part should go in registers, copy that part
2405 into the appropriate registers. Do this now, at the end,
2406 since mem-to-mem copies above may do function calls. */
2407 if (partial > 0 && reg != 0)
2408 move_block_to_reg (REGNO (reg), x, partial, mode);
2409
2410 if (extra && args_addr == 0 && where_pad == stack_direction)
2411 anti_adjust_stack (GEN_INT (extra));
2412}
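/* Illustrative note, not part of the original source: pushing a 4-word
   scalar with PARTIAL == 2 and REG a hard register pushes only words 2
   and 3 onto the stack; the code at `ret' above then loads words 0 and
   1 with

     move_block_to_reg (REGNO (reg), x, 2, mode);

   so the argument ends up split between registers and stack.  */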
2413\f
2414/* Expand an assignment that stores the value of FROM into TO.
2415 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2416 (This may contain a QUEUED rtx;
2417 if the value is constant, this rtx is a constant.)
2418 Otherwise, the returned value is NULL_RTX.
2419
2420 SUGGEST_REG is no longer actually used.
2421 It used to mean, copy the value through a register
2422 and return that register, if that is possible.
2423 We now use WANT_VALUE to decide whether to do this. */
2424
2425rtx
2426expand_assignment (to, from, want_value, suggest_reg)
2427 tree to, from;
2428 int want_value;
2429 int suggest_reg;
2430{
2431 register rtx to_rtx = 0;
2432 rtx result;
2433
2434 /* Don't crash if the lhs of the assignment was erroneous. */
2435
2436 if (TREE_CODE (to) == ERROR_MARK)
2437 {
2438 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2439 return want_value ? result : NULL_RTX;
2440 }
2441
2442 if (output_bytecode)
2443 {
2444 tree dest_innermost;
2445
2446 bc_expand_expr (from);
2447 bc_emit_instruction (duplicate);
2448
2449 dest_innermost = bc_expand_address (to);
2450
2451 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2452 take care of it here. */
2453
2454 bc_store_memory (TREE_TYPE (to), dest_innermost);
2455 return NULL;
2456 }
2457
2458 /* Assignment of a structure component needs special treatment
2459 if the structure component's rtx is not simply a MEM.
2460 Assignment of an array element at a constant index, and assignment of
2461 an array element in an unaligned packed structure field, have the same
2462 problem. */
2463
2464 if (TREE_CODE (to) == COMPONENT_REF
2465 || TREE_CODE (to) == BIT_FIELD_REF
2466 || (TREE_CODE (to) == ARRAY_REF
2467 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2468 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2469 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2470 {
2471 enum machine_mode mode1;
2472 int bitsize;
2473 int bitpos;
2474 tree offset;
2475 int unsignedp;
2476 int volatilep = 0;
2477 tree tem;
2478 int alignment;
2479
2480 push_temp_slots ();
2481 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2482 &mode1, &unsignedp, &volatilep);
2483
2484 /* If we are going to use store_bit_field and extract_bit_field,
2485 make sure to_rtx will be safe for multiple use. */
2486
2487 if (mode1 == VOIDmode && want_value)
2488 tem = stabilize_reference (tem);
2489
2490 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2491 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2492 if (offset != 0)
2493 {
2494 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2495
2496 if (GET_CODE (to_rtx) != MEM)
2497 abort ();
2498 to_rtx = change_address (to_rtx, VOIDmode,
2499 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2500 force_reg (ptr_mode, offset_rtx)));
2501 /* If we have a variable offset, the known alignment
2502 is only that of the innermost structure containing the field.
2503 (Actually, we could sometimes do better by using the
2504 align of an element of the innermost array, but no need.) */
2505 if (TREE_CODE (to) == COMPONENT_REF
2506 || TREE_CODE (to) == BIT_FIELD_REF)
2507 alignment
2508 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2509 }
2510 if (volatilep)
2511 {
2512 if (GET_CODE (to_rtx) == MEM)
2513 {
2514 /* When the offset is zero, to_rtx is the address of the
2515 structure we are storing into, and hence may be shared.
2516 We must make a new MEM before setting the volatile bit. */
2517 if (offset == 0)
2518 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2519 MEM_VOLATILE_P (to_rtx) = 1;
2520 }
2521#if 0 /* This was turned off because, when a field is volatile
2522 in an object which is not volatile, the object may be in a register,
2523 and then we would abort over here. */
2524 else
2525 abort ();
2526#endif
2527 }
2528
2529 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2530 (want_value
2531 /* Spurious cast makes HPUX compiler happy. */
2532 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2533 : VOIDmode),
2534 unsignedp,
2535 /* Required alignment of containing datum. */
2536 alignment,
2537 int_size_in_bytes (TREE_TYPE (tem)));
2538 preserve_temp_slots (result);
2539 free_temp_slots ();
2540 pop_temp_slots ();
2541
2542 /* If the value is meaningful, convert RESULT to the proper mode.
2543 Otherwise, return nothing. */
2544 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2545 TYPE_MODE (TREE_TYPE (from)),
2546 result,
2547 TREE_UNSIGNED (TREE_TYPE (to)))
2548 : NULL_RTX);
2549 }
2550
2551 /* If the rhs is a function call and its value is not an aggregate,
2552 call the function before we start to compute the lhs.
2553 This is needed for correct code for cases such as
2554 val = setjmp (buf) on machines where reference to val
2555 requires loading up part of an address in a separate insn.
2556
2557 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2558 a promoted variable where the zero- or sign- extension needs to be done.
2559 Handling this in the normal way is safe because no computation is done
2560 before the call. */
2561 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2562 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2563 {
2564 rtx value;
2565
2566 push_temp_slots ();
2567 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2568 if (to_rtx == 0)
2569 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2570
2571 if (GET_MODE (to_rtx) == BLKmode)
2572 {
2573 int align = MIN (TYPE_ALIGN (TREE_TYPE (from)), BITS_PER_WORD);
2574 emit_block_move (to_rtx, value, expr_size (from), align);
2575 }
2576 else
2577 emit_move_insn (to_rtx, value);
2578 preserve_temp_slots (to_rtx);
2579 free_temp_slots ();
2580 pop_temp_slots ();
2581 return want_value ? to_rtx : NULL_RTX;
2582 }
2583
2584 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2585 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2586
2587 if (to_rtx == 0)
2588 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2589
2590 /* Don't move directly into a return register. */
2591 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2592 {
2593 rtx temp;
2594
2595 push_temp_slots ();
2596 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2597 emit_move_insn (to_rtx, temp);
2598 preserve_temp_slots (to_rtx);
2599 free_temp_slots ();
2600 pop_temp_slots ();
2601 return want_value ? to_rtx : NULL_RTX;
2602 }
2603
2604 /* In case we are returning the contents of an object which overlaps
2605 the place the value is being stored, use a safe function when copying
2606 a value through a pointer into a structure value return block. */
2607 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2608 && current_function_returns_struct
2609 && !current_function_returns_pcc_struct)
2610 {
2611 rtx from_rtx, size;
2612
2613 push_temp_slots ();
2614 size = expr_size (from);
2615 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2616
2617#ifdef TARGET_MEM_FUNCTIONS
2618 emit_library_call (memcpy_libfunc, 0,
2619 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2620 XEXP (from_rtx, 0), Pmode,
2621 convert_to_mode (TYPE_MODE (sizetype),
2622 size, TREE_UNSIGNED (sizetype)),
2623 TYPE_MODE (sizetype));
2624#else
2625 emit_library_call (bcopy_libfunc, 0,
2626 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2627 XEXP (to_rtx, 0), Pmode,
2628 convert_to_mode (TYPE_MODE (sizetype),
2629 size, TREE_UNSIGNED (sizetype)),
2630 TYPE_MODE (sizetype));
2631#endif
2632
2633 preserve_temp_slots (to_rtx);
2634 free_temp_slots ();
2635 pop_temp_slots ();
2636 return want_value ? to_rtx : NULL_RTX;
2637 }
2638
2639 /* Compute FROM and store the value in the rtx we got. */
2640
2641 push_temp_slots ();
2642 result = store_expr (from, to_rtx, want_value);
2643 preserve_temp_slots (result);
2644 free_temp_slots ();
2645 pop_temp_slots ();
2646 return want_value ? result : NULL_RTX;
2647}
2648
2649/* Generate code for computing expression EXP,
2650 and storing the value into TARGET.
2651 TARGET may contain a QUEUED rtx.
2652
2653 If WANT_VALUE is nonzero, return a copy of the value
2654 not in TARGET, so that we can be sure to use the proper
2655 value in a containing expression even if TARGET has something
2656 else stored in it. If possible, we copy the value through a pseudo
2657 and return that pseudo. Or, if the value is constant, we try to
2658 return the constant. In some cases, we return a pseudo
2659 copied *from* TARGET.
2660
2661 If the mode is BLKmode then we may return TARGET itself.
2662 It turns out that in BLKmode it doesn't cause a problem,
2663 because C has no operators that could combine two different
2664 assignments into the same BLKmode object with different values
2665 with no sequence point. Will other languages need this to
2666 be more thorough?
2667
2668 If WANT_VALUE is 0, we return NULL, to make sure
2669 to catch quickly any cases where the caller uses the value
2670 and fails to set WANT_VALUE. */
2671
2672rtx
2673store_expr (exp, target, want_value)
2674 register tree exp;
2675 register rtx target;
2676 int want_value;
2677{
2678 register rtx temp;
2679 int dont_return_target = 0;
2680
2681 if (TREE_CODE (exp) == COMPOUND_EXPR)
2682 {
2683 /* Perform first part of compound expression, then assign from second
2684 part. */
2685 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2686 emit_queue ();
2687 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2688 }
2689 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2690 {
2691 /* For conditional expression, get safe form of the target. Then
2692 test the condition, doing the appropriate assignment on either
2693 side. This avoids the creation of unnecessary temporaries.
2694 For non-BLKmode, it is more efficient not to do this. */
2695
2696 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2697
2698 emit_queue ();
2699 target = protect_from_queue (target, 1);
2700
2701 NO_DEFER_POP;
2702 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2703 store_expr (TREE_OPERAND (exp, 1), target, 0);
2704 emit_queue ();
2705 emit_jump_insn (gen_jump (lab2));
2706 emit_barrier ();
2707 emit_label (lab1);
2708 store_expr (TREE_OPERAND (exp, 2), target, 0);
2709 emit_queue ();
2710 emit_label (lab2);
2711 OK_DEFER_POP;
2712 return want_value ? target : NULL_RTX;
2713 }
2714 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2715 && GET_MODE (target) != BLKmode)
2716 /* If target is in memory and caller wants value in a register instead,
2717 arrange that. Pass TARGET as target for expand_expr so that,
2718 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2719 We know expand_expr will not use the target in that case.
2720 Don't do this if TARGET is volatile because we are supposed
2721 to write it and then read it. */
2722 {
2723 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2724 GET_MODE (target), 0);
2725 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2726 temp = copy_to_reg (temp);
2727 dont_return_target = 1;
2728 }
2729 else if (queued_subexp_p (target))
2730 /* If target contains a postincrement, let's not risk
2731 using it as the place to generate the rhs. */
2732 {
2733 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2734 {
2735 /* Expand EXP into a new pseudo. */
2736 temp = gen_reg_rtx (GET_MODE (target));
2737 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2738 }
2739 else
2740 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2741
2742 /* If target is volatile, ANSI requires accessing the value
2743 *from* the target, if it is accessed. So make that happen.
2744 In no case return the target itself. */
2745 if (! MEM_VOLATILE_P (target) && want_value)
2746 dont_return_target = 1;
2747 }
2748 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2749 /* If this is a scalar in a register that is stored in a wider mode
2750 than the declared mode, compute the result into its declared mode
2751 and then convert to the wider mode. Our value is the computed
2752 expression. */
2753 {
2754 /* If we don't want a value, we can do the conversion inside EXP,
2755 which will often result in some optimizations. Do the conversion
2756 in two steps: first change the signedness, if needed, then
2757 the extend. */
2758 if (! want_value)
2759 {
2760 if (TREE_UNSIGNED (TREE_TYPE (exp))
2761 != SUBREG_PROMOTED_UNSIGNED_P (target))
2762 exp
2763 = convert
2764 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
2765 TREE_TYPE (exp)),
2766 exp);
2767
2768 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
2769 SUBREG_PROMOTED_UNSIGNED_P (target)),
2770 exp);
2771 }
2772
2773 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2774
2775 /* If TEMP is a volatile MEM and we want a result value, make
2776 the access now so it gets done only once. */
2777 if (GET_CODE (temp) == MEM && MEM_VOLATILE_P (temp) && want_value)
2778 temp = copy_to_reg (temp);
2779
2780 /* If TEMP is a VOIDmode constant, use convert_modes to make
2781 sure that we properly convert it. */
2782 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2783 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2784 TYPE_MODE (TREE_TYPE (exp)), temp,
2785 SUBREG_PROMOTED_UNSIGNED_P (target));
2786
2787 convert_move (SUBREG_REG (target), temp,
2788 SUBREG_PROMOTED_UNSIGNED_P (target));
2789 return want_value ? temp : NULL_RTX;
2790 }
2791 else
2792 {
2793 temp = expand_expr (exp, target, GET_MODE (target), 0);
2794 /* Return TARGET if it's a specified hardware register.
2795 If TARGET is a volatile mem ref, either return TARGET
2796 or return a reg copied *from* TARGET; ANSI requires this.
2797
2798 Otherwise, if TEMP is not TARGET, return TEMP
2799 if it is constant (for efficiency),
2800 or if we really want the correct value. */
2801 if (!(target && GET_CODE (target) == REG
2802 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2803 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2804 && temp != target
2805 && (CONSTANT_P (temp) || want_value))
2806 dont_return_target = 1;
2807 }
2808
2809 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2810 the same as that of TARGET, adjust the constant. This is needed, for
2811 example, in case it is a CONST_DOUBLE and we want only a word-sized
2812 value. */
2813 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2814 && TREE_CODE (exp) != ERROR_MARK
2815 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2816 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2817 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2818
2819 /* If value was not generated in the target, store it there.
2820 Convert the value to TARGET's type first if necessary. */
2821
2822 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2823 {
2824 target = protect_from_queue (target, 1);
2825 if (GET_MODE (temp) != GET_MODE (target)
2826 && GET_MODE (temp) != VOIDmode)
2827 {
2828 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2829 if (dont_return_target)
2830 {
2831 /* In this case, we will return TEMP,
2832 so make sure it has the proper mode.
2833 But don't forget to store the value into TARGET. */
2834 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2835 emit_move_insn (target, temp);
2836 }
2837 else
2838 convert_move (target, temp, unsignedp);
2839 }
2840
2841 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2842 {
2843 /* Handle copying a string constant into an array.
2844 The string constant may be shorter than the array.
2845 So copy just the string's actual length, and clear the rest. */
2846 rtx size;
2847 rtx addr;
2848
2849 /* Get the size of the data type of the string,
2850 which is actually the size of the target. */
2851 size = expr_size (exp);
2852 if (GET_CODE (size) == CONST_INT
2853 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2854 emit_block_move (target, temp, size,
2855 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2856 else
2857 {
2858 /* Compute the size of the data to copy from the string. */
2859 tree copy_size
2860 = size_binop (MIN_EXPR,
2861 make_tree (sizetype, size),
2862 convert (sizetype,
2863 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2864 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2865 VOIDmode, 0);
2866 rtx label = 0;
2867
2868 /* Copy that much. */
2869 emit_block_move (target, temp, copy_size_rtx,
2870 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2871
2872 /* Figure out how much is left in TARGET that we have to clear.
2873 Do all calculations in ptr_mode. */
2874
2875 addr = XEXP (target, 0);
2876 addr = convert_modes (ptr_mode, Pmode, addr, 1);
2877
2878 if (GET_CODE (copy_size_rtx) == CONST_INT)
2879 {
2880 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
2881 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
2882 }
2883 else
2884 {
2885 addr = force_reg (ptr_mode, addr);
2886 addr = expand_binop (ptr_mode, add_optab, addr,
2887 copy_size_rtx, NULL_RTX, 0,
2888 OPTAB_LIB_WIDEN);
2889
2890 size = expand_binop (ptr_mode, sub_optab, size,
2891 copy_size_rtx, NULL_RTX, 0,
2892 OPTAB_LIB_WIDEN);
2893
2894 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2895 GET_MODE (size), 0, 0);
2896 label = gen_label_rtx ();
2897 emit_jump_insn (gen_blt (label));
2898 }
2899
2900 if (size != const0_rtx)
2901 {
2902#ifdef TARGET_MEM_FUNCTIONS
2903 emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
2904 Pmode, const0_rtx, Pmode, size, ptr_mode);
2905#else
2906 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2907 addr, Pmode, size, ptr_mode);
2908#endif
2909 }
2910
2911 if (label)
2912 emit_label (label);
2913 }
2914 }
2915 else if (GET_MODE (temp) == BLKmode)
2916 emit_block_move (target, temp, expr_size (exp),
2917 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2918 else
2919 emit_move_insn (target, temp);
2920 }
2921
2922 /* If we don't want a value, return NULL_RTX. */
2923 if (! want_value)
2924 return NULL_RTX;
2925
2926 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2927 ??? The latter test doesn't seem to make sense. */
2928 else if (dont_return_target && GET_CODE (temp) != MEM)
2929 return temp;
2930
2931 /* Return TARGET itself if it is a hard register. */
2932 else if (want_value && GET_MODE (target) != BLKmode
2933 && ! (GET_CODE (target) == REG
2934 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2935 return copy_to_reg (target);
2936
2937 else
2938 return target;
2939}
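/* Illustrative note, not part of the original source: for
   `char buf[8] = "abc";' the STRING_CST case above copies the four
   bytes counted by TREE_STRING_LENGTH (including the terminating zero)
   and then clears the remaining four bytes of the target with the
   memset or bzero call emitted above.  */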
2940\f
2941/* Store the value of constructor EXP into the rtx TARGET.
2942 TARGET is either a REG or a MEM. */
2943
2944static void
2945store_constructor (exp, target)
2946 tree exp;
2947 rtx target;
2948{
2949 tree type = TREE_TYPE (exp);
2950
2951 /* We know our target cannot conflict, since safe_from_p has been called. */
2952#if 0
2953 /* Don't try copying piece by piece into a hard register
2954 since that is vulnerable to being clobbered by EXP.
2955 Instead, construct in a pseudo register and then copy it all. */
2956 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2957 {
2958 rtx temp = gen_reg_rtx (GET_MODE (target));
2959 store_constructor (exp, temp);
2960 emit_move_insn (target, temp);
2961 return;
2962 }
2963#endif
2964
2965 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2966 || TREE_CODE (type) == QUAL_UNION_TYPE)
2967 {
2968 register tree elt;
2969
2970 /* Inform later passes that the whole union value is dead. */
2971 if (TREE_CODE (type) == UNION_TYPE
2972 || TREE_CODE (type) == QUAL_UNION_TYPE)
2973 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2974
2975 /* If we are building a static constructor into a register,
2976 set the initial value as zero so we can fold the value into
2977 a constant. */
2978 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2979 emit_move_insn (target, const0_rtx);
2980
2981 /* If the constructor has fewer fields than the structure,
2982 clear the whole structure first. */
2983 else if (list_length (CONSTRUCTOR_ELTS (exp))
2984 != list_length (TYPE_FIELDS (type)))
2985 clear_storage (target, int_size_in_bytes (type));
2986 else
2987 /* Inform later passes that the old value is dead. */
2988 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2989
2990 /* Store each element of the constructor into
2991 the corresponding field of TARGET. */
2992
2993 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2994 {
2995 register tree field = TREE_PURPOSE (elt);
2996 register enum machine_mode mode;
2997 int bitsize;
2998 int bitpos = 0;
2999 int unsignedp;
3000 tree pos, constant = 0, offset = 0;
3001 rtx to_rtx = target;
3002
3003 /* Just ignore missing fields.
3004 We cleared the whole structure, above,
3005 if any fields are missing. */
3006 if (field == 0)
3007 continue;
3008
3009 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3010 unsignedp = TREE_UNSIGNED (field);
3011 mode = DECL_MODE (field);
3012 if (DECL_BIT_FIELD (field))
3013 mode = VOIDmode;
3014
3015 pos = DECL_FIELD_BITPOS (field);
3016 if (TREE_CODE (pos) == INTEGER_CST)
3017 constant = pos;
3018 else if (TREE_CODE (pos) == PLUS_EXPR
3019 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3020 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3021 else
3022 offset = pos;
3023
3024 if (constant)
3025 bitpos = TREE_INT_CST_LOW (constant);
3026
3027 if (offset)
3028 {
3029 rtx offset_rtx;
3030
3031 if (contains_placeholder_p (offset))
3032 offset = build (WITH_RECORD_EXPR, sizetype,
3033 offset, exp);
3034
3035 offset = size_binop (FLOOR_DIV_EXPR, offset,
3036 size_int (BITS_PER_UNIT));
3037
3038 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3039 if (GET_CODE (to_rtx) != MEM)
3040 abort ();
3041
3042 to_rtx
3043 = change_address (to_rtx, VOIDmode,
3044 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3045 force_reg (ptr_mode, offset_rtx)));
3046 }
3047
3048 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
3049 /* The alignment of TARGET is
3050 at least what its type requires. */
3051 VOIDmode, 0,
3052 TYPE_ALIGN (type) / BITS_PER_UNIT,
3053 int_size_in_bytes (type));
3054 }
3055 }
3056 else if (TREE_CODE (type) == ARRAY_TYPE)
3057 {
3058 register tree elt;
3059 register int i;
3060 tree domain = TYPE_DOMAIN (type);
3061 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3062 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3063 tree elttype = TREE_TYPE (type);
3064
3065 /* If the constructor has fewer fields than the structure,
3066 clear the whole structure first. Similarly if this is a
3067 static constructor of a non-BLKmode object. */
bbf6f052 3068
3069 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3070 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3071 clear_storage (target, int_size_in_bytes (type));
3072 else
3073 /* Inform later passes that the old value is dead. */
3074 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3075
3076 /* Store each element of the constructor into
3077 the corresponding element of TARGET, determined
3078 by counting the elements. */
3079 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3080 elt;
3081 elt = TREE_CHAIN (elt), i++)
3082 {
3083 register enum machine_mode mode;
3084 int bitsize;
3085 int bitpos;
3086 int unsignedp;
3087 tree index = TREE_PURPOSE (elt);
3088 rtx xtarget = target;
3089
3090 mode = TYPE_MODE (elttype);
3091 bitsize = GET_MODE_BITSIZE (mode);
3092 unsignedp = TREE_UNSIGNED (elttype);
3093
3094 if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3095 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3096 {
3097 rtx pos_rtx, addr, xtarget;
3098 tree position;
3099
3100 if (index == 0)
3101 index = size_int (i);
3102
3103 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3104 size_int (BITS_PER_UNIT));
3105 position = size_binop (MULT_EXPR, index, position);
3106 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3107 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3108 xtarget = change_address (target, mode, addr);
3109 store_expr (TREE_VALUE (elt), xtarget, 0);
3110 }
3111 else
3112 {
3113 if (index != 0)
3114 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3115 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3116 else
3117 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3118
3119 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
3120 /* The alignment of TARGET is
3121 at least what its type requires. */
3122 VOIDmode, 0,
3123 TYPE_ALIGN (type) / BITS_PER_UNIT,
3124 int_size_in_bytes (type));
3125 }
3126 }
3127 }
3128 /* set constructor assignments */
3129 else if (TREE_CODE (type) == SET_TYPE)
3130 {
3131 tree elt;
3132 rtx xtarget = XEXP (target, 0);
3133 int set_word_size = TYPE_ALIGN (type);
3134 int nbytes = int_size_in_bytes (type);
3135 tree non_const_elements;
3136 int need_to_clear_first;
3137 tree domain = TYPE_DOMAIN (type);
3138 tree domain_min, domain_max, bitlength;
3139
3140 /* The default implementation strategy is to extract the constant
3141 parts of the constructor, use that to initialize the target,
3142 and then "or" in whatever non-constant ranges we need in addition.
3143
3144 If a large set is all zero or all ones, it is
3145 probably better to set it using memset (if available) or bzero.
3146 Also, if a large set has just a single range, it may also be
3147 better to first clear the whole set (using bzero/memset)
3148 and then set the bits we want. */
3149
3150 /* Check for all zeros. */
3151 if (CONSTRUCTOR_ELTS (exp) == NULL_TREE)
3152 {
3153 clear_storage (target, nbytes);
3154 return;
3155 }
3156
3157 if (nbytes < 0)
3158 abort();
3159
3160 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3161 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3162 bitlength = size_binop (PLUS_EXPR,
3163 size_binop (MINUS_EXPR, domain_max, domain_min),
3164 size_one_node);
3165
3166 /* Check for range all ones, or at most a single range.
3167 (This optimization is only a win for big sets.) */
3168 if (GET_MODE (target) == BLKmode && nbytes > 16
3169 && TREE_CHAIN (CONSTRUCTOR_ELTS (exp)) == NULL_TREE)
3170 {
3171 need_to_clear_first = 1;
3172 non_const_elements = CONSTRUCTOR_ELTS (exp);
3173 }
3174 else
3175 {
3176 int nbits = nbytes * BITS_PER_UNIT;
3177 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3178 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3179 char *bit_buffer = (char*) alloca (nbits);
3180 HOST_WIDE_INT word = 0;
3181 int bit_pos = 0;
3182 int ibit = 0;
3183 int offset = 0; /* In bytes from beginning of set. */
3184 non_const_elements = get_set_constructor_bits (exp,
3185 bit_buffer, nbits);
3186 for (;;)
3187 {
3188 if (bit_buffer[ibit])
3189 {
3190 if (BITS_BIG_ENDIAN)
3191 word |= (1 << (set_word_size - 1 - bit_pos));
3192 else
3193 word |= 1 << bit_pos;
3194 }
3195 bit_pos++; ibit++;
3196 if (bit_pos >= set_word_size || ibit == nbits)
3197 {
3198 rtx datum = GEN_INT (word);
3199 rtx to_rtx;
3200 /* The assumption here is that it is safe to use XEXP if
3201 the set is multi-word, but not if it's single-word. */
3202 if (GET_CODE (target) == MEM)
3203 to_rtx = change_address (target, mode,
3204 plus_constant (XEXP (target, 0),
3205 offset));
3206 else if (offset == 0)
3207 to_rtx = target;
3208 else
3209 abort ();
3210 emit_move_insn (to_rtx, datum);
3211 if (ibit == nbits)
3212 break;
3213 word = 0;
3214 bit_pos = 0;
3215 offset += set_word_size / BITS_PER_UNIT;
3216 }
3217 }
3218 need_to_clear_first = 0;
3219 }
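/* Illustrative note, not part of the original source: with a 16-bit
   set word, BITS_BIG_ENDIAN == 0, and constant members 1 and 3, the
   loop above packs

     word |= 1 << 1;  word |= 1 << 3;   so word == 0x000a

   whereas on a BITS_BIG_ENDIAN machine each member instead sets
   1 << (set_word_size - 1 - bit_pos), counting from the most
   significant end of the word.  */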
3220
3221 for (elt = non_const_elements; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3222 {
3223 /* start of range of element or NULL */
3224 tree startbit = TREE_PURPOSE (elt);
3225 /* end of range of element, or element value */
3226 tree endbit = TREE_VALUE (elt);
3227 HOST_WIDE_INT startb, endb;
3228 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3229
3230 bitlength_rtx = expand_expr (bitlength,
3231 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3232
3233 /* handle non-range tuple element like [ expr ] */
3234 if (startbit == NULL_TREE)
3235 {
3236 startbit = save_expr (endbit);
3237 endbit = startbit;
3238 }
3239 startbit = convert (sizetype, startbit);
3240 endbit = convert (sizetype, endbit);
3241 if (! integer_zerop (domain_min))
3242 {
3243 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3244 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3245 }
3246 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3247 EXPAND_CONST_ADDRESS);
3248 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3249 EXPAND_CONST_ADDRESS);
3250
3251 if (REG_P (target))
3252 {
3253 targetx = assign_stack_temp (GET_MODE (target),
3254 GET_MODE_SIZE (GET_MODE (target)),
3255 0);
3256 emit_move_insn (targetx, target);
3257 }
3258 else if (GET_CODE (target) == MEM)
3259 targetx = target;
3260 else
3261 abort ();
3262
3263#ifdef TARGET_MEM_FUNCTIONS
3264 /* Optimization: If startbit and endbit are
3265 constants divisible by BITS_PER_UNIT,
3266 call memset instead. */
3267 if (TREE_CODE (startbit) == INTEGER_CST
3268 && TREE_CODE (endbit) == INTEGER_CST
3269 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3270 && (endb = TREE_INT_CST_LOW (endbit)) % BITS_PER_UNIT == 0)
3271 {
3272
3273 if (need_to_clear_first
3274 && endb - startb != nbytes * BITS_PER_UNIT)
3275 clear_storage (target, nbytes);
3276 need_to_clear_first = 0;
3277 emit_library_call (memset_libfunc, 0,
3278 VOIDmode, 3,
3279 plus_constant (XEXP (targetx, 0), startb),
3280 Pmode,
3281 constm1_rtx, Pmode,
3282 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3283 Pmode);
3284 }
3285 else
3286#endif
3287 {
3288 if (need_to_clear_first)
3289 {
3290 clear_storage (target, nbytes);
3291 need_to_clear_first = 0;
3292 }
3293 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3294 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3295 bitlength_rtx, TYPE_MODE (sizetype),
3296 startbit_rtx, TYPE_MODE (sizetype),
3297 endbit_rtx, TYPE_MODE (sizetype));
3298 }
3299 if (REG_P (target))
3300 emit_move_insn (target, targetx);
3301 }
3302 }
3303
3304 else
3305 abort ();
3306}
3307
3308/* Store the value of EXP (an expression tree)
3309 into a subfield of TARGET which has mode MODE and occupies
3310 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3311 If MODE is VOIDmode, it means that we are storing into a bit-field.
3312
3313 If VALUE_MODE is VOIDmode, return nothing in particular.
3314 UNSIGNEDP is not used in this case.
3315
3316 Otherwise, return an rtx for the value stored. This rtx
3317 has mode VALUE_MODE if that is convenient to do.
3318 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3319
3320 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3321 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3322
3323static rtx
3324store_field (target, bitsize, bitpos, mode, exp, value_mode,
3325 unsignedp, align, total_size)
3326 rtx target;
3327 int bitsize, bitpos;
3328 enum machine_mode mode;
3329 tree exp;
3330 enum machine_mode value_mode;
3331 int unsignedp;
3332 int align;
3333 int total_size;
3334{
3335 HOST_WIDE_INT width_mask = 0;
3336
3337 if (bitsize < HOST_BITS_PER_WIDE_INT)
3338 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3339
3340 /* If we are storing into an unaligned field of an aligned union that is
3341 in a register, we may have the mode of TARGET being an integer mode but
3342 MODE == BLKmode. In that case, get an aligned object whose size and
3343 alignment are the same as TARGET and store TARGET into it (we can avoid
3344 the store if the field being stored is the entire width of TARGET). Then
3345 call ourselves recursively to store the field into a BLKmode version of
3346 that object. Finally, load from the object into TARGET. This is not
3347 very efficient in general, but should only be slightly more expensive
3348 than the otherwise-required unaligned accesses. Perhaps this can be
3349 cleaned up later. */
3350
3351 if (mode == BLKmode
3352 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3353 {
3354 rtx object = assign_stack_temp (GET_MODE (target),
3355 GET_MODE_SIZE (GET_MODE (target)), 0);
3356 rtx blk_object = copy_rtx (object);
3357
3358 MEM_IN_STRUCT_P (object) = 1;
3359 MEM_IN_STRUCT_P (blk_object) = 1;
3360 PUT_MODE (blk_object, BLKmode);
3361
3362 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3363 emit_move_insn (object, target);
3364
3365 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3366 align, total_size);
3367
3368 /* Even though we aren't returning target, we need to
3369 give it the updated value. */
3370 emit_move_insn (target, object);
3371
3372 return blk_object;
3373 }
3374
3375 /* If the structure is in a register or if the component
3376 is a bit field, we cannot use addressing to access it.
3377 Use bit-field techniques or SUBREG to store in it. */
3378
3379 if (mode == VOIDmode
3380 || (mode != BLKmode && ! direct_store[(int) mode])
3381 || GET_CODE (target) == REG
3382 || GET_CODE (target) == SUBREG
3383 /* If the field isn't aligned enough to store as an ordinary memref,
3384 store it as a bit field. */
3385 || (SLOW_UNALIGNED_ACCESS
3386 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3387 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3388 {
3389 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3390
3391 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3392 MODE. */
3393 if (mode != VOIDmode && mode != BLKmode
3394 && mode != TYPE_MODE (TREE_TYPE (exp)))
3395 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3396
3397 /* Store the value in the bitfield. */
3398 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3399 if (value_mode != VOIDmode)
3400 {
3401 /* The caller wants an rtx for the value. */
3402 /* If possible, avoid refetching from the bitfield itself. */
3403 if (width_mask != 0
3404 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3405 {
3406 tree count;
3407 enum machine_mode tmode;
3408
3409 if (unsignedp)
3410 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3411 tmode = GET_MODE (temp);
3412 if (tmode == VOIDmode)
3413 tmode = value_mode;
5c4d7cfb
RS
3414 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3415 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3416 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3417 }
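/* The shift pair above is the usual sign-extension idiom.  As a plain C
   sketch (assuming arithmetic right shift of signed values):

       temp = (temp << (width - bitsize)) >> (width - bitsize);

   where WIDTH is GET_MODE_BITSIZE (tmode); the field's sign bit is
   replicated through the upper bits.  */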
bbf6f052 3418 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
3419 NULL_RTX, value_mode, 0, align,
3420 total_size);
bbf6f052
RK
3421 }
3422 return const0_rtx;
3423 }
3424 else
3425 {
3426 rtx addr = XEXP (target, 0);
3427 rtx to_rtx;
3428
3429 /* If a value is wanted, it must be the lhs;
3430 so make the address stable for multiple use. */
3431
3432 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3433 && ! CONSTANT_ADDRESS_P (addr)
3434 /* A frame-pointer reference is already stable. */
3435 && ! (GET_CODE (addr) == PLUS
3436 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3437 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3438 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3439 addr = copy_to_reg (addr);
3440
3441 /* Now build a reference to just the desired component. */
3442
3443 to_rtx = change_address (target, mode,
3444 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3445 MEM_IN_STRUCT_P (to_rtx) = 1;
3446
3447 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3448 }
3449}
3450\f
6be58303
JW
3451/* Return true if any object containing the innermost array is an unaligned
3452 packed structure field. */
3453
3454static int
3455get_inner_unaligned_p (exp)
3456 tree exp;
3457{
3458 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3459
3460 while (1)
3461 {
3462 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3463 {
3464 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3465 < needed_alignment)
3466 return 1;
3467 }
3468 else if (TREE_CODE (exp) != ARRAY_REF
3469 && TREE_CODE (exp) != NON_LVALUE_EXPR
3470 && ! ((TREE_CODE (exp) == NOP_EXPR
3471 || TREE_CODE (exp) == CONVERT_EXPR)
3472 && (TYPE_MODE (TREE_TYPE (exp))
3473 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3474 break;
3475
3476 exp = TREE_OPERAND (exp, 0);
3477 }
3478
3479 return 0;
3480}
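/* E.g. (illustrative): for a reference like X.packed_member.a[i], where
   the packed member's type is less aligned than the array element type
   requires, this returns 1.  */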
3481
bbf6f052
RK
3482/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3483 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 3484 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
3485
3486 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3487 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
3488 If the position of the field is variable, we store a tree
3489 giving the variable offset (in units) in *POFFSET.
3490 This offset is in addition to the bit position.
3491 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
3492
3493 If any of the extraction expressions is volatile,
3494 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3495
3496 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3497 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
3498 is redundant.
3499
3500 If the field describes a variable-sized object, *PMODE is set to
3501 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3502 this case, but the address of the object can be found. */
bbf6f052
RK
3503
3504tree
4969d05d
RK
3505get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3506 punsignedp, pvolatilep)
bbf6f052
RK
3507 tree exp;
3508 int *pbitsize;
3509 int *pbitpos;
7bb0943f 3510 tree *poffset;
bbf6f052
RK
3511 enum machine_mode *pmode;
3512 int *punsignedp;
3513 int *pvolatilep;
3514{
b50d17a1 3515 tree orig_exp = exp;
bbf6f052
RK
3516 tree size_tree = 0;
3517 enum machine_mode mode = VOIDmode;
742920c7 3518 tree offset = integer_zero_node;
bbf6f052
RK
3519
3520 if (TREE_CODE (exp) == COMPONENT_REF)
3521 {
3522 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3523 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3524 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3525 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3526 }
3527 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3528 {
3529 size_tree = TREE_OPERAND (exp, 1);
3530 *punsignedp = TREE_UNSIGNED (exp);
3531 }
3532 else
3533 {
3534 mode = TYPE_MODE (TREE_TYPE (exp));
3535 *pbitsize = GET_MODE_BITSIZE (mode);
3536 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3537 }
3538
3539 if (size_tree)
3540 {
3541 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
3542 mode = BLKmode, *pbitsize = -1;
3543 else
3544 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
3545 }
3546
3547 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3548 and find the ultimate containing object. */
3549
3550 *pbitpos = 0;
3551
3552 while (1)
3553 {
7bb0943f 3554 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 3555 {
7bb0943f
RS
3556 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3557 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3558 : TREE_OPERAND (exp, 2));
e6d8c385 3559 tree constant = integer_zero_node, var = pos;
bbf6f052 3560
e7f3c83f
RK
3561 /* If this field hasn't been filled in yet, don't go
3562 past it. This should only happen when folding expressions
3563 made during type construction. */
3564 if (pos == 0)
3565 break;
3566
e6d8c385
RK
3567 /* Assume here that the offset is a multiple of a unit.
3568 If not, there should be an explicitly added constant. */
3569 if (TREE_CODE (pos) == PLUS_EXPR
3570 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3571 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 3572 else if (TREE_CODE (pos) == INTEGER_CST)
e6d8c385
RK
3573 constant = pos, var = integer_zero_node;
3574
3575 *pbitpos += TREE_INT_CST_LOW (constant);
3576
3577 if (var)
3578 offset = size_binop (PLUS_EXPR, offset,
3579 size_binop (EXACT_DIV_EXPR, var,
3580 size_int (BITS_PER_UNIT)));
bbf6f052 3581 }
bbf6f052 3582
742920c7 3583 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 3584 {
742920c7
RK
3585 /* This code is based on the code in case ARRAY_REF in expand_expr
3586 below. We assume here that the size of an array element is
3587 always an integral multiple of BITS_PER_UNIT. */
3588
3589 tree index = TREE_OPERAND (exp, 1);
3590 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3591 tree low_bound
3592 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3593 tree index_type = TREE_TYPE (index);
3594
3595 if (! integer_zerop (low_bound))
3596 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3597
3598 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3599 {
3600 index = convert (type_for_size (POINTER_SIZE, 0), index);
3601 index_type = TREE_TYPE (index);
3602 }
3603
3604 index = fold (build (MULT_EXPR, index_type, index,
3605 TYPE_SIZE (TREE_TYPE (exp))));
3606
3607 if (TREE_CODE (index) == INTEGER_CST
3608 && TREE_INT_CST_HIGH (index) == 0)
3609 *pbitpos += TREE_INT_CST_LOW (index);
3610 else
3611 offset = size_binop (PLUS_EXPR, offset,
3612 size_binop (FLOOR_DIV_EXPR, index,
3613 size_int (BITS_PER_UNIT)));
bbf6f052
RK
3614 }
3615 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3616 && ! ((TREE_CODE (exp) == NOP_EXPR
3617 || TREE_CODE (exp) == CONVERT_EXPR)
7f62854a
RK
3618 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3619 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
3620 != UNION_TYPE))
bbf6f052
RK
3621 && (TYPE_MODE (TREE_TYPE (exp))
3622 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3623 break;
7bb0943f
RS
3624
3625 /* If any reference in the chain is volatile, the effect is volatile. */
3626 if (TREE_THIS_VOLATILE (exp))
3627 *pvolatilep = 1;
bbf6f052
RK
3628 exp = TREE_OPERAND (exp, 0);
3629 }
3630
3631 /* If this was a bit-field, see if there is a mode that allows direct
3632 access in case EXP is in memory. */
e7f3c83f 3633 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
bbf6f052
RK
3634 {
3635 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3636 if (mode == BLKmode)
3637 mode = VOIDmode;
3638 }
3639
742920c7
RK
3640 if (integer_zerop (offset))
3641 offset = 0;
3642
b50d17a1
RK
3643 if (offset != 0 && contains_placeholder_p (offset))
3644 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3645
bbf6f052 3646 *pmode = mode;
7bb0943f 3647 *poffset = offset;
bbf6f052
RK
3648 return exp;
3649}
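/* Usage sketch (a hypothetical caller; compare the COMPONENT_REF case
   in expand_expr below):

       int bitsize, bitpos, unsignedp, volatilep = 0;
       tree offset;
       enum machine_mode mode1;
       tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                       &mode1, &unsignedp, &volatilep);

   TEM is then the ultimate containing object and the out-parameters
   describe where the field lies within it.  */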
3650\f
3651/* Given an rtx VALUE that may contain additions and multiplications,
3652 return an equivalent value that just refers to a register or memory.
3653 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
3654 and returning a pseudo-register containing the value.
3655
3656 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
3657
3658rtx
3659force_operand (value, target)
3660 rtx value, target;
3661{
3662 register optab binoptab = 0;
3663 /* Use a temporary to force order of execution of calls to
3664 `force_operand'. */
3665 rtx tmp;
3666 register rtx op2;
3667 /* Use subtarget as the target for operand 0 of a binary operation. */
3668 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3669
3670 if (GET_CODE (value) == PLUS)
3671 binoptab = add_optab;
3672 else if (GET_CODE (value) == MINUS)
3673 binoptab = sub_optab;
3674 else if (GET_CODE (value) == MULT)
3675 {
3676 op2 = XEXP (value, 1);
3677 if (!CONSTANT_P (op2)
3678 && !(GET_CODE (op2) == REG && op2 != subtarget))
3679 subtarget = 0;
3680 tmp = force_operand (XEXP (value, 0), subtarget);
3681 return expand_mult (GET_MODE (value), tmp,
906c4e36 3682 force_operand (op2, NULL_RTX),
bbf6f052
RK
3683 target, 0);
3684 }
3685
3686 if (binoptab)
3687 {
3688 op2 = XEXP (value, 1);
3689 if (!CONSTANT_P (op2)
3690 && !(GET_CODE (op2) == REG && op2 != subtarget))
3691 subtarget = 0;
3692 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3693 {
3694 binoptab = add_optab;
3695 op2 = negate_rtx (GET_MODE (value), op2);
3696 }
3697
3698 /* Check for an addition with OP2 a constant integer and our first
3699 operand a PLUS of a virtual register and something else. In that
3700 case, we want to emit the sum of the virtual register and the
3701 constant first and then add the other value. This allows virtual
3702 register instantiation to simply modify the constant rather than
3703 creating another one around this addition. */
3704 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3705 && GET_CODE (XEXP (value, 0)) == PLUS
3706 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3707 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3708 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3709 {
3710 rtx temp = expand_binop (GET_MODE (value), binoptab,
3711 XEXP (XEXP (value, 0), 0), op2,
3712 subtarget, 0, OPTAB_LIB_WIDEN);
3713 return expand_binop (GET_MODE (value), binoptab, temp,
3714 force_operand (XEXP (XEXP (value, 0), 1), 0),
3715 target, 0, OPTAB_LIB_WIDEN);
3716 }
3717
3718 tmp = force_operand (XEXP (value, 0), subtarget);
3719 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 3720 force_operand (op2, NULL_RTX),
bbf6f052 3721 target, 0, OPTAB_LIB_WIDEN);
8008b228 3722 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
3723 because the only operations we are expanding here are signed ones. */
3724 }
3725 return value;
3726}
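/* For example (illustrative): given VALUE == (plus:SI (reg:SI 70)
   (const_int 4)), force_operand emits an add into a pseudo (or into
   TARGET) and returns that register, while a VALUE that is already a
   REG, MEM, or constant is returned unchanged.  */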
3727\f
3728/* Subroutine of expand_expr:
3729 save the non-copied parts (LIST) of an expr (LHS), and return a list
3730 which can restore these values to their previous values,
3731 should something modify their storage. */
3732
3733static tree
3734save_noncopied_parts (lhs, list)
3735 tree lhs;
3736 tree list;
3737{
3738 tree tail;
3739 tree parts = 0;
3740
3741 for (tail = list; tail; tail = TREE_CHAIN (tail))
3742 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3743 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3744 else
3745 {
3746 tree part = TREE_VALUE (tail);
3747 tree part_type = TREE_TYPE (part);
906c4e36 3748 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
3749 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3750 int_size_in_bytes (part_type), 0);
3668e76e 3751 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (part_type);
bbf6f052 3752 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 3753 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 3754 parts = tree_cons (to_be_saved,
906c4e36
RK
3755 build (RTL_EXPR, part_type, NULL_TREE,
3756 (tree) target),
bbf6f052
RK
3757 parts);
3758 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3759 }
3760 return parts;
3761}
3762
3763/* Subroutine of expand_expr:
3764 record the non-copied parts (LIST) of an expr (LHS), and return a list
3765 which specifies the initial values of these parts. */
3766
3767static tree
3768init_noncopied_parts (lhs, list)
3769 tree lhs;
3770 tree list;
3771{
3772 tree tail;
3773 tree parts = 0;
3774
3775 for (tail = list; tail; tail = TREE_CHAIN (tail))
3776 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3777 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3778 else
3779 {
3780 tree part = TREE_VALUE (tail);
3781 tree part_type = TREE_TYPE (part);
906c4e36 3782 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
3783 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3784 }
3785 return parts;
3786}
3787
3788/* Subroutine of expand_expr: return nonzero iff there is no way that
3789 EXP can reference X, which is being modified. */
3790
3791static int
3792safe_from_p (x, exp)
3793 rtx x;
3794 tree exp;
3795{
3796 rtx exp_rtl = 0;
3797 int i, nops;
3798
6676e72f
RK
3799 if (x == 0
3800 /* If EXP has varying size, we MUST use a target since we currently
3801 have no way of allocating temporaries of variable size. So we
3802 assume here that something at a higher level has prevented a
3803 clash. This is somewhat bogus, but the best we can do. */
45524ce9
RK
3804 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3805 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST))
bbf6f052
RK
3806 return 1;
3807
3808 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3809 find the underlying pseudo. */
3810 if (GET_CODE (x) == SUBREG)
3811 {
3812 x = SUBREG_REG (x);
3813 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3814 return 0;
3815 }
3816
3817 /* If X is a location in the outgoing argument area, it is always safe. */
3818 if (GET_CODE (x) == MEM
3819 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3820 || (GET_CODE (XEXP (x, 0)) == PLUS
3821 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3822 return 1;
3823
3824 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3825 {
3826 case 'd':
3827 exp_rtl = DECL_RTL (exp);
3828 break;
3829
3830 case 'c':
3831 return 1;
3832
3833 case 'x':
3834 if (TREE_CODE (exp) == TREE_LIST)
f32fd778
RS
3835 return ((TREE_VALUE (exp) == 0
3836 || safe_from_p (x, TREE_VALUE (exp)))
bbf6f052
RK
3837 && (TREE_CHAIN (exp) == 0
3838 || safe_from_p (x, TREE_CHAIN (exp))));
3839 else
3840 return 0;
3841
3842 case '1':
3843 return safe_from_p (x, TREE_OPERAND (exp, 0));
3844
3845 case '2':
3846 case '<':
3847 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3848 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3849
3850 case 'e':
3851 case 'r':
3852 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3853 the expression. If it is set, we conflict iff we are that rtx or
3854 both are in memory. Otherwise, we check all operands of the
3855 expression recursively. */
3856
3857 switch (TREE_CODE (exp))
3858 {
3859 case ADDR_EXPR:
e44842fe
RK
3860 return (staticp (TREE_OPERAND (exp, 0))
3861 || safe_from_p (x, TREE_OPERAND (exp, 0)));
bbf6f052
RK
3862
3863 case INDIRECT_REF:
3864 if (GET_CODE (x) == MEM)
3865 return 0;
3866 break;
3867
3868 case CALL_EXPR:
3869 exp_rtl = CALL_EXPR_RTL (exp);
3870 if (exp_rtl == 0)
3871 {
3872 /* Assume that the call will clobber all hard registers and
3873 all of memory. */
3874 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3875 || GET_CODE (x) == MEM)
3876 return 0;
3877 }
3878
3879 break;
3880
3881 case RTL_EXPR:
3882 exp_rtl = RTL_EXPR_RTL (exp);
3883 if (exp_rtl == 0)
3884 /* We don't know what this can modify. */
3885 return 0;
3886
3887 break;
3888
3889 case WITH_CLEANUP_EXPR:
3890 exp_rtl = RTL_EXPR_RTL (exp);
3891 break;
3892
5dab5552
MS
3893 case CLEANUP_POINT_EXPR:
3894 return safe_from_p (x, TREE_OPERAND (exp, 0));
3895
bbf6f052
RK
3896 case SAVE_EXPR:
3897 exp_rtl = SAVE_EXPR_RTL (exp);
3898 break;
3899
8129842c
RS
3900 case BIND_EXPR:
3901 /* The only operand we look at is operand 1. The rest aren't
3902 part of the expression. */
3903 return safe_from_p (x, TREE_OPERAND (exp, 1));
3904
bbf6f052
RK
3905 case METHOD_CALL_EXPR:
3906 /* This takes an rtx argument, but shouldn't appear here. */
3907 abort ();
3908 }
3909
3910 /* If we have an rtx, we do not need to scan our operands. */
3911 if (exp_rtl)
3912 break;
3913
3914 nops = tree_code_length[(int) TREE_CODE (exp)];
3915 for (i = 0; i < nops; i++)
3916 if (TREE_OPERAND (exp, i) != 0
3917 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3918 return 0;
3919 }
3920
3921 /* If we have an rtl, find any enclosed object. Then see if we conflict
3922 with it. */
3923 if (exp_rtl)
3924 {
3925 if (GET_CODE (exp_rtl) == SUBREG)
3926 {
3927 exp_rtl = SUBREG_REG (exp_rtl);
3928 if (GET_CODE (exp_rtl) == REG
3929 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3930 return 0;
3931 }
3932
3933 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3934 are memory and EXP is not readonly. */
3935 return ! (rtx_equal_p (x, exp_rtl)
3936 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3937 && ! TREE_READONLY (exp)));
3938 }
3939
3940 /* If we reach here, it is safe. */
3941 return 1;
3942}
3943
3944/* Subroutine of expand_expr: return nonzero iff EXP is an
3945 expression whose type is statically determinable. */
3946
3947static int
3948fixed_type_p (exp)
3949 tree exp;
3950{
3951 if (TREE_CODE (exp) == PARM_DECL
3952 || TREE_CODE (exp) == VAR_DECL
3953 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3954 || TREE_CODE (exp) == COMPONENT_REF
3955 || TREE_CODE (exp) == ARRAY_REF)
3956 return 1;
3957 return 0;
3958}
3959\f
3960/* expand_expr: generate code for computing expression EXP.
3961 An rtx for the computed value is returned. The value is never null.
3962 In the case of a void EXP, const0_rtx is returned.
3963
3964 The value may be stored in TARGET if TARGET is nonzero.
3965 TARGET is just a suggestion; callers must assume that
3966 the rtx returned may not be the same as TARGET.
3967
3968 If TARGET is CONST0_RTX, it means that the value will be ignored.
3969
3970 If TMODE is not VOIDmode, it suggests generating the
3971 result in mode TMODE. But this is done only when convenient.
3972 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3973 TMODE is just a suggestion; callers must assume that
3974 the rtx returned may not have mode TMODE.
3975
d6a5ac33
RK
3976 Note that TARGET may have neither TMODE nor MODE. In that case, it
3977 probably will not be used.
bbf6f052
RK
3978
3979 If MODIFIER is EXPAND_SUM then when EXP is an addition
3980 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3981 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3982 products as above, or REG or MEM, or constant.
3983 Ordinarily in such cases we would output mul or add instructions
3984 and then return a pseudo reg containing the sum.
3985
3986 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3987 it also marks a label as absolutely required (it can't be dead).
26fcb35a 3988 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
3989 This is used for outputting expressions used in initializers.
3990
3991 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3992 with a constant address even if that address is not normally legitimate.
3993 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
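/* For instance (illustrative): expanding the address expression A + 4
   with EXPAND_SUM may return (plus (reg) (const_int 4)) directly,
   rather than emitting an add insn and returning a new pseudo.  */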
bbf6f052
RK
3994
3995rtx
3996expand_expr (exp, target, tmode, modifier)
3997 register tree exp;
3998 rtx target;
3999 enum machine_mode tmode;
4000 enum expand_modifier modifier;
4001{
b50d17a1
RK
4002 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4003 This is static so it will be accessible to our recursive callees. */
4004 static tree placeholder_list = 0;
bbf6f052
RK
4005 register rtx op0, op1, temp;
4006 tree type = TREE_TYPE (exp);
4007 int unsignedp = TREE_UNSIGNED (type);
4008 register enum machine_mode mode = TYPE_MODE (type);
4009 register enum tree_code code = TREE_CODE (exp);
4010 optab this_optab;
4011 /* Use subtarget as the target for operand 0 of a binary operation. */
4012 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4013 rtx original_target = target;
ca695ac9 4014 /* Maybe defer this until we are sure we are not doing bytecode? */
dd27116b
RK
4015 int ignore = (target == const0_rtx
4016 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
4017 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4018 || code == COND_EXPR)
dd27116b 4019 && TREE_CODE (type) == VOID_TYPE));
bbf6f052
RK
4020 tree context;
4021
ca695ac9 4022
1d556704 4023 if (output_bytecode && modifier != EXPAND_INITIALIZER)
ca695ac9
JB
4024 {
4025 bc_expand_expr (exp);
4026 return NULL;
4027 }
4028
bbf6f052
RK
4029 /* Don't use hard regs as subtargets, because the combiner
4030 can only handle pseudo regs. */
4031 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4032 subtarget = 0;
4033 /* Avoid subtargets inside loops,
4034 since they hide some invariant expressions. */
4035 if (preserve_subexpressions_p ())
4036 subtarget = 0;
4037
dd27116b
RK
4038 /* If we are going to ignore this result, we need only do something
4039 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
4040 is, short-circuit the most common cases here. Note that we must
4041 not call expand_expr with anything but const0_rtx in case this
4042 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 4043
dd27116b
RK
4044 if (ignore)
4045 {
4046 if (! TREE_SIDE_EFFECTS (exp))
4047 return const0_rtx;
4048
4049 /* Ensure we reference a volatile object even if value is ignored. */
4050 if (TREE_THIS_VOLATILE (exp)
4051 && TREE_CODE (exp) != FUNCTION_DECL
4052 && mode != VOIDmode && mode != BLKmode)
4053 {
4054 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4055 if (GET_CODE (temp) == MEM)
4056 temp = copy_to_reg (temp);
4057 return const0_rtx;
4058 }
4059
4060 if (TREE_CODE_CLASS (code) == '1')
4061 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4062 VOIDmode, modifier);
4063 else if (TREE_CODE_CLASS (code) == '2'
4064 || TREE_CODE_CLASS (code) == '<')
4065 {
4066 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4067 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4068 return const0_rtx;
4069 }
4070 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4071 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4072 /* If the second operand has no side effects, just evaluate
4073 the first. */
4074 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4075 VOIDmode, modifier);
dd27116b 4076
90764a87 4077 target = 0;
dd27116b 4078 }
bbf6f052 4079
e44842fe
RK
4080 /* If cse will follow, generate all results into pseudo registers
4081 since 1) that allows cse to find more things
4082 and 2) otherwise cse could produce an insn the machine
4083 cannot support. */
4084
bbf6f052
RK
4085 if (! cse_not_expected && mode != BLKmode && target
4086 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4087 target = subtarget;
4088
bbf6f052
RK
4089 switch (code)
4090 {
4091 case LABEL_DECL:
b552441b
RS
4092 {
4093 tree function = decl_function_context (exp);
4094 /* Handle using a label in a containing function. */
4095 if (function != current_function_decl && function != 0)
4096 {
4097 struct function *p = find_function_data (function);
4098 /* Allocate in the memory associated with the function
4099 that the label is in. */
4100 push_obstacks (p->function_obstack,
4101 p->function_maybepermanent_obstack);
4102
4103 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4104 label_rtx (exp), p->forced_labels);
4105 pop_obstacks ();
4106 }
4107 else if (modifier == EXPAND_INITIALIZER)
4108 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4109 label_rtx (exp), forced_labels);
26fcb35a 4110 temp = gen_rtx (MEM, FUNCTION_MODE,
b552441b 4111 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
26fcb35a
RS
4112 if (function != current_function_decl && function != 0)
4113 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4114 return temp;
b552441b 4115 }
bbf6f052
RK
4116
4117 case PARM_DECL:
4118 if (DECL_RTL (exp) == 0)
4119 {
4120 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 4121 return CONST0_RTX (mode);
bbf6f052
RK
4122 }
4123
d6a5ac33
RK
4124 /* ... fall through ... */
4125
bbf6f052 4126 case VAR_DECL:
2dca20cd
RS
4127 /* If a static var's type was incomplete when the decl was written,
4128 but the type is complete now, lay out the decl now. */
4129 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4130 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4131 {
4132 push_obstacks_nochange ();
4133 end_temporary_allocation ();
4134 layout_decl (exp, 0);
4135 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4136 pop_obstacks ();
4137 }
d6a5ac33
RK
4138
4139 /* ... fall through ... */
4140
2dca20cd 4141 case FUNCTION_DECL:
bbf6f052
RK
4142 case RESULT_DECL:
4143 if (DECL_RTL (exp) == 0)
4144 abort ();
d6a5ac33 4145
e44842fe
RK
4146 /* Ensure the variable is marked as used even if it doesn't go through
4147 a parser. If it hasn't been used yet, write out an external
4148 definition. */
4149 if (! TREE_USED (exp))
4150 {
4151 assemble_external (exp);
4152 TREE_USED (exp) = 1;
4153 }
4154
bbf6f052
RK
4155 /* Handle variables inherited from containing functions. */
4156 context = decl_function_context (exp);
4157
4158 /* We treat inline_function_decl as an alias for the current function
4159 because that is the inline function whose vars, types, etc.
4160 are being merged into the current function.
4161 See expand_inline_function. */
d6a5ac33 4162
bbf6f052
RK
4163 if (context != 0 && context != current_function_decl
4164 && context != inline_function_decl
4165 /* If var is static, we don't need a static chain to access it. */
4166 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4167 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4168 {
4169 rtx addr;
4170
4171 /* Mark as non-local and addressable. */
81feeecb 4172 DECL_NONLOCAL (exp) = 1;
bbf6f052
RK
4173 mark_addressable (exp);
4174 if (GET_CODE (DECL_RTL (exp)) != MEM)
4175 abort ();
4176 addr = XEXP (DECL_RTL (exp), 0);
4177 if (GET_CODE (addr) == MEM)
d6a5ac33
RK
4178 addr = gen_rtx (MEM, Pmode,
4179 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
4180 else
4181 addr = fix_lexical_addr (addr, exp);
4182 return change_address (DECL_RTL (exp), mode, addr);
4183 }
4af3895e 4184
bbf6f052
RK
4185 /* This is the case of an array whose size is to be determined
4186 from its initializer, while the initializer is still being parsed.
4187 See expand_decl. */
d6a5ac33 4188
bbf6f052
RK
4189 if (GET_CODE (DECL_RTL (exp)) == MEM
4190 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4191 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4192 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
4193
4194 /* If DECL_RTL is memory, we are in the normal case and either
4195 the address is not valid or it is not a register and -fforce-addr
4196 is specified, get the address into a register. */
4197
bbf6f052
RK
4198 if (GET_CODE (DECL_RTL (exp)) == MEM
4199 && modifier != EXPAND_CONST_ADDRESS
4200 && modifier != EXPAND_SUM
d6a5ac33
RK
4201 && modifier != EXPAND_INITIALIZER
4202 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
bbf6f052 4203 || (flag_force_addr
d6a5ac33
RK
4204 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4205 return change_address (DECL_RTL (exp), VOIDmode,
4206 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8
RK
4207
4208 /* If the mode of DECL_RTL does not match that of the decl, it
4209 must be a promoted value. We return a SUBREG of the wanted mode,
4210 but mark it so that we know that it was already extended. */
4211
4212 if (GET_CODE (DECL_RTL (exp)) == REG
4213 && GET_MODE (DECL_RTL (exp)) != mode)
4214 {
1499e0a8
RK
4215 /* Get the signedness used for this variable. Ensure we get the
4216 same mode we got when the variable was declared. */
78911e8b
RK
4217 if (GET_MODE (DECL_RTL (exp))
4218 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
4219 abort ();
4220
4221 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4222 SUBREG_PROMOTED_VAR_P (temp) = 1;
4223 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4224 return temp;
4225 }
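/* Promotion sketch (an assumed example): on a target whose
   PROMOTE_MODE widens QImode values to SImode, a `char' variable is
   kept in an SImode pseudo, and we return (subreg:QI (reg:SI N) 0)
   with SUBREG_PROMOTED_VAR_P set so callers know the value is already
   extended.  */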
4226
bbf6f052
RK
4227 return DECL_RTL (exp);
4228
4229 case INTEGER_CST:
4230 return immed_double_const (TREE_INT_CST_LOW (exp),
4231 TREE_INT_CST_HIGH (exp),
4232 mode);
4233
4234 case CONST_DECL:
4235 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4236
4237 case REAL_CST:
4238 /* If optimized, generate immediate CONST_DOUBLE
4239 which will be turned into memory by reload if necessary.
4240
4241 We used to force a register so that loop.c could see it. But
4242 this does not allow gen_* patterns to perform optimizations with
4243 the constants. It also produces two insns in cases like "x = 1.0;".
4244 On most machines, floating-point constants are not permitted in
4245 many insns, so we'd end up copying it to a register in any case.
4246
4247 Now, we do the copying in expand_binop, if appropriate. */
4248 return immed_real_const (exp);
4249
4250 case COMPLEX_CST:
4251 case STRING_CST:
4252 if (! TREE_CST_RTL (exp))
4253 output_constant_def (exp);
4254
4255 /* TREE_CST_RTL probably contains a constant address.
4256 On RISC machines where a constant address isn't valid,
4257 make some insns to get that address into a register. */
4258 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4259 && modifier != EXPAND_CONST_ADDRESS
4260 && modifier != EXPAND_INITIALIZER
4261 && modifier != EXPAND_SUM
d6a5ac33
RK
4262 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4263 || (flag_force_addr
4264 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
4265 return change_address (TREE_CST_RTL (exp), VOIDmode,
4266 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4267 return TREE_CST_RTL (exp);
4268
4269 case SAVE_EXPR:
4270 context = decl_function_context (exp);
d6a5ac33 4271
bbf6f052
RK
4272 /* We treat inline_function_decl as an alias for the current function
4273 because that is the inline function whose vars, types, etc.
4274 are being merged into the current function.
4275 See expand_inline_function. */
4276 if (context == current_function_decl || context == inline_function_decl)
4277 context = 0;
4278
4279 /* If this is non-local, handle it. */
4280 if (context)
4281 {
4282 temp = SAVE_EXPR_RTL (exp);
4283 if (temp && GET_CODE (temp) == REG)
4284 {
4285 put_var_into_stack (exp);
4286 temp = SAVE_EXPR_RTL (exp);
4287 }
4288 if (temp == 0 || GET_CODE (temp) != MEM)
4289 abort ();
4290 return change_address (temp, mode,
4291 fix_lexical_addr (XEXP (temp, 0), exp));
4292 }
4293 if (SAVE_EXPR_RTL (exp) == 0)
4294 {
4295 if (mode == BLKmode)
34a25822
RK
4296 {
4297 temp
4298 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
05e3bdb9 4299 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
34a25822 4300 }
bbf6f052 4301 else
78911e8b 4302 temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));
1499e0a8 4303
bbf6f052 4304 SAVE_EXPR_RTL (exp) = temp;
bbf6f052
RK
4305 if (!optimize && GET_CODE (temp) == REG)
4306 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4307 save_expr_regs);
ff78f773
RK
4308
4309 /* If the mode of TEMP does not match that of the expression, it
4310 must be a promoted value. We pass store_expr a SUBREG of the
4311 wanted mode but mark it so that we know that it was already
4312 extended. Note that `unsignedp' was modified above in
4313 this case. */
4314
4315 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4316 {
4317 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4318 SUBREG_PROMOTED_VAR_P (temp) = 1;
4319 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4320 }
4321
4322 store_expr (TREE_OPERAND (exp, 0), temp, 0);
bbf6f052 4323 }
1499e0a8
RK
4324
4325 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4326 must be a promoted value. We return a SUBREG of the wanted mode,
adc22a04 4327 but mark it so that we know that it was already extended. */
1499e0a8
RK
4328
4329 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4330 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4331 {
e70d22c8
RK
4332 /* Compute the signedness and make the proper SUBREG. */
4333 promote_mode (type, mode, &unsignedp, 0);
4334 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
4335 SUBREG_PROMOTED_VAR_P (temp) = 1;
4336 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4337 return temp;
4338 }
4339
bbf6f052
RK
4340 return SAVE_EXPR_RTL (exp);
4341
b50d17a1
RK
4342 case PLACEHOLDER_EXPR:
4343 /* If there is an object at the head of the placeholder list,
4344 see if some object in its references is of type TYPE. For
4345 further information, see tree.def. */
4346 if (placeholder_list)
4347 {
4348 tree object;
f59d43a9 4349 tree old_list = placeholder_list;
b50d17a1
RK
4350
4351 for (object = TREE_PURPOSE (placeholder_list);
4352 TREE_TYPE (object) != type
4353 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4805bfa0
RK
4354 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4355 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4356 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
b50d17a1
RK
4357 object = TREE_OPERAND (object, 0))
4358 ;
4359
4805bfa0 4360 if (object && TREE_TYPE (object) == type)
f59d43a9
RK
4361 {
4362 /* Expand this object skipping the list entries before
4363 it was found in case it is also a PLACEHOLDER_EXPR.
4364 In that case, we want to translate it using subsequent
4365 entries. */
4366 placeholder_list = TREE_CHAIN (placeholder_list);
4367 temp = expand_expr (object, original_target, tmode, modifier);
4368 placeholder_list = old_list;
4369 return temp;
4370 }
b50d17a1
RK
4371 }
4372
4373 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4374 abort ();
4375
4376 case WITH_RECORD_EXPR:
4377 /* Put the object on the placeholder list, expand our first operand,
4378 and pop the list. */
4379 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4380 placeholder_list);
4381 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4382 tmode, modifier);
4383 placeholder_list = TREE_CHAIN (placeholder_list);
4384 return target;
4385
bbf6f052 4386 case EXIT_EXPR:
e44842fe
RK
4387 expand_exit_loop_if_false (NULL_PTR,
4388 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
4389 return const0_rtx;
4390
4391 case LOOP_EXPR:
0088fcb1 4392 push_temp_slots ();
bbf6f052
RK
4393 expand_start_loop (1);
4394 expand_expr_stmt (TREE_OPERAND (exp, 0));
4395 expand_end_loop ();
0088fcb1 4396 pop_temp_slots ();
bbf6f052
RK
4397
4398 return const0_rtx;
4399
4400 case BIND_EXPR:
4401 {
4402 tree vars = TREE_OPERAND (exp, 0);
4403 int vars_need_expansion = 0;
4404
4405 /* Need to open a binding contour here because
4406 if there are any cleanups they must be contained here. */
4407 expand_start_bindings (0);
4408
2df53c0b
RS
4409 /* Mark the corresponding BLOCK for output in its proper place. */
4410 if (TREE_OPERAND (exp, 2) != 0
4411 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4412 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
4413
4414 /* If VARS have not yet been expanded, expand them now. */
4415 while (vars)
4416 {
4417 if (DECL_RTL (vars) == 0)
4418 {
4419 vars_need_expansion = 1;
4420 expand_decl (vars);
4421 }
4422 expand_decl_init (vars);
4423 vars = TREE_CHAIN (vars);
4424 }
4425
4426 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4427
4428 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4429
4430 return temp;
4431 }
4432
4433 case RTL_EXPR:
4434 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4435 abort ();
4436 emit_insns (RTL_EXPR_SEQUENCE (exp));
4437 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
99310285 4438 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
ca814259 4439 free_temps_for_rtl_expr (exp);
bbf6f052
RK
4440 return RTL_EXPR_RTL (exp);
4441
4442 case CONSTRUCTOR:
dd27116b
RK
4443 /* If we don't need the result, just ensure we evaluate any
4444 subexpressions. */
4445 if (ignore)
4446 {
4447 tree elt;
4448 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4449 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4450 return const0_rtx;
4451 }
3207b172 4452
4af3895e
JVA
4453 /* All elts simple constants => refer to a constant in memory. But
4454 if this is a non-BLKmode mode, let it store a field at a time
4455 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 4456 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
4457 store directly into the target unless the type is large enough
4458 that memcpy will be used. If we are making an initializer and
3207b172 4459 all operands are constant, put it in memory as well. */
dd27116b 4460 else if ((TREE_STATIC (exp)
3207b172
RK
4461 && ((mode == BLKmode
4462 && ! (target != 0 && safe_from_p (target, exp)))
d720b9d1
RK
4463 || TREE_ADDRESSABLE (exp)
4464 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4465 && (move_by_pieces_ninsns
4466 (TREE_INT_CST_LOW (TYPE_SIZE (type)),
4467 TYPE_ALIGN (type))
4468 > MOVE_RATIO))))
dd27116b 4469 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
4470 {
4471 rtx constructor = output_constant_def (exp);
b552441b
RS
4472 if (modifier != EXPAND_CONST_ADDRESS
4473 && modifier != EXPAND_INITIALIZER
4474 && modifier != EXPAND_SUM
d6a5ac33
RK
4475 && (! memory_address_p (GET_MODE (constructor),
4476 XEXP (constructor, 0))
4477 || (flag_force_addr
4478 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
4479 constructor = change_address (constructor, VOIDmode,
4480 XEXP (constructor, 0));
4481 return constructor;
4482 }
4483
bbf6f052
RK
4484 else
4485 {
4486 if (target == 0 || ! safe_from_p (target, exp))
4487 {
4488 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
d6a5ac33 4489 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
bbf6f052
RK
4490 else
4491 {
3b94d087
RS
4492 target
4493 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
05e3bdb9 4494 if (AGGREGATE_TYPE_P (type))
3b94d087 4495 MEM_IN_STRUCT_P (target) = 1;
bbf6f052
RK
4496 }
4497 }
4498 store_constructor (exp, target);
4499 return target;
4500 }
4501
4502 case INDIRECT_REF:
4503 {
4504 tree exp1 = TREE_OPERAND (exp, 0);
4505 tree exp2;
4506
4507 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
4508 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4509 This code has the same general effect as simply doing
4510 expand_expr on the save expr, except that the expression PTR
4511 is computed for use as a memory address. This means different
4512 code, suitable for indexing, may be generated. */
4513 if (TREE_CODE (exp1) == SAVE_EXPR
4514 && SAVE_EXPR_RTL (exp1) == 0
88f63c77 4515 && TYPE_MODE (TREE_TYPE (exp1)) == ptr_mode)
bbf6f052 4516 {
906c4e36
RK
4517 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4518 VOIDmode, EXPAND_SUM);
bbf6f052
RK
4519 op0 = memory_address (mode, temp);
4520 op0 = copy_all_regs (op0);
4521 SAVE_EXPR_RTL (exp1) = op0;
4522 }
4523 else
4524 {
906c4e36 4525 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
4526 op0 = memory_address (mode, op0);
4527 }
8c8a8e34
JW
4528
4529 temp = gen_rtx (MEM, mode, op0);
4530 /* If address was computed by addition,
4531 mark this as an element of an aggregate. */
4532 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4533 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4534 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
05e3bdb9 4535 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
8c8a8e34
JW
4536 || (TREE_CODE (exp1) == ADDR_EXPR
4537 && (exp2 = TREE_OPERAND (exp1, 0))
05e3bdb9 4538 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
8c8a8e34 4539 MEM_IN_STRUCT_P (temp) = 1;
2c4c436a 4540 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
89742723 4541#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
bbf6f052
RK
4542 a location is accessed through a pointer to const does not mean
4543 that the value there can never change. */
8c8a8e34 4544 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
bbf6f052 4545#endif
8c8a8e34
JW
4546 return temp;
4547 }
bbf6f052
RK
4548
4549 case ARRAY_REF:
742920c7
RK
4550 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4551 abort ();
bbf6f052 4552
bbf6f052 4553 {
742920c7
RK
4554 tree array = TREE_OPERAND (exp, 0);
4555 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4556 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4557 tree index = TREE_OPERAND (exp, 1);
4558 tree index_type = TREE_TYPE (index);
bbf6f052 4559 int i;
bbf6f052 4560
b50d17a1
RK
4561 if (TREE_CODE (low_bound) != INTEGER_CST
4562 && contains_placeholder_p (low_bound))
4563 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4564
d4c89139
PB
4565 /* Optimize the special-case of a zero lower bound.
4566
4567 We convert the low_bound to sizetype to avoid some problems
4568 with constant folding. (E.g. suppose the lower bound is 1,
4569 and its mode is QI. Without the conversion, (ARRAY
4570 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4571 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4572
4573 But sizetype isn't quite right either (especially if
4574 the lowbound is negative). FIXME */
4575
742920c7 4576 if (! integer_zerop (low_bound))
d4c89139
PB
4577 index = fold (build (MINUS_EXPR, index_type, index,
4578 convert (sizetype, low_bound)));
742920c7 4579
6be58303
JW
4580 if ((TREE_CODE (index) != INTEGER_CST
4581 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
c7a7ac46 4582 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
742920c7 4583 {
6be58303
JW
4584 /* Nonconstant array index or nonconstant element size, and
4585 not an array in an unaligned (packed) structure field.
742920c7
RK
4586 Generate the tree for *(&array+index) and expand that,
4587 except do it in a language-independent way
4588 and don't complain about non-lvalue arrays.
4589 `mark_addressable' should already have been called
4590 for any array for which this case will be reached. */
4591
4592 /* Don't forget the const or volatile flag from the array
4593 element. */
4594 tree variant_type = build_type_variant (type,
4595 TREE_READONLY (exp),
4596 TREE_THIS_VOLATILE (exp));
4597 tree array_adr = build1 (ADDR_EXPR,
4598 build_pointer_type (variant_type), array);
4599 tree elt;
b50d17a1 4600 tree size = size_in_bytes (type);
742920c7
RK
4601
4602 /* Convert the integer argument to a type the same size as a
4603 pointer so the multiply won't overflow spuriously. */
4604 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4605 index = convert (type_for_size (POINTER_SIZE, 0), index);
4606
b50d17a1
RK
4607 if (TREE_CODE (size) != INTEGER_CST
4608 && contains_placeholder_p (size))
4609 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4610
742920c7
RK
4611 /* Don't think the address has side effects
4612 just because the array does.
4613 (In some cases the address might have side effects,
4614 and we fail to record that fact here. However, it should not
4615 matter, since expand_expr should not care.) */
4616 TREE_SIDE_EFFECTS (array_adr) = 0;
4617
4618 elt = build1 (INDIRECT_REF, type,
4619 fold (build (PLUS_EXPR,
4620 TYPE_POINTER_TO (variant_type),
4621 array_adr,
4622 fold (build (MULT_EXPR,
4623 TYPE_POINTER_TO (variant_type),
b50d17a1 4624 index, size)))));
742920c7
RK
4625
4626 /* Volatility, etc., of new expression is same as old
4627 expression. */
4628 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4629 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4630 TREE_READONLY (elt) = TREE_READONLY (exp);
4631
4632 return expand_expr (elt, target, tmode, modifier);
4633 }
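/* In source terms (illustrative): for a nonconstant index I this
   expands A[I] as *(&A + I * sizeof (A[0])), with the multiply and
   add built through fold so that any constant parts collapse.  */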
4634
4635 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
4636 This is not done in fold so it won't happen inside &.
4637 Don't fold if this is for wide characters since it's too
4638 difficult to do correctly and this is a very rare case. */
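/* E.g., "foo"[2] is folded here directly to the character constant 'o'.  */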
742920c7
RK
4639
4640 if (TREE_CODE (array) == STRING_CST
4641 && TREE_CODE (index) == INTEGER_CST
4642 && !TREE_INT_CST_HIGH (index)
307b821c 4643 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
ad2e7dd0
RK
4644 && GET_MODE_CLASS (mode) == MODE_INT
4645 && GET_MODE_SIZE (mode) == 1)
307b821c 4646 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 4647
742920c7
RK
4648 /* If this is a constant index into a constant array,
4649 just get the value from the array. Handle both the cases when
4650 we have an explicit constructor and when our operand is a variable
4651 that was declared const. */
4af3895e 4652
742920c7
RK
4653 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4654 {
4655 if (TREE_CODE (index) == INTEGER_CST
4656 && TREE_INT_CST_HIGH (index) == 0)
4657 {
4658 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4659
4660 i = TREE_INT_CST_LOW (index);
4661 while (elem && i--)
4662 elem = TREE_CHAIN (elem);
4663 if (elem)
4664 return expand_expr (fold (TREE_VALUE (elem)), target,
4665 tmode, modifier);
4666 }
4667 }
4af3895e 4668
742920c7
RK
4669 else if (optimize >= 1
4670 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4671 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4672 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4673 {
4674 if (TREE_CODE (index) == INTEGER_CST
4675 && TREE_INT_CST_HIGH (index) == 0)
4676 {
4677 tree init = DECL_INITIAL (array);
4678
4679 i = TREE_INT_CST_LOW (index);
4680 if (TREE_CODE (init) == CONSTRUCTOR)
4681 {
4682 tree elem = CONSTRUCTOR_ELTS (init);
4683
03dc44a6
RS
4684 while (elem
4685 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
4686 elem = TREE_CHAIN (elem);
4687 if (elem)
4688 return expand_expr (fold (TREE_VALUE (elem)), target,
4689 tmode, modifier);
4690 }
4691 else if (TREE_CODE (init) == STRING_CST
4692 && i < TREE_STRING_LENGTH (init))
307b821c 4693 return GEN_INT (TREE_STRING_POINTER (init)[i]);
742920c7
RK
4694 }
4695 }
4696 }
8c8a8e34 4697
bbf6f052
RK
4698 /* Treat array-ref with constant index as a component-ref. */
4699
4700 case COMPONENT_REF:
4701 case BIT_FIELD_REF:
4af3895e 4702 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
4703 appropriate field if it is present. Don't do this if we have
4704 already written the data since we want to refer to that copy
4705 and varasm.c assumes that's what we'll do. */
4af3895e 4706 if (code != ARRAY_REF
7a0b7b9a
RK
4707 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4708 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
4709 {
4710 tree elt;
4711
4712 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4713 elt = TREE_CHAIN (elt))
4714 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4715 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4716 }
4717
bbf6f052
RK
4718 {
4719 enum machine_mode mode1;
4720 int bitsize;
4721 int bitpos;
7bb0943f 4722 tree offset;
bbf6f052 4723 int volatilep = 0;
7bb0943f 4724 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
bbf6f052 4725 &mode1, &unsignedp, &volatilep);
034f9101 4726 int alignment;
bbf6f052 4727
e7f3c83f
RK
4728 /* If we got back the original object, something is wrong. Perhaps
4729 we are evaluating an expression too early. In any event, don't
4730 infinitely recurse. */
4731 if (tem == exp)
4732 abort ();
4733
bbf6f052
RK
4734 /* In some cases, we will be offsetting OP0's address by a constant.
4735 So get it as a sum, if possible. If we will be using it
4736 directly in an insn, we validate it. */
906c4e36 4737 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052 4738
8c8a8e34 4739 /* If this is a constant, put it into a register if it is a
8008b228 4740 legitimate constant, and into memory if it isn't. */
8c8a8e34
JW
4741 if (CONSTANT_P (op0))
4742 {
4743 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 4744 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
4745 op0 = force_reg (mode, op0);
4746 else
4747 op0 = validize_mem (force_const_mem (mode, op0));
4748 }
4749
034f9101 4750 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
7bb0943f
RS
4751 if (offset != 0)
4752 {
906c4e36 4753 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
4754
4755 if (GET_CODE (op0) != MEM)
4756 abort ();
4757 op0 = change_address (op0, VOIDmode,
88f63c77
RK
4758 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
4759 force_reg (ptr_mode, offset_rtx)));
034f9101
RS
4760 /* If we have a variable offset, the known alignment
4761 is only that of the innermost structure containing the field.
4762 (Actually, we could sometimes do better by using the
4763 size of an element of the innermost array, but no need.) */
4764 if (TREE_CODE (exp) == COMPONENT_REF
4765 || TREE_CODE (exp) == BIT_FIELD_REF)
4766 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4767 / BITS_PER_UNIT);
7bb0943f
RS
4768 }
4769
bbf6f052
RK
4770 /* Don't forget about volatility even if this is a bitfield. */
4771 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4772 {
4773 op0 = copy_rtx (op0);
4774 MEM_VOLATILE_P (op0) = 1;
4775 }
4776
ccc98036
RS
4777 /* In cases where an aligned union has an unaligned object
4778 as a field, we might be extracting a BLKmode value from
4779 an integer-mode (e.g., SImode) object. Handle this case
4780 by doing the extract into an object as wide as the field
4781 (which we know to be the width of a basic mode), then
4782 storing into memory, and changing the mode to BLKmode. */
bbf6f052 4783 if (mode1 == VOIDmode
ccc98036 4784 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a
JW
4785 || (modifier != EXPAND_CONST_ADDRESS
4786 && modifier != EXPAND_SUM
4787 && modifier != EXPAND_INITIALIZER
4788 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
4789 /* If the field isn't aligned enough to fetch as a memref,
4790 fetch it as a bit field. */
4791 || (SLOW_UNALIGNED_ACCESS
4792 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4793 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
bbf6f052 4794 {
bbf6f052
RK
4795 enum machine_mode ext_mode = mode;
4796
4797 if (ext_mode == BLKmode)
4798 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4799
4800 if (ext_mode == BLKmode)
4801 abort ();
4802
4803 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4804 unsignedp, target, ext_mode, ext_mode,
034f9101 4805 alignment,
bbf6f052
RK
4806 int_size_in_bytes (TREE_TYPE (tem)));
4807 if (mode == BLKmode)
4808 {
4809 rtx new = assign_stack_temp (ext_mode,
4810 bitsize / BITS_PER_UNIT, 0);
4811
4812 emit_move_insn (new, op0);
4813 op0 = copy_rtx (new);
4814 PUT_MODE (op0, BLKmode);
092dded9 4815 MEM_IN_STRUCT_P (op0) = 1;
bbf6f052
RK
4816 }
4817
4818 return op0;
4819 }
4820
4821 /* Get a reference to just this component. */
4822 if (modifier == EXPAND_CONST_ADDRESS
4823 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4824 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4825 (bitpos / BITS_PER_UNIT)));
4826 else
4827 op0 = change_address (op0, mode1,
4828 plus_constant (XEXP (op0, 0),
4829 (bitpos / BITS_PER_UNIT)));
4830 MEM_IN_STRUCT_P (op0) = 1;
4831 MEM_VOLATILE_P (op0) |= volatilep;
4832 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4833 return op0;
4834 if (target == 0)
4835 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4836 convert_move (target, op0, unsignedp);
4837 return target;
4838 }
4839
4840 case OFFSET_REF:
4841 {
da120c2f 4842 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
bbf6f052 4843 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
906c4e36 4844 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
4845 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4846 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 4847 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
89742723 4848#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
bbf6f052
RK
4849 a location is accessed through a pointer to const does not mean
4850 that the value there can never change. */
4851 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4852#endif
4853 return temp;
4854 }
4855
4856 /* Intended for a reference to a buffer of a file-object in Pascal.
4857 But it's not certain that a special tree code will really be
4858 necessary for these. INDIRECT_REF might work for them. */
4859 case BUFFER_REF:
4860 abort ();
4861
7308a047 4862 case IN_EXPR:
7308a047 4863 {
d6a5ac33
RK
4864 /* Pascal set IN expression.
4865
4866 Algorithm:
4867 rlo = set_low - (set_low%bits_per_word);
4868 the_word = set [ (index - rlo)/bits_per_word ];
4869 bit_index = index % bits_per_word;
4870 bitmask = 1 << bit_index;
4871 return !!(the_word & bitmask); */
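/* A minimal C sketch of the in-range test (illustrative, assuming
   BITS_PER_UNIT == 8):

       int in_set = (set_bytes[(index - rlo) / 8] >> (index % 8)) & 1;

   where SET_BYTES is the address of the set and RLO is the low bound
   rounded down to a unit boundary.  */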
4872
7308a047
RS
4873 tree set = TREE_OPERAND (exp, 0);
4874 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 4875 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 4876 tree set_type = TREE_TYPE (set);
7308a047
RS
4877 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4878 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
4879 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4880 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4881 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4882 rtx setval = expand_expr (set, 0, VOIDmode, 0);
4883 rtx setaddr = XEXP (setval, 0);
4884 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
4885 rtx rlow;
4886 rtx diff, quo, rem, addr, bit, result;
7308a047 4887
d6a5ac33
RK
4888 preexpand_calls (exp);
4889
4890 /* If domain is empty, answer is no. Likewise if index is constant
4891 and out of bounds. */
4892 if ((TREE_CODE (set_high_bound) == INTEGER_CST
4893 && TREE_CODE (set_low_bound) == INTEGER_CST
4894 && tree_int_cst_lt (set_high_bound, set_low_bound)
4895 || (TREE_CODE (index) == INTEGER_CST
4896 && TREE_CODE (set_low_bound) == INTEGER_CST
4897 && tree_int_cst_lt (index, set_low_bound))
4898 || (TREE_CODE (set_high_bound) == INTEGER_CST
4899 && TREE_CODE (index) == INTEGER_CST
4900 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
4901 return const0_rtx;
4902
d6a5ac33
RK
4903 if (target == 0)
4904 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
4905
4906 /* If we get here, we have to generate the code for both cases
4907 (in range and out of range). */
4908
4909 op0 = gen_label_rtx ();
4910 op1 = gen_label_rtx ();
4911
4912 if (! (GET_CODE (index_val) == CONST_INT
4913 && GET_CODE (lo_r) == CONST_INT))
4914 {
17938e57 4915 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
d6a5ac33 4916 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
4917 emit_jump_insn (gen_blt (op1));
4918 }
4919
4920 if (! (GET_CODE (index_val) == CONST_INT
4921 && GET_CODE (hi_r) == CONST_INT))
4922 {
17938e57 4923 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
d6a5ac33 4924 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
4925 emit_jump_insn (gen_bgt (op1));
4926 }
4927
4928 /* Calculate the element number of bit zero in the first word
4929 of the set. */
4930 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
4931 rlow = GEN_INT (INTVAL (lo_r)
4932 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 4933 else
17938e57
RK
4934 rlow = expand_binop (index_mode, and_optab, lo_r,
4935 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 4936 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 4937
d6a5ac33
RK
4938 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
4939 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
4940
4941 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 4942 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 4943 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
4944 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4945
7308a047 4946 addr = memory_address (byte_mode,
d6a5ac33
RK
4947 expand_binop (index_mode, add_optab, diff,
4948 setaddr, NULL_RTX, iunsignedp,
17938e57 4949 OPTAB_LIB_WIDEN));
d6a5ac33 4950
7308a047
RS
4951 /* Extract the bit we want to examine. */
4952 bit = expand_shift (RSHIFT_EXPR, byte_mode,
17938e57
RK
4953 gen_rtx (MEM, byte_mode, addr),
4954 make_tree (TREE_TYPE (index), rem),
4955 NULL_RTX, 1);
4956 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4957 GET_MODE (target) == byte_mode ? target : 0,
7308a047 4958 1, OPTAB_LIB_WIDEN);
17938e57
RK
4959
4960 if (result != target)
4961 convert_move (target, result, 1);
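
        /* In C-like pseudocode, the in-range path computed above is
           roughly (a hedged sketch mirroring the rtx temporaries, not
           code from this file):

             rlow   = low & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT);
             diff   = index - rlow;
             rem    = index % BITS_PER_UNIT;
             result = (*(unsigned char *) (setaddr + diff) >> rem) & 1;

           Note that QUO is computed above as well, but the byte address
           actually used is setaddr + diff.  The out-of-range checks
           branch around all of this and store 0 instead.  */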

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }

    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
        {
          RTL_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
          cleanups_this_call
            = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 2) = 0;
          (*interim_eh_hook) (NULL_TREE);
        }
      return RTL_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
        extern int temp_slot_level;
        tree old_cleanups = cleanups_this_call;
        int old_temp_level = target_temp_slot_level;
        push_temp_slots ();
        target_temp_slot_level = temp_slot_level;
        op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
        /* If we're going to use this value, load it up now.  */
        if (! ignore)
          op0 = force_not_mem (op0);
        expand_cleanups_to (old_cleanups);
        preserve_temp_slots (op0);
        free_temp_slots ();
        pop_temp_slots ();
        target_temp_slot_level = old_temp_level;
      }
      return op0;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        return expand_builtin (exp, target, subtarget, tmode, ignore);

      /* If this call was expanded already by preexpand_calls,
         just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
        return CALL_EXPR_RTL (exp);

      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
          if (target == 0)
            {
              if (mode == BLKmode)
                {
                  if (TYPE_SIZE (type) == 0
                      || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
                    abort ();
                  target = assign_stack_temp (BLKmode,
                                              (TREE_INT_CST_LOW (TYPE_SIZE (type))
                                               + BITS_PER_UNIT - 1)
                                              / BITS_PER_UNIT, 0);
                  MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
                }
              else
                target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
            }

          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        change_address (target, TYPE_MODE (valtype), 0), 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
                         TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, 1,
                         int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }
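
      /* Illustration of the union path above (hedged; the type is
         hypothetical): for a GNU C cast to a union type,

           union u { int i; float f; };
           ... (union u) 3 ...

         the operand is stored at offset 0 of a union-sized temporary
         (or into a register via store_field) and the whole union is
         the value returned.  */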

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return op0;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
        return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
        return
          convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                         op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (flag_force_mem && GET_CODE (op0) == MEM)
        op0 = copy_to_reg (op0);

      if (target == 0)
        return
          convert_to_mode (mode, op0,
                           TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0,
                      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }

      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
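      /* For example (an illustrative sketch, not taken from this file):
         with P = &ARR[2] and 4-byte elements, the path below can fold
         the whole address into a single RTL constant such as

           (plus (symbol_ref ARR) (const_int 8))

         instead of emitting a run-time addition.  */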
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || mode == ptr_mode)
        {
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 EXPAND_SUM);
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

    both_summands:
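      /* What follows canonicalizes the sum.  Sketched on an example
         (illustrative only): a sum like (x + c1) + (y + c2) is
         reassociated so any MULT term comes first and the constants
         fold into one trailing term,

           (plus (plus x y) (const_int c1+c2))

         which is the shape the indexed-address machinery expects.  */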
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }
      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
        }

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
        temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx (PLUS, mode, op0, op1);

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
                                 VOIDmode, modifier);
          rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                 VOIDmode, modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return plus_constant (op0, - INTVAL (op1));
          else
            return gen_rtx (MINUS, mode, op0, op1);
        }
      /* Convert A - const to A + (-const).  */
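      /* E.g. (a sketch): signed `a - 5' becomes `a + (-5)' and reuses
         the PLUS_EXPR code; for unsigned types, where the negated
         constant does not exist, the sum is formed in the corresponding
         signed type and converted back.  */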
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        {
          tree negated = fold (build1 (NEGATE_EXPR, type,
                                       TREE_OPERAND (exp, 1)));

          /* Deal with the case where we can't negate the constant
             in TYPE.  */
          if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
            {
              tree newtype = signed_type (type);
              tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
              tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
              tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));

              if (! TREE_OVERFLOW (newneg))
                return expand_expr (convert (type,
                                             build (PLUS_EXPR, newtype,
                                                    newop0, newneg)),
                                    target, tmode, modifier);
            }
          else
            {
              exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
              goto plus_expr;
            }
        }
      this_optab = sub_optab;
      goto binop;

    case MULT_EXPR:
      preexpand_calls (exp);
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          register tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);

          /* Apply distributive law if OP0 is x+c.  */
          if (GET_CODE (op0) == PLUS
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            return gen_rtx (PLUS, mode,
                            gen_rtx (MULT, mode, XEXP (op0, 0),
                                     GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
                            GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
                                     * INTVAL (XEXP (op0, 1))));

          if (GET_CODE (op0) != REG)
            op0 = force_operand (op0, NULL_RTX);
          if (GET_CODE (op0) != REG)
            op0 = copy_to_mode_reg (mode, op0);

          return gen_rtx (MULT, mode, op0,
                          GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
        }

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
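      /* For instance (a sketch; the actual modes are target-dependent):

           short a, b;
           int c = (int) a * (int) b;

         can use [us]mul_widen_optab to multiply the narrow operands
         into the wider product directly, with no explicit extensions.  */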
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
          this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                        ? umul_widen_optab : smul_widen_optab);
          if (mode == GET_MODE_WIDER_MODE (innermode)
              && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
            {
              op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                 NULL_RTX, VOIDmode, 0);
              if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                   VOIDmode, 0);
              else
                op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                   NULL_RTX, VOIDmode, 0);
              goto binop2;
            }
        }
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then if the divisor is constant can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0)));

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
          || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;
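
      /* The fallback emitted below amounts to (an illustrative sketch;
         GE/GEU is used for MAX, LE/LEU for MIN):

           target = op0;
           if (target >= op1)
             goto done;
           target = op1;
         done:

         relying on cse to merge the references and to clean up the
         constant cases.  */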

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (GET_CODE (target) == MEM)
        target = gen_reg_rtx (mode);

      if (target != op0)
        emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
        {
          if (code == MAX_EXPR)
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          target, op1, NULL_RTX, op0);
          else
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          op1, target, NULL_RTX, op0);
          emit_move_insn (target, op1);
        }
      else
        {
          if (code == MAX_EXPR)
            temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
                    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
                    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
          else
            temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
                    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
                    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
          if (temp == const0_rtx)
            emit_move_insn (target, op1);
          else if (temp != const_true_rtx)
            {
              if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
                emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
              else
                abort ();
              emit_move_insn (target, op1);
            }
        }
      emit_label (op0);
      return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && GET_CODE (original_target) == REG
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          if (temp != original_target)
            temp = copy_to_reg (temp);

          op1 = gen_label_rtx ();
          emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
                         GET_MODE (temp), unsignedp, 0);
          emit_jump_insn (gen_beq (op1));
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }

      /* If no set-flag instruction, must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
          && (target == 0 || ! safe_from_p (target, exp)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && GET_CODE (target) == REG
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
                          (ignore ? const0_rtx : target),
                          VOIDmode, 0);

    case COND_EXPR:
      {
        rtx flag = NULL_RTX;
        tree left_cleanups = NULL_TREE;
        tree right_cleanups = NULL_TREE;

        /* Used to save a pointer to the place to put the setting of
           the flag that indicates if this side of the conditional was
           taken.  We backpatch the code if we find out later that we
           have any conditional cleanups that need to be performed.  */
        rtx dest_right_flag = NULL_RTX;
        rtx dest_left_flag = NULL_RTX;

        /* Note that COND_EXPRs whose type is a structure or union
           are required to be constructed to contain assignments of
           a temporary variable, so that we can evaluate them here
           for side effect only.  If type is void, we must do likewise.  */

        /* If an arm of the branch requires a cleanup,
           only that cleanup is performed.  */

        tree singleton = 0;
        tree binary_op = 0, unary_op = 0;
        tree old_cleanups = cleanups_this_call;

        /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
           convert it to our mode, if necessary.  */
        if (integer_onep (TREE_OPERAND (exp, 1))
            && integer_zerop (TREE_OPERAND (exp, 2))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            if (ignore)
              {
                expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                             modifier);
                return const0_rtx;
              }

            op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
            if (GET_MODE (op0) == mode)
              return op0;

            if (target == 0)
              target = gen_reg_rtx (mode);
            convert_move (target, op0, unsignedp);
            return target;
          }

        /* If we are not to produce a result, we have no target.  Otherwise,
           if a target was specified use it; it will not be used as an
           intermediate target unless it is safe.  If no target, use a
           temporary.  */

        if (ignore)
          temp = 0;
        else if (original_target
                 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
                 && GET_MODE (original_target) == mode
                 && ! (GET_CODE (original_target) == MEM
                       && MEM_VOLATILE_P (original_target)))
          temp = original_target;
        else if (mode == BLKmode)
          {
            if (TYPE_SIZE (type) == 0
                || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
              abort ();

            temp = assign_stack_temp (BLKmode,
                                      (TREE_INT_CST_LOW (TYPE_SIZE (type))
                                       + BITS_PER_UNIT - 1)
                                      / BITS_PER_UNIT, 0);
            MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
          }
        else
          temp = gen_reg_rtx (mode);

        /* Check for X ? A + B : A.  If we have this, we can copy
           A to the output and conditionally add B.  Similarly for unary
           operations.  Don't do this if X has side-effects because
           those side effects might affect A or B and the "?" operation is
           a sequence point in ANSI.  (We test for side effects later.)  */

        if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
            && operand_equal_p (TREE_OPERAND (exp, 2),
                                TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 2),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

        /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
           operation, do this as A + (X != 0).  Similarly for other simple
           binary operators.  */
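        /* For example (a sketch): for

             y = (a < b) ? x + 1 : x;

           when a < b can be computed directly as a 0-or-1 value, the
           branch disappears and the result is formed as x + (a < b).  */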
        if (temp && singleton && binary_op
            && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
            && (TREE_CODE (binary_op) == PLUS_EXPR
                || TREE_CODE (binary_op) == MINUS_EXPR
                || TREE_CODE (binary_op) == BIT_IOR_EXPR
                || TREE_CODE (binary_op) == BIT_XOR_EXPR)
            && integer_onep (TREE_OPERAND (binary_op, 1))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            rtx result;
            optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
                            : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
                            : xor_optab);

            /* If we had X ? A : A + 1, do this as A + (X == 0).

               We have to invert the truth value here and then put it
               back later if do_store_flag fails.  We cannot simply copy
               TREE_OPERAND (exp, 0) to another variable and modify that
               because invert_truthvalue can modify the tree pointed to
               by its argument.  */
            if (singleton == TREE_OPERAND (exp, 1))
              TREE_OPERAND (exp, 0)
                = invert_truthvalue (TREE_OPERAND (exp, 0));

            result = do_store_flag (TREE_OPERAND (exp, 0),
                                    (safe_from_p (temp, singleton)
                                     ? temp : NULL_RTX),
                                    mode, BRANCH_COST <= 1);

            if (result)
              {
                op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
                return expand_binop (mode, boptab, op1, result, temp,
                                     unsignedp, OPTAB_LIB_WIDEN);
              }
            else if (singleton == TREE_OPERAND (exp, 1))
              TREE_OPERAND (exp, 0)
                = invert_truthvalue (TREE_OPERAND (exp, 0));
          }

        NO_DEFER_POP;
        op0 = gen_label_rtx ();

        flag = gen_reg_rtx (word_mode);
        if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
          {
            if (temp != 0)
              {
                /* If the target conflicts with the other operand of the
                   binary op, we can't use it.  Also, we can't use the target
                   if it is a hard register, because evaluating the condition
                   might clobber it.  */
                if ((binary_op
                     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
                    || (GET_CODE (temp) == REG
                        && REGNO (temp) < FIRST_PSEUDO_REGISTER))
                  temp = gen_reg_rtx (mode);
                store_expr (singleton, temp, 0);
              }
            else
              expand_expr (singleton,
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            dest_left_flag = get_last_insn ();
            if (singleton == TREE_OPERAND (exp, 1))
              jumpif (TREE_OPERAND (exp, 0), op0);
            else
              jumpifnot (TREE_OPERAND (exp, 0), op0);

            /* Allows cleanups up to here.  */
            old_cleanups = cleanups_this_call;
            if (binary_op && temp == 0)
              /* Just touch the other operand.  */
              expand_expr (TREE_OPERAND (binary_op, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            else if (binary_op)
              store_expr (build (TREE_CODE (binary_op), type,
                                 make_tree (type, temp),
                                 TREE_OPERAND (binary_op, 1)),
                          temp, 0);
            else
              store_expr (build1 (TREE_CODE (unary_op), type,
                                  make_tree (type, temp)),
                          temp, 0);
            op1 = op0;
            dest_right_flag = get_last_insn ();
          }
#if 0
        /* This is now done in jump.c and is better done there because it
           produces shorter register lifetimes.  */

        /* Check for both possibilities either constants or variables
           in registers (but not the same as the target!).  If so, can
           save branches by assigning one, branching, and assigning the
           other.  */
        else if (temp && GET_MODE (temp) != BLKmode
                 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
                     || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
                          || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
                         && DECL_RTL (TREE_OPERAND (exp, 1))
                         && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
                         && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
                 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
                     || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
                          || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
                         && DECL_RTL (TREE_OPERAND (exp, 2))
                         && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
                         && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
          {
            if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            dest_left_flag = get_last_insn ();
            jumpifnot (TREE_OPERAND (exp, 0), op0);

            /* Allows cleanups up to here.  */
            old_cleanups = cleanups_this_call;
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            op1 = op0;
            dest_right_flag = get_last_insn ();
          }
#endif
        /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
           comparison operator.  If we have one of these cases, set the
           output to A, branch on A (cse will merge these two references),
           then set the output to FOO.  */
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 1), 0)
                 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
          {
            if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            dest_left_flag = get_last_insn ();
            jumpif (TREE_OPERAND (exp, 0), op0);

            /* Allows cleanups up to here.  */
            old_cleanups = cleanups_this_call;
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            op1 = op0;
            dest_right_flag = get_last_insn ();
          }
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 2), 0)
                 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
          {
            if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            dest_left_flag = get_last_insn ();
            jumpifnot (TREE_OPERAND (exp, 0), op0);

            /* Allows cleanups up to here.  */
            old_cleanups = cleanups_this_call;
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            op1 = op0;
            dest_right_flag = get_last_insn ();
          }
        else
          {
            op1 = gen_label_rtx ();
            jumpifnot (TREE_OPERAND (exp, 0), op0);

            /* Allows cleanups up to here.  */
            old_cleanups = cleanups_this_call;
            if (temp != 0)
              store_expr (TREE_OPERAND (exp, 1), temp, 0);
            else
              expand_expr (TREE_OPERAND (exp, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            dest_left_flag = get_last_insn ();

            /* Handle conditional cleanups, if any.  */
            left_cleanups = defer_cleanups_to (old_cleanups);

            emit_queue ();
            emit_jump_insn (gen_jump (op1));
            emit_barrier ();
            emit_label (op0);
            if (temp != 0)
              store_expr (TREE_OPERAND (exp, 2), temp, 0);
            else
              expand_expr (TREE_OPERAND (exp, 2),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            dest_right_flag = get_last_insn ();
          }

        /* Handle conditional cleanups, if any.  */
        right_cleanups = defer_cleanups_to (old_cleanups);

        emit_queue ();
        emit_label (op1);
        OK_DEFER_POP;

        /* Add back in any conditional cleanups.  */
        if (left_cleanups || right_cleanups)
          {
            tree new_cleanups;
            tree cond;
            rtx last;

            /* Now that we know that a flag is needed, go back and add in the
               setting of the flag.  */

            /* Do the left side flag.  */
            last = get_last_insn ();
            /* Flag left cleanups as needed.  */
            emit_move_insn (flag, const1_rtx);
            /* ??? deprecated, use sequences instead.  */
            reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);

            /* Do the right side flag.  */
            last = get_last_insn ();
            /* Flag right cleanups as needed.  */
            emit_move_insn (flag, const0_rtx);
            /* ??? deprecated, use sequences instead.  */
            reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);

            /* Convert FLAG, which is an rtx, into a tree.  */
            cond = make_node (RTL_EXPR);
            TREE_TYPE (cond) = integer_type_node;
            RTL_EXPR_RTL (cond) = flag;
            RTL_EXPR_SEQUENCE (cond) = NULL_RTX;

            if (! left_cleanups)
              left_cleanups = integer_zero_node;
            if (! right_cleanups)
              right_cleanups = integer_zero_node;
            new_cleanups = build (COND_EXPR, void_type_node,
                                  truthvalue_conversion (cond),
                                  left_cleanups, right_cleanups);
            new_cleanups = fold (new_cleanups);

            /* Now add in the conditionalized cleanups.  */
            cleanups_this_call
              = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
            (*interim_eh_hook) (NULL_TREE);
          }
        return temp;
      }

    case TARGET_EXPR:
      {
        int need_exception_region = 0;
        /* Something needs to be initialized, but we didn't know
           where that thing was when building the tree.  For example,
           it could be the return value of a function, or a parameter
           to a function which is laid out on the stack, or a temporary
           variable which must be passed by reference.

           We guarantee that the expression will either be constructed
           or copied into our original target.  */

        tree slot = TREE_OPERAND (exp, 0);
        tree exp1;
        rtx temp;

        if (TREE_CODE (slot) != VAR_DECL)
          abort ();

        if (target == 0)
          {
            if (DECL_RTL (slot) != 0)
              {
                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  (mrs)  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
                MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
                /* All temp slots at this level must not conflict.  */
                preserve_temp_slots (target);
                DECL_RTL (slot) = target;

                /* Since SLOT is not known to the called function
                   to belong to its stack frame, we must build an explicit
                   cleanup.  This case occurs when we must build up a reference
                   to pass the reference as an argument.  In this case,
                   it is very likely that such a reference need not be
                   built here.  */

                if (TREE_OPERAND (exp, 2) == 0)
                  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
                if (TREE_OPERAND (exp, 2))
                  {
                    cleanups_this_call = tree_cons (NULL_TREE,
                                                    TREE_OPERAND (exp, 2),
                                                    cleanups_this_call);
                    need_exception_region = 1;
                  }
              }
          }
        else
          {
            /* This case does occur, when expanding a parameter which
               needs to be constructed on the stack.  The target
               is the actual stack address that we want to initialize.
               The function we call will perform the cleanup in this case.  */

            /* If we have already assigned it space, use that space,
               not the target that we were passed in, as our target
               parameter is only a hint.  */
            if (DECL_RTL (slot) != 0)
              {
                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  (mrs)  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }

            DECL_RTL (slot) = target;
          }

        exp1 = TREE_OPERAND (exp, 1);
        /* Mark it as expanded.  */
        TREE_OPERAND (exp, 1) = NULL_TREE;

        temp = expand_expr (exp1, target, tmode, modifier);

        if (need_exception_region)
          (*interim_eh_hook) (NULL_TREE);

        return temp;
      }

    case INIT_EXPR:
      {
        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);
        tree noncopied_parts = 0;
        tree lhs_type = TREE_TYPE (lhs);

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
          noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
                                                  TYPE_NONCOPIED_PARTS (lhs_type));
        while (noncopied_parts != 0)
          {
            expand_assignment (TREE_VALUE (noncopied_parts),
                               TREE_PURPOSE (noncopied_parts), 0, 0);
            noncopied_parts = TREE_CHAIN (noncopied_parts);
          }
        return temp;
      }

    case MODIFY_EXPR:
      {
        /* If lhs is complex, expand calls in rhs before computing it.
           That's so we don't compute a pointer and save it over a call.
           If lhs is simple, compute it first so we can give it as a
           target if the rhs is just a call.  This avoids an extra temp and copy
           and that prevents a partial-subsumption which makes bad code.
           Actually we could treat component_ref's of vars like vars.  */

        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);
        tree noncopied_parts = 0;
        tree lhs_type = TREE_TYPE (lhs);

        temp = 0;

        if (TREE_CODE (lhs) != VAR_DECL
            && TREE_CODE (lhs) != RESULT_DECL
            && TREE_CODE (lhs) != PARM_DECL)
          preexpand_calls (exp);

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
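        /* Illustration (field names hypothetical): given

             struct s { unsigned a : 1, b : 1; } x, y;
             x.a |= y.b;

           with the result unused, this emits a jump past a store of 1
           into x.a when y.b is 0 (and, for &=, past a store of 0 when
           y.b is 1), avoiding a read-modify-write cycle.  */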
        if (ignore
            && TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
            && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
          {
            rtx label = gen_label_rtx ();

            do_jump (TREE_OPERAND (rhs, 1),
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
                                              ? integer_one_node
                                              : integer_zero_node)),
                               0, 0);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
            && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
          noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
                                                  TYPE_NONCOPIED_PARTS (lhs_type));

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        while (noncopied_parts != 0)
          {
            expand_assignment (TREE_PURPOSE (noncopied_parts),
                               TREE_VALUE (noncopied_parts), 0, 0);
            noncopied_parts = TREE_CHAIN (noncopied_parts);
          }
        return temp;
      }

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore);

    case ADDR_EXPR:
      /* If nonzero, TEMP will be set to the address of something that might
         be a MEM corresponding to a stack slot.  */
      temp = 0;

      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
          && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
        {
          op0 = trampoline_address (TREE_OPERAND (exp, 0));
          op0 = force_operand (op0, target);
        }
      /* If we are taking the address of something erroneous, just
         return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
        return const0_rtx;
      else
        {
          /* We make sure to pass const0_rtx down if we came in with
             ignore set, to avoid doing the cleanups twice for something.  */
          op0 = expand_expr (TREE_OPERAND (exp, 0),
                             ignore ? const0_rtx : NULL_RTX, VOIDmode,
                             (modifier == EXPAND_INITIALIZER
                              ? modifier : EXPAND_CONST_ADDRESS));

          /* If we are going to ignore the result, OP0 will have been set
             to const0_rtx, so just return it.  Don't get confused and
             think we are taking the address of the constant.  */
          if (ignore)
            return op0;

          /* We would like the object in memory.  If it is a constant,
             we can have it be statically allocated into memory.  For
             a non-constant (REG, SUBREG or CONCAT), we need to allocate some
             memory and store the value into it.  */
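
          /* For instance (a sketch): in `f (&x)' where x has been
             allocated to a register, the REG/SUBREG/CONCAT branch below
             spills x into a fresh stack temporary so that a genuine
             memory address exists to hand out.  */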

          if (CONSTANT_P (op0))
            op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                   op0);
          else if (GET_CODE (op0) == MEM)
            {
              mark_temp_addr_taken (op0);
              temp = XEXP (op0, 0);
            }

          else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
                   || GET_CODE (op0) == CONCAT)
            {
              /* If this object is in a register, it must not
                 be BLKmode.  */
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              enum machine_mode inner_mode = TYPE_MODE (inner_type);
              rtx memloc
                = assign_stack_temp (inner_mode,
                                     int_size_in_bytes (inner_type), 1);
              MEM_IN_STRUCT_P (memloc) = AGGREGATE_TYPE_P (inner_type);

              mark_temp_addr_taken (memloc);
              emit_move_insn (memloc, op0);
              op0 = memloc;
            }

          if (GET_CODE (op0) != MEM)
            abort ();

          if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
            {
              temp = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
              if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
                  && mode == ptr_mode)
                temp = convert_memory_address (ptr_mode, temp);
#endif
              return temp;
            }

          op0 = force_operand (XEXP (op0, 0), target);
        }

      if (flag_force_addr && GET_CODE (op0) != REG)
        op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG)
        mark_reg_pointer (op0);

      /* If we might have had a temp slot, add an equivalent address
         for it.  */
      if (temp != 0)
        update_temp_slot_address (temp, op0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
          && mode == ptr_mode)
        op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();

      /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx insns;

        /* Get the rtx code of the operands.  */
        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
        op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

        start_sequence ();

        /* Move the real (op0) and imaginary (op1) parts to their location.  */
        emit_move_insn (gen_realpart (mode, target), op0);
        emit_move_insn (gen_imagpart (mode, target), op1);

        insns = get_insns ();
        end_sequence ();

        /* Complex construction should appear as a single unit.  */
        /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit.  */
        if (GET_CODE (target) != CONCAT)
          emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
        else
          emit_insns (insns);

        return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
        enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx imag_t;
        rtx insns;

        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (mode);

        start_sequence ();

        /* Store the realpart and the negated imagpart to target.  */
        emit_move_insn (gen_realpart (partmode, target),
                        gen_realpart (partmode, op0));

        imag_t = gen_imagpart (partmode, target);
        temp = expand_unop (partmode, neg_optab,
                            gen_imagpart (partmode, op0), imag_t, 0);
        if (temp != imag_t)
          emit_move_insn (imag_t, temp);

        insns = get_insns ();
        end_sequence ();

        /* Conjugate should appear as a single unit.
           If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit.  */
        if (GET_CODE (target) != CONCAT)
          emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
        else
          emit_insns (insns);

        return target;
      }
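
      /* That is, for z = a + b*i the case above computes conj(z)
         = a - b*i: the real part is copied, the imaginary part is
         negated, and the insns are grouped so later passes treat the
         result as a single value.  */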
6377
bbf6f052 6378 case ERROR_MARK:
66538193
RS
6379 op0 = CONST0_RTX (tmode);
6380 if (op0 != 0)
6381 return op0;
bbf6f052
RK
6382 return const0_rtx;
6383
6384 default:
90764a87 6385 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
6386 }
6387
6388 /* Here to do an ordinary binary operator, generating an instruction
6389 from the optab already placed in `this_optab'. */
6390 binop:
6391 preexpand_calls (exp);
6392 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6393 subtarget = 0;
6394 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6395 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6396 binop2:
6397 temp = expand_binop (mode, this_optab, op0, op1, target,
6398 unsignedp, OPTAB_LIB_WIDEN);
6399 if (temp == 0)
6400 abort ();
6401 return temp;
6402}
bbf6f052 6403
bbf6f052 6404
ca695ac9
JB
6405/* Emit bytecode to evaluate the given expression EXP to the stack. */
6406void
6407bc_expand_expr (exp)
6408 tree exp;
bbf6f052 6409{
ca695ac9
JB
6410 enum tree_code code;
6411 tree type, arg0;
6412 rtx r;
6413 struct binary_operator *binoptab;
6414 struct unary_operator *unoptab;
6415 struct increment_operator *incroptab;
6416 struct bc_label *lab, *lab1;
6417 enum bytecode_opcode opcode;
6418
6419
6420 code = TREE_CODE (exp);
6421
6422 switch (code)
bbf6f052 6423 {
ca695ac9
JB
6424 case PARM_DECL:
6425
6426 if (DECL_RTL (exp) == 0)
bbf6f052 6427 {
ca695ac9
JB
6428 error_with_decl (exp, "prior parameter's size depends on `%s'");
6429 return;
bbf6f052 6430 }
ca695ac9
JB
6431
6432 bc_load_parmaddr (DECL_RTL (exp));
6433 bc_load_memory (TREE_TYPE (exp), exp);
6434
6435 return;
6436
6437 case VAR_DECL:
6438
6439 if (DECL_RTL (exp) == 0)
6440 abort ();
6441
6442#if 0
e7a42772 6443 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
6444 bc_load_externaddr (DECL_RTL (exp));
6445 else
6446 bc_load_localaddr (DECL_RTL (exp));
6447#endif
6448 if (TREE_PUBLIC (exp))
e7a42772
JB
6449 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6450 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
ca695ac9
JB
6451 else
6452 bc_load_localaddr (DECL_RTL (exp));
6453
6454 bc_load_memory (TREE_TYPE (exp), exp);
6455 return;
6456
6457 case INTEGER_CST:
6458
6459#ifdef DEBUG_PRINT_CODE
6460 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6461#endif
6bd6178d 6462 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
ca695ac9 6463 ? SImode
6bd6178d 6464 : TYPE_MODE (TREE_TYPE (exp)))],
ca695ac9
JB
6465 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6466 return;
6467
6468 case REAL_CST:
6469
c02bd5d9 6470#if 0
ca695ac9
JB
6471#ifdef DEBUG_PRINT_CODE
6472 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6473#endif
c02bd5d9 6474 /* FIX THIS: find a better way to pass real_cst's. -bson */
ca695ac9
JB
6475 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6476 (double) TREE_REAL_CST (exp));
c02bd5d9
JB
6477#else
6478 abort ();
6479#endif
6480
ca695ac9
JB
6481 return;
6482
6483 case CALL_EXPR:
6484
6485 /* We build a call description vector describing the type of
6486 the return value and of the arguments; this call vector,
6487 together with a pointer to a location for the return value
6488 and the base of the argument list, is passed to the low
6489 level machine dependent call subroutine, which is responsible
6490 for putting the arguments wherever real functions expect
6491 them, as well as getting the return value back. */
6492 {
6493 tree calldesc = 0, arg;
6494 int nargs = 0, i;
6495 rtx retval;
6496
6497 /* Push the evaluated args on the evaluation stack in reverse
6498 order. Also make an entry for each arg in the calldesc
6499 vector while we're at it. */
6500
6501 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6502
6503 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6504 {
6505 ++nargs;
6506 bc_expand_expr (TREE_VALUE (arg));
6507
6508 calldesc = tree_cons ((tree) 0,
6509 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6510 calldesc);
6511 calldesc = tree_cons ((tree) 0,
6512 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6513 calldesc);
6514 }
6515
6516 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6517
6518 /* Allocate a location for the return value and push its
6519 address on the evaluation stack. Also make an entry
6520 at the front of the calldesc for the return value type. */
6521
6522 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6523 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6524 bc_load_localaddr (retval);
6525
6526 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6527 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6528
6529 /* Prepend the argument count. */
6530 calldesc = tree_cons ((tree) 0,
6531 build_int_2 (nargs, 0),
6532 calldesc);
6533
6534 /* Push the address of the call description vector on the stack. */
6535 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6536 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6537 build_index_type (build_int_2 (nargs * 2, 0)));
6538 r = output_constant_def (calldesc);
6539 bc_load_externaddr (r);
6540
6541 /* Push the address of the function to be called. */
6542 bc_expand_expr (TREE_OPERAND (exp, 0));
6543
6544 /* Call the function, popping its address and the calldesc vector
6545 address off the evaluation stack in the process. */
6546 bc_emit_instruction (call);
6547
6548 /* Pop the arguments off the stack. */
6549 bc_adjust_stack (nargs);
6550
6551 /* Load the return value onto the stack. */
6552 bc_load_localaddr (retval);
6553 bc_load_memory (type, TREE_OPERAND (exp, 0));
6554 }
6555 return;
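/* Editor's sketch (illustrative, not part of the original source): the
   calldesc vector built above, written out for a hypothetical call
   `int f (int, double)' on a machine with 4-byte ints and 8-byte
   doubles.  The type-code values are invented for the example; the
   real ones come from bc_runtime_type_code.  */
enum { EXAMPLE_INT_CODE = 1, EXAMPLE_REAL_CODE = 2 };

static const int example_calldesc[] =
{
  2,			/* argument count */
  EXAMPLE_INT_CODE, 4,	/* return value: type code, then size in bytes */
  EXAMPLE_INT_CODE, 4,	/* first argument */
  EXAMPLE_REAL_CODE, 8	/* second argument */
};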
6556
6557 case SAVE_EXPR:
6558
6559 if (!SAVE_EXPR_RTL (exp))
bbf6f052 6560 {
6561 /* First time around: copy to local variable */
6562 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
 6563						      TYPE_ALIGN (TREE_TYPE (exp)));
6564 bc_expand_expr (TREE_OPERAND (exp, 0));
6d6e61ce 6565 bc_emit_instruction (duplicate);
6566
6567 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6568 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 6569 }
ca695ac9 6570 else
bbf6f052 6571 {
6572 /* Consecutive reference: use saved copy */
6573 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6574 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 6575 }
6576 return;
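/* Editor's sketch (illustrative, not part of the original source): the
   SAVE_EXPR protocol above, restated in C.  `compute' stands for the
   wrapped expression, which must be evaluated exactly once however
   many times the SAVE_EXPR is referenced; the static pair plays the
   role of the allocated local.  */
static int save_expr_cache_valid;
static int save_expr_cache;

static int
reference_save_expr (int (*compute) (void))
{
  if (! save_expr_cache_valid)
    {
      /* First time around: evaluate, and store a duplicate of the
	 result in the local.  */
      save_expr_cache = compute ();
      save_expr_cache_valid = 1;
    }
  /* Consecutive references reload the saved copy.  */
  return save_expr_cache;
}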
6577
6578#if 0
6579 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6580 how are they handled instead? */
6581 case LET_STMT:
6582
6583 TREE_USED (exp) = 1;
6584 bc_expand_expr (STMT_BODY (exp));
6585 return;
6586#endif
6587
6588 case NOP_EXPR:
6589 case CONVERT_EXPR:
6590
6591 bc_expand_expr (TREE_OPERAND (exp, 0));
6592 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6593 return;
6594
6595 case MODIFY_EXPR:
6596
c02bd5d9 6597 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6598 return;
6599
6600 case ADDR_EXPR:
6601
6602 bc_expand_address (TREE_OPERAND (exp, 0));
6603 return;
6604
6605 case INDIRECT_REF:
6606
6607 bc_expand_expr (TREE_OPERAND (exp, 0));
6608 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6609 return;
6610
6611 case ARRAY_REF:
6612
6613 bc_expand_expr (bc_canonicalize_array_ref (exp));
6614 return;
6615
6616 case COMPONENT_REF:
6617
6618 bc_expand_component_address (exp);
6619
6620 /* If we have a bitfield, generate a proper load */
6621 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6622 return;
6623
6624 case COMPOUND_EXPR:
6625
6626 bc_expand_expr (TREE_OPERAND (exp, 0));
6627 bc_emit_instruction (drop);
6628 bc_expand_expr (TREE_OPERAND (exp, 1));
6629 return;
6630
6631 case COND_EXPR:
6632
6633 bc_expand_expr (TREE_OPERAND (exp, 0));
6634 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6635 lab = bc_get_bytecode_label ();
c02bd5d9 6636 bc_emit_bytecode (xjumpifnot);
6637 bc_emit_bytecode_labelref (lab);
6638
6639#ifdef DEBUG_PRINT_CODE
6640 fputc ('\n', stderr);
6641#endif
6642 bc_expand_expr (TREE_OPERAND (exp, 1));
6643 lab1 = bc_get_bytecode_label ();
6644 bc_emit_bytecode (jump);
6645 bc_emit_bytecode_labelref (lab1);
6646
6647#ifdef DEBUG_PRINT_CODE
6648 fputc ('\n', stderr);
6649#endif
6650
6651 bc_emit_bytecode_labeldef (lab);
6652 bc_expand_expr (TREE_OPERAND (exp, 2));
6653 bc_emit_bytecode_labeldef (lab1);
6654 return;
6655
6656 case TRUTH_ANDIF_EXPR:
6657
c02bd5d9 6658 opcode = xjumpifnot;
6659 goto andorif;
6660
6661 case TRUTH_ORIF_EXPR:
6662
c02bd5d9 6663 opcode = xjumpif;
6664 goto andorif;
6665
6666 case PLUS_EXPR:
6667
6668 binoptab = optab_plus_expr;
6669 goto binop;
6670
6671 case MINUS_EXPR:
6672
6673 binoptab = optab_minus_expr;
6674 goto binop;
6675
6676 case MULT_EXPR:
6677
6678 binoptab = optab_mult_expr;
6679 goto binop;
6680
6681 case TRUNC_DIV_EXPR:
6682 case FLOOR_DIV_EXPR:
6683 case CEIL_DIV_EXPR:
6684 case ROUND_DIV_EXPR:
6685 case EXACT_DIV_EXPR:
6686
6687 binoptab = optab_trunc_div_expr;
6688 goto binop;
6689
6690 case TRUNC_MOD_EXPR:
6691 case FLOOR_MOD_EXPR:
6692 case CEIL_MOD_EXPR:
6693 case ROUND_MOD_EXPR:
6694
6695 binoptab = optab_trunc_mod_expr;
6696 goto binop;
6697
6698 case FIX_ROUND_EXPR:
6699 case FIX_FLOOR_EXPR:
6700 case FIX_CEIL_EXPR:
6701 abort (); /* Not used for C. */
6702
6703 case FIX_TRUNC_EXPR:
6704 case FLOAT_EXPR:
6705 case MAX_EXPR:
6706 case MIN_EXPR:
6707 case FFS_EXPR:
6708 case LROTATE_EXPR:
6709 case RROTATE_EXPR:
6710 abort (); /* FIXME */
6711
6712 case RDIV_EXPR:
6713
6714 binoptab = optab_rdiv_expr;
6715 goto binop;
6716
6717 case BIT_AND_EXPR:
6718
6719 binoptab = optab_bit_and_expr;
6720 goto binop;
6721
6722 case BIT_IOR_EXPR:
6723
6724 binoptab = optab_bit_ior_expr;
6725 goto binop;
6726
6727 case BIT_XOR_EXPR:
6728
6729 binoptab = optab_bit_xor_expr;
6730 goto binop;
6731
6732 case LSHIFT_EXPR:
6733
6734 binoptab = optab_lshift_expr;
6735 goto binop;
6736
6737 case RSHIFT_EXPR:
6738
6739 binoptab = optab_rshift_expr;
6740 goto binop;
6741
6742 case TRUTH_AND_EXPR:
6743
6744 binoptab = optab_truth_and_expr;
6745 goto binop;
6746
6747 case TRUTH_OR_EXPR:
6748
6749 binoptab = optab_truth_or_expr;
6750 goto binop;
6751
6752 case LT_EXPR:
6753
6754 binoptab = optab_lt_expr;
6755 goto binop;
6756
6757 case LE_EXPR:
6758
6759 binoptab = optab_le_expr;
6760 goto binop;
6761
6762 case GE_EXPR:
6763
6764 binoptab = optab_ge_expr;
6765 goto binop;
6766
6767 case GT_EXPR:
6768
6769 binoptab = optab_gt_expr;
6770 goto binop;
6771
6772 case EQ_EXPR:
6773
6774 binoptab = optab_eq_expr;
6775 goto binop;
6776
6777 case NE_EXPR:
6778
6779 binoptab = optab_ne_expr;
6780 goto binop;
6781
6782 case NEGATE_EXPR:
6783
6784 unoptab = optab_negate_expr;
6785 goto unop;
6786
6787 case BIT_NOT_EXPR:
6788
6789 unoptab = optab_bit_not_expr;
6790 goto unop;
6791
6792 case TRUTH_NOT_EXPR:
6793
6794 unoptab = optab_truth_not_expr;
6795 goto unop;
6796
6797 case PREDECREMENT_EXPR:
6798
6799 incroptab = optab_predecrement_expr;
6800 goto increment;
6801
6802 case PREINCREMENT_EXPR:
6803
6804 incroptab = optab_preincrement_expr;
6805 goto increment;
6806
6807 case POSTDECREMENT_EXPR:
6808
6809 incroptab = optab_postdecrement_expr;
6810 goto increment;
6811
6812 case POSTINCREMENT_EXPR:
6813
6814 incroptab = optab_postincrement_expr;
6815 goto increment;
6816
6817 case CONSTRUCTOR:
6818
6819 bc_expand_constructor (exp);
6820 return;
6821
6822 case ERROR_MARK:
6823 case RTL_EXPR:
6824
6825 return;
6826
6827 case BIND_EXPR:
6828 {
6829 tree vars = TREE_OPERAND (exp, 0);
6830 int vars_need_expansion = 0;
6831
6832 /* Need to open a binding contour here because
 6833	   if there are any cleanups they must be contained here. */
6834 expand_start_bindings (0);
6835
6836 /* Mark the corresponding BLOCK for output. */
6837 if (TREE_OPERAND (exp, 2) != 0)
6838 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6839
6840 /* If VARS have not yet been expanded, expand them now. */
6841 while (vars)
6842 {
6843 if (DECL_RTL (vars) == 0)
6844 {
6845 vars_need_expansion = 1;
9bac07c3 6846 expand_decl (vars);
ca695ac9 6847 }
9bac07c3 6848 expand_decl_init (vars);
6849 vars = TREE_CHAIN (vars);
6850 }
6851
6852 bc_expand_expr (TREE_OPERAND (exp, 1));
6853
6854 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6855
6856 return;
6857 }
6858 }
6859
6860 abort ();
6861
6862 binop:
6863
6864 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6865 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6866 return;
6867
6868
6869 unop:
6870
6871 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6872 return;
6873
6874
6875 andorif:
6876
6877 bc_expand_expr (TREE_OPERAND (exp, 0));
6878 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6879 lab = bc_get_bytecode_label ();
6880
6d6e61ce 6881 bc_emit_instruction (duplicate);
6882 bc_emit_bytecode (opcode);
6883 bc_emit_bytecode_labelref (lab);
6884
6885#ifdef DEBUG_PRINT_CODE
6886 fputc ('\n', stderr);
6887#endif
6888
6889 bc_emit_instruction (drop);
6890
6891 bc_expand_expr (TREE_OPERAND (exp, 1));
6892 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6893 bc_emit_bytecode_labeldef (lab);
6894 return;
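/* Editor's sketch (illustrative, not part of the original source): the
   duplicate/jump/drop sequence above, for `a && b' (TRUTH_ANDIF_EXPR,
   where OPCODE is xjumpifnot), behaves like this C function.  The
   duplicated copy survives the conditional jump and becomes the
   result when the jump is taken.  */
static int
truth_andif (int a, int (*eval_b) (void))
{
  int top = (a != 0);		/* operand 0 plus truth conversion */
  if (! top)			/* duplicate, then xjumpifnot lab */
    return top;			/* lab: the surviving copy is the value */
  /* drop the copy, then evaluate operand 1 */
  return eval_b () != 0;	/* truth conversion of operand 1 */
}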
6895
6896
6897 increment:
6898
6899 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6900
6901 /* Push the quantum. */
6902 bc_expand_expr (TREE_OPERAND (exp, 1));
6903
6904 /* Convert it to the lvalue's type. */
6905 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6906
6907 /* Push the address of the lvalue */
c02bd5d9 6908 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6909
6910 /* Perform actual increment */
c02bd5d9 6911 bc_expand_increment (incroptab, type);
6912 return;
6913}
6914\f
6915/* Return the alignment in bits of EXP, a pointer valued expression.
6916 But don't return more than MAX_ALIGN no matter what.
6917 The alignment returned is, by default, the alignment of the thing that
6918 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6919
6920 Otherwise, look at the expression to see if we can do better, i.e., if the
6921 expression is actually pointing at an object whose alignment is tighter. */
6922
6923static int
6924get_pointer_alignment (exp, max_align)
6925 tree exp;
6926 unsigned max_align;
6927{
6928 unsigned align, inner;
6929
6930 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6931 return 0;
6932
6933 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6934 align = MIN (align, max_align);
6935
6936 while (1)
6937 {
6938 switch (TREE_CODE (exp))
6939 {
6940 case NOP_EXPR:
6941 case CONVERT_EXPR:
6942 case NON_LVALUE_EXPR:
6943 exp = TREE_OPERAND (exp, 0);
6944 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6945 return align;
6946 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8dc2fbcf 6947 align = MIN (inner, max_align);
6948 break;
6949
6950 case PLUS_EXPR:
6951 /* If sum of pointer + int, restrict our maximum alignment to that
6952 imposed by the integer. If not, we can't do any better than
6953 ALIGN. */
6954 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6955 return align;
6956
6957 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6958 & (max_align - 1))
6959 != 0)
6960 max_align >>= 1;
6961
6962 exp = TREE_OPERAND (exp, 0);
6963 break;
6964
6965 case ADDR_EXPR:
6966 /* See what we are pointing at and look at its alignment. */
6967 exp = TREE_OPERAND (exp, 0);
6968 if (TREE_CODE (exp) == FUNCTION_DECL)
8dc2fbcf 6969 align = FUNCTION_BOUNDARY;
ca695ac9 6970 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8dc2fbcf 6971 align = DECL_ALIGN (exp);
6972#ifdef CONSTANT_ALIGNMENT
6973 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6974 align = CONSTANT_ALIGNMENT (exp, align);
6975#endif
6976 return MIN (align, max_align);
6977
6978 default:
6979 return align;
6980 }
6981 }
6982}
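/* Editor's sketch (illustrative, not part of the original source): the
   PLUS_EXPR narrowing loop above, isolated.  A pointer aligned to
   MAX_ALIGN_BITS plus a constant byte offset keeps only the alignment
   that also divides the offset.  BITS_PER_UNIT is taken as 8 for the
   example, mirroring `(TREE_INT_CST_LOW (...) * BITS_PER_UNIT)
   & (max_align - 1)'.  */
static unsigned
align_after_offset (unsigned max_align_bits, unsigned long offset_bytes)
{
  while (((offset_bytes * 8) & (max_align_bits - 1)) != 0)
    max_align_bits >>= 1;
  return max_align_bits;
}
/* E.g. a 64-bit aligned pointer plus 2 bytes:
   align_after_offset (64, 2) == 16.  */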
6983\f
6984/* Return the tree node and offset if a given argument corresponds to
6985 a string constant. */
6986
6987static tree
6988string_constant (arg, ptr_offset)
6989 tree arg;
6990 tree *ptr_offset;
6991{
6992 STRIP_NOPS (arg);
6993
6994 if (TREE_CODE (arg) == ADDR_EXPR
6995 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6996 {
6997 *ptr_offset = integer_zero_node;
6998 return TREE_OPERAND (arg, 0);
6999 }
7000 else if (TREE_CODE (arg) == PLUS_EXPR)
7001 {
7002 tree arg0 = TREE_OPERAND (arg, 0);
7003 tree arg1 = TREE_OPERAND (arg, 1);
7004
7005 STRIP_NOPS (arg0);
7006 STRIP_NOPS (arg1);
7007
7008 if (TREE_CODE (arg0) == ADDR_EXPR
7009 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7010 {
7011 *ptr_offset = arg1;
7012 return TREE_OPERAND (arg0, 0);
7013 }
7014 else if (TREE_CODE (arg1) == ADDR_EXPR
7015 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7016 {
7017 *ptr_offset = arg0;
7018 return TREE_OPERAND (arg1, 0);
7019 }
7020 }
7021
7022 return 0;
7023}
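/* Editor's sketch (illustrative, not part of the original source): the
   two tree shapes string_constant recognizes, at the source level.  */
static void
string_constant_examples (void)
{
  const char *p = "hello";	/* ADDR_EXPR of a STRING_CST; offset 0 */
  const char *q = "hello" + 2;	/* PLUS_EXPR: STRING_CST address, offset 2 */
  (void) p;
  (void) q;
}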
7024
7025/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
 7026   way, because the string could contain a zero byte in the middle.
7027 TREE_STRING_LENGTH is the size of the character array, not the string.
7028
7029 Unfortunately, string_constant can't access the values of const char
7030 arrays with initializers, so neither can we do so here. */
7031
7032static tree
7033c_strlen (src)
7034 tree src;
7035{
7036 tree offset_node;
7037 int offset, max;
7038 char *ptr;
7039
7040 src = string_constant (src, &offset_node);
7041 if (src == 0)
7042 return 0;
7043 max = TREE_STRING_LENGTH (src);
7044 ptr = TREE_STRING_POINTER (src);
7045 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7046 {
7047 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7048 compute the offset to the following null if we don't know where to
7049 start searching for it. */
7050 int i;
7051 for (i = 0; i < max; i++)
7052 if (ptr[i] == 0)
7053 return 0;
7054 /* We don't know the starting offset, but we do know that the string
7055 has no internal zero bytes. We can assume that the offset falls
7056 within the bounds of the string; otherwise, the programmer deserves
7057 what he gets. Subtract the offset from the length of the string,
7058 and return that. */
7059 /* This would perhaps not be valid if we were dealing with named
7060 arrays in addition to literal string constants. */
7061 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7062 }
7063
7064 /* We have a known offset into the string. Start searching there for
7065 a null character. */
7066 if (offset_node == 0)
7067 offset = 0;
7068 else
7069 {
7070 /* Did we get a long long offset? If so, punt. */
7071 if (TREE_INT_CST_HIGH (offset_node) != 0)
7072 return 0;
7073 offset = TREE_INT_CST_LOW (offset_node);
7074 }
7075 /* If the offset is known to be out of bounds, warn, and call strlen at
7076 runtime. */
7077 if (offset < 0 || offset > max)
7078 {
7079 warning ("offset outside bounds of constant string");
7080 return 0;
7081 }
7082 /* Use strlen to search for the first zero byte. Since any strings
7083 constructed with build_string will have nulls appended, we win even
7084 if we get handed something like (char[4])"abcd".
7085
7086 Since OFFSET is our starting index into the string, no further
7087 calculation is needed. */
7088 return size_int (strlen (ptr + offset));
7089}
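/* Editor's sketch (illustrative, not part of the original source): the
   known-offset case of c_strlen amounts to this search, relying on the
   NUL that build_string appends.  ARRAY_LEN plays the role of
   TREE_STRING_LENGTH; -1 stands for "punt to a runtime strlen".  */
static int
example_c_strlen (const char *lit, int array_len, int offset)
{
  int i;

  if (offset < 0 || offset > array_len)
    return -1;			/* out of bounds: warn and punt */
  for (i = offset; i < array_len; i++)
    if (lit[i] == 0)
      return i - offset;	/* length measured from the offset */
  return -1;
}
/* example_c_strlen ("hello", 6, 0) == 5;
   example_c_strlen ("hello", 6, 2) == 3.  */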
7090\f
7091/* Expand an expression EXP that calls a built-in function,
7092 with result going to TARGET if that's convenient
7093 (and in mode MODE if that's convenient).
7094 SUBTARGET may be used as the target for computing one of EXP's operands.
7095 IGNORE is nonzero if the value is to be ignored. */
7096
7097#define CALLED_AS_BUILT_IN(NODE) \
7098 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7099
7100static rtx
7101expand_builtin (exp, target, subtarget, mode, ignore)
7102 tree exp;
7103 rtx target;
7104 rtx subtarget;
7105 enum machine_mode mode;
7106 int ignore;
7107{
7108 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7109 tree arglist = TREE_OPERAND (exp, 1);
7110 rtx op0;
7111 rtx lab1, insns;
7112 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7113 optab builtin_optab;
7114
7115 switch (DECL_FUNCTION_CODE (fndecl))
7116 {
7117 case BUILT_IN_ABS:
7118 case BUILT_IN_LABS:
7119 case BUILT_IN_FABS:
7120 /* build_function_call changes these into ABS_EXPR. */
7121 abort ();
7122
7123 case BUILT_IN_SIN:
7124 case BUILT_IN_COS:
7125 /* Treat these like sqrt, but only if the user asks for them. */
7126 if (! flag_fast_math)
7127 break;
7128 case BUILT_IN_FSQRT:
7129 /* If not optimizing, call the library function. */
7130 if (! optimize)
7131 break;
7132
7133 if (arglist == 0
7134 /* Arg could be wrong type if user redeclared this fcn wrong. */
7135 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7b073ca6 7136 break;
7137
7138 /* Stabilize and compute the argument. */
7139 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7140 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7141 {
7142 exp = copy_node (exp);
7143 arglist = copy_node (arglist);
7144 TREE_OPERAND (exp, 1) = arglist;
7145 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7146 }
7147 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7148
7149 /* Make a suitable register to place result in. */
7150 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7151
7152 emit_queue ();
7153 start_sequence ();
7154
7155 switch (DECL_FUNCTION_CODE (fndecl))
7156 {
7157 case BUILT_IN_SIN:
7158 builtin_optab = sin_optab; break;
7159 case BUILT_IN_COS:
7160 builtin_optab = cos_optab; break;
7161 case BUILT_IN_FSQRT:
7162 builtin_optab = sqrt_optab; break;
7163 default:
7164 abort ();
7165 }
7166
7167 /* Compute into TARGET.
7168 Set TARGET to wherever the result comes back. */
7169 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7170 builtin_optab, op0, target, 0);
7171
7172 /* If we were unable to expand via the builtin, stop the
7173 sequence (without outputting the insns) and break, causing
 7174	 a call to the library function. */
7175 if (target == 0)
7176 {
7177 end_sequence ();
7178 break;
7179 }
7180
7181 /* Check the results by default. But if flag_fast_math is turned on,
7182 then assume sqrt will always be called with valid arguments. */
7183
7184 if (! flag_fast_math)
7185 {
7186 /* Don't define the builtin FP instructions
7187 if your machine is not IEEE. */
7188 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7189 abort ();
7190
7191 lab1 = gen_label_rtx ();
7192
7193 /* Test the result; if it is NaN, set errno=EDOM because
7194 the argument was not in the domain. */
7195 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7196 emit_jump_insn (gen_beq (lab1));
7197
4ac09687 7198#ifdef TARGET_EDOM
7199 {
7200#ifdef GEN_ERRNO_RTX
7201 rtx errno_rtx = GEN_ERRNO_RTX;
7202#else
7203 rtx errno_rtx
e74a2201 7204 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7205#endif
7206
7207 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7208 }
7209#else
7210 /* We can't set errno=EDOM directly; let the library call do it.
7211 Pop the arguments right away in case the call gets deleted. */
7212 NO_DEFER_POP;
7213 expand_call (exp, target, 0);
7214 OK_DEFER_POP;
7215#endif
7216
7217 emit_label (lab1);
7218 }
7219
7220 /* Output the entire sequence. */
7221 insns = get_insns ();
7222 end_sequence ();
7223 emit_insns (insns);
7224
7225 return target;
7226
 7227      /* __builtin_apply_args returns a block of memory allocated on
7228 the stack into which is stored the arg pointer, structure
7229 value address, static chain, and all the registers that might
7230 possibly be used in performing a function call. The code is
7231 moved to the start of the function so the incoming values are
7232 saved. */
7233 case BUILT_IN_APPLY_ARGS:
7234 /* Don't do __builtin_apply_args more than once in a function.
7235 Save the result of the first call and reuse it. */
7236 if (apply_args_value != 0)
7237 return apply_args_value;
7238 {
7239 /* When this function is called, it means that registers must be
7240 saved on entry to this function. So we migrate the
7241 call to the first insn of this function. */
7242 rtx temp;
7243 rtx seq;
7244
7245 start_sequence ();
7246 temp = expand_builtin_apply_args ();
7247 seq = get_insns ();
7248 end_sequence ();
7249
7250 apply_args_value = temp;
7251
7252 /* Put the sequence after the NOTE that starts the function.
7253 If this is inside a SEQUENCE, make the outer-level insn
7254 chain current, so the code is placed at the start of the
7255 function. */
7256 push_topmost_sequence ();
7257 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7258 pop_topmost_sequence ();
7259 return temp;
7260 }
7261
7262 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7263 FUNCTION with a copy of the parameters described by
7264 ARGUMENTS, and ARGSIZE. It returns a block of memory
7265 allocated on the stack into which is stored all the registers
7266 that might possibly be used for returning the result of a
7267 function. ARGUMENTS is the value returned by
7268 __builtin_apply_args. ARGSIZE is the number of bytes of
7269 arguments that must be copied. ??? How should this value be
7270 computed? We'll also need a safe worst case value for varargs
7271 functions. */
7272 case BUILT_IN_APPLY:
7273 if (arglist == 0
7274 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7275 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7276 || TREE_CHAIN (arglist) == 0
7277 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7278 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7279 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7280 return const0_rtx;
7281 else
7282 {
7283 int i;
7284 tree t;
7285 rtx ops[3];
7286
7287 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7288 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7289
7290 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7291 }
7292
7293 /* __builtin_return (RESULT) causes the function to return the
7294 value described by RESULT. RESULT is address of the block of
7295 memory returned by __builtin_apply. */
7296 case BUILT_IN_RETURN:
7297 if (arglist
7298 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7299 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7300 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7301 NULL_RTX, VOIDmode, 0));
7302 return const0_rtx;
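/* Editor's sketch (illustrative, not part of the original source): the
   three builtins above are designed to be used together, as in this
   forwarding function.  The `16' is a caller-supplied upper bound on
   the size of the argument block, per the ??? comment above.  */
static int
example_target (int a, int b)
{
  return a + b;
}

static int
example_forwarder (int a, int b)
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) example_target, args, 16);
  __builtin_return (result);
}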
7303
7304 case BUILT_IN_SAVEREGS:
7305 /* Don't do __builtin_saveregs more than once in a function.
7306 Save the result of the first call and reuse it. */
7307 if (saveregs_value != 0)
7308 return saveregs_value;
7309 {
7310 /* When this function is called, it means that registers must be
7311 saved on entry to this function. So we migrate the
7312 call to the first insn of this function. */
7313 rtx temp;
7314 rtx seq;
7315
7316 /* Now really call the function. `expand_call' does not call
7317 expand_builtin, so there is no danger of infinite recursion here. */
7318 start_sequence ();
7319
7320#ifdef EXPAND_BUILTIN_SAVEREGS
7321 /* Do whatever the machine needs done in this case. */
7322 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7323#else
7324 /* The register where the function returns its value
7325 is likely to have something else in it, such as an argument.
7326 So preserve that register around the call. */
d0c76654 7327
7328 if (value_mode != VOIDmode)
7329 {
7330 rtx valreg = hard_libcall_value (value_mode);
7331 rtx saved_valreg = gen_reg_rtx (value_mode);
7332
ca695ac9 7333 emit_move_insn (saved_valreg, valreg);
7334 temp = expand_call (exp, target, ignore);
7335 emit_move_insn (valreg, saved_valreg);
ca695ac9 7336 }
7337 else
7338 /* Generate the call, putting the value in a pseudo. */
7339 temp = expand_call (exp, target, ignore);
7340#endif
7341
7342 seq = get_insns ();
7343 end_sequence ();
7344
7345 saveregs_value = temp;
7346
7347 /* Put the sequence after the NOTE that starts the function.
7348 If this is inside a SEQUENCE, make the outer-level insn
7349 chain current, so the code is placed at the start of the
7350 function. */
7351 push_topmost_sequence ();
7352 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7353 pop_topmost_sequence ();
7354 return temp;
7355 }
7356
7357 /* __builtin_args_info (N) returns word N of the arg space info
7358 for the current function. The number and meanings of words
7359 is controlled by the definition of CUMULATIVE_ARGS. */
7360 case BUILT_IN_ARGS_INFO:
7361 {
7362 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7363 int i;
7364 int *word_ptr = (int *) &current_function_args_info;
7365 tree type, elts, result;
7366
7367 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7368 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7369 __FILE__, __LINE__);
7370
7371 if (arglist != 0)
7372 {
7373 tree arg = TREE_VALUE (arglist);
7374 if (TREE_CODE (arg) != INTEGER_CST)
7375 error ("argument of `__builtin_args_info' must be constant");
7376 else
7377 {
7378 int wordnum = TREE_INT_CST_LOW (arg);
7379
7380 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
7381 error ("argument of `__builtin_args_info' out of range");
7382 else
7383 return GEN_INT (word_ptr[wordnum]);
7384 }
7385 }
7386 else
7387 error ("missing argument in `__builtin_args_info'");
7388
7389 return const0_rtx;
7390
7391#if 0
7392 for (i = 0; i < nwords; i++)
 7393	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
7394
7395 type = build_array_type (integer_type_node,
7396 build_index_type (build_int_2 (nwords, 0)));
7397 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
7398 TREE_CONSTANT (result) = 1;
7399 TREE_STATIC (result) = 1;
7400 result = build (INDIRECT_REF, build_pointer_type (type), result);
7401 TREE_CONSTANT (result) = 1;
7402 return expand_expr (result, NULL_RTX, VOIDmode, 0);
7403#endif
7404 }
7405
17bbab26 7406 /* Return the address of the first anonymous stack arg. */
7407 case BUILT_IN_NEXT_ARG:
7408 {
7409 tree fntype = TREE_TYPE (current_function_decl);
c4dfe0fc 7410
7411 if ((TYPE_ARG_TYPES (fntype) == 0
7412 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
7413 == void_type_node))
7414 && ! current_function_varargs)
7415 {
7416 error ("`va_start' used in function with fixed args");
7417 return const0_rtx;
7418 }
c4dfe0fc 7419
7420 if (arglist)
7421 {
7422 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
7423 tree arg = TREE_VALUE (arglist);
7424
7425 /* Strip off all nops for the sake of the comparison. This
7426 is not quite the same as STRIP_NOPS. It does more. */
7427 while (TREE_CODE (arg) == NOP_EXPR
7428 || TREE_CODE (arg) == CONVERT_EXPR
7429 || TREE_CODE (arg) == NON_LVALUE_EXPR)
7430 arg = TREE_OPERAND (arg, 0);
7431 if (arg != last_parm)
7432 warning ("second parameter of `va_start' not last named argument");
7433 }
5b4ff0de 7434 else if (! current_function_varargs)
7435 /* Evidently an out of date version of <stdarg.h>; can't validate
7436 va_start's second argument, but can still work as intended. */
7437 warning ("`__builtin_next_arg' called without an argument");
7438 }
7439
7440 return expand_binop (Pmode, add_optab,
7441 current_function_internal_arg_pointer,
7442 current_function_arg_offset_rtx,
7443 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7444
7445 case BUILT_IN_CLASSIFY_TYPE:
7446 if (arglist != 0)
7447 {
7448 tree type = TREE_TYPE (TREE_VALUE (arglist));
7449 enum tree_code code = TREE_CODE (type);
7450 if (code == VOID_TYPE)
7451 return GEN_INT (void_type_class);
7452 if (code == INTEGER_TYPE)
7453 return GEN_INT (integer_type_class);
7454 if (code == CHAR_TYPE)
7455 return GEN_INT (char_type_class);
7456 if (code == ENUMERAL_TYPE)
7457 return GEN_INT (enumeral_type_class);
7458 if (code == BOOLEAN_TYPE)
7459 return GEN_INT (boolean_type_class);
7460 if (code == POINTER_TYPE)
7461 return GEN_INT (pointer_type_class);
7462 if (code == REFERENCE_TYPE)
7463 return GEN_INT (reference_type_class);
7464 if (code == OFFSET_TYPE)
7465 return GEN_INT (offset_type_class);
7466 if (code == REAL_TYPE)
7467 return GEN_INT (real_type_class);
7468 if (code == COMPLEX_TYPE)
7469 return GEN_INT (complex_type_class);
7470 if (code == FUNCTION_TYPE)
7471 return GEN_INT (function_type_class);
7472 if (code == METHOD_TYPE)
7473 return GEN_INT (method_type_class);
7474 if (code == RECORD_TYPE)
7475 return GEN_INT (record_type_class);
7476 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7477 return GEN_INT (union_type_class);
7478 if (code == ARRAY_TYPE)
7479 {
7480 if (TYPE_STRING_FLAG (type))
7481 return GEN_INT (string_type_class);
7482 else
7483 return GEN_INT (array_type_class);
7484 }
7485 if (code == SET_TYPE)
7486 return GEN_INT (set_type_class);
7487 if (code == FILE_TYPE)
7488 return GEN_INT (file_type_class);
7489 if (code == LANG_TYPE)
7490 return GEN_INT (lang_type_class);
7491 }
7492 return GEN_INT (no_type_class);
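/* Editor's sketch (illustrative, not part of the original source):
   __builtin_classify_type as seen from C.  The argument undergoes the
   default promotions, so arrays decay to pointers here.  */
static void
classify_type_examples (void)
{
  int i = __builtin_classify_type (0);		/* integer_type_class */
  int r = __builtin_classify_type (0.0);	/* real_type_class */
  int p = __builtin_classify_type ("s");	/* pointer_type_class */
  (void) i;
  (void) r;
  (void) p;
}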
7493
7494 case BUILT_IN_CONSTANT_P:
7495 if (arglist == 0)
7496 return const0_rtx;
7497 else
7498 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7499 ? const1_rtx : const0_rtx);
7500
7501 case BUILT_IN_FRAME_ADDRESS:
7502 /* The argument must be a nonnegative integer constant.
7503 It counts the number of frames to scan up the stack.
7504 The value is the address of that frame. */
7505 case BUILT_IN_RETURN_ADDRESS:
7506 /* The argument must be a nonnegative integer constant.
7507 It counts the number of frames to scan up the stack.
7508 The value is the return address saved in that frame. */
7509 if (arglist == 0)
7510 /* Warning about missing arg was already issued. */
7511 return const0_rtx;
7512 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7513 {
7514 error ("invalid arg to `__builtin_return_address'");
7515 return const0_rtx;
7516 }
153c149b 7517 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7518 {
7519 error ("invalid arg to `__builtin_return_address'");
7520 return const0_rtx;
7521 }
7522 else
7523 {
7524 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7525 rtx tem = frame_pointer_rtx;
7526 int i;
7527
7528 /* Some machines need special handling before we can access arbitrary
7529 frames. For example, on the sparc, we must first flush all
7530 register windows to the stack. */
7531#ifdef SETUP_FRAME_ADDRESSES
7532 SETUP_FRAME_ADDRESSES ();
7533#endif
7534
7535 /* On the sparc, the return address is not in the frame, it is
7536 in a register. There is no way to access it off of the current
7537 frame pointer, but it can be accessed off the previous frame
7538 pointer by reading the value from the register window save
7539 area. */
7540#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7541 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7542 count--;
7543#endif
7544
7545 /* Scan back COUNT frames to the specified frame. */
7546 for (i = 0; i < count; i++)
7547 {
7548 /* Assume the dynamic chain pointer is in the word that
7549 the frame address points to, unless otherwise specified. */
7550#ifdef DYNAMIC_CHAIN_ADDRESS
7551 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7552#endif
7553 tem = memory_address (Pmode, tem);
7554 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7555 }
7556
7557 /* For __builtin_frame_address, return what we've got. */
7558 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7559 return tem;
7560
7561 /* For __builtin_return_address,
 7562	     get the return address from that frame. */
7563#ifdef RETURN_ADDR_RTX
7564 return RETURN_ADDR_RTX (count, tem);
7565#else
7566 tem = memory_address (Pmode,
7567 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7568 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7569#endif
7570 }
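/* Editor's sketch (illustrative, not part of the original source):
   source-level use of the frame walk implemented above.  The count
   must be a nonnegative integer constant; walking more than zero
   frames relies on every intervening frame keeping its dynamic chain
   pointer (e.g. a frame pointer) intact.  */
static void *
example_callers_return_address (void)
{
  return __builtin_return_address (1);	/* one frame up the dynamic chain */
}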
7571
7572 case BUILT_IN_ALLOCA:
7573 if (arglist == 0
7574 /* Arg could be non-integer if user redeclared this fcn wrong. */
7575 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 7576 break;
1ee86d15 7577
7578 /* Compute the argument. */
7579 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7580
7581 /* Allocate the desired space. */
1ee86d15 7582 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7583
7584 case BUILT_IN_FFS:
7585 /* If not optimizing, call the library function. */
98aad286 7586 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7587 break;
7588
7589 if (arglist == 0
7590 /* Arg could be non-integer if user redeclared this fcn wrong. */
7591 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 7592 break;
7593
7594 /* Compute the argument. */
7595 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7596 /* Compute ffs, into TARGET if possible.
7597 Set TARGET to wherever the result comes back. */
7598 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7599 ffs_optab, op0, target, 1);
7600 if (target == 0)
7601 abort ();
7602 return target;
7603
7604 case BUILT_IN_STRLEN:
7605 /* If not optimizing, call the library function. */
98aad286 7606 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7607 break;
7608
7609 if (arglist == 0
7610 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7611 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7b073ca6 7612 break;
7613 else
7614 {
7615 tree src = TREE_VALUE (arglist);
7616 tree len = c_strlen (src);
7617
7618 int align
7619 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7620
7621 rtx result, src_rtx, char_rtx;
7622 enum machine_mode insn_mode = value_mode, char_mode;
7623 enum insn_code icode;
7624
7625 /* If the length is known, just return it. */
7626 if (len != 0)
7627 return expand_expr (len, target, mode, 0);
7628
7629 /* If SRC is not a pointer type, don't do this operation inline. */
7630 if (align == 0)
7631 break;
7632
7633 /* Call a function if we can't compute strlen in the right mode. */
7634
7635 while (insn_mode != VOIDmode)
7636 {
7637 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7638 if (icode != CODE_FOR_nothing)
7639 break;
bbf6f052 7640
7641 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7642 }
7643 if (insn_mode == VOIDmode)
7644 break;
bbf6f052 7645
7646 /* Make a place to write the result of the instruction. */
7647 result = target;
7648 if (! (result != 0
7649 && GET_CODE (result) == REG
7650 && GET_MODE (result) == insn_mode
7651 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7652 result = gen_reg_rtx (insn_mode);
bbf6f052 7653
7654 /* Make sure the operands are acceptable to the predicates. */
7655
7656 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7657 result = gen_reg_rtx (insn_mode);
7658
7659 src_rtx = memory_address (BLKmode,
88f63c77 7660 expand_expr (src, NULL_RTX, ptr_mode,
7661 EXPAND_NORMAL));
7662 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7663 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7664
7665 char_rtx = const0_rtx;
7666 char_mode = insn_operand_mode[(int)icode][2];
7667 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7668 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7669
7670 emit_insn (GEN_FCN (icode) (result,
7671 gen_rtx (MEM, BLKmode, src_rtx),
7672 char_rtx, GEN_INT (align)));
7673
7674 /* Return the value in the proper mode for this function. */
7675 if (GET_MODE (result) == value_mode)
7676 return result;
7677 else if (target != 0)
7678 {
7679 convert_move (target, result, 0);
7680 return target;
7681 }
7682 else
7683 return convert_to_mode (value_mode, result, 0);
7684 }
7685
7686 case BUILT_IN_STRCPY:
e87b4f3f 7687 /* If not optimizing, call the library function. */
98aad286 7688 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7689 break;
7690
7691 if (arglist == 0
7692 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7693 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7694 || TREE_CHAIN (arglist) == 0
7695 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 7696 break;
ca695ac9 7697 else
db0e6d01 7698 {
ca695ac9 7699 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
e7c33f54 7700
7701 if (len == 0)
7702 break;
e7c33f54 7703
ca695ac9 7704 len = size_binop (PLUS_EXPR, len, integer_one_node);
e7c33f54 7705
ca695ac9 7706 chainon (arglist, build_tree_list (NULL_TREE, len));
7707 }
7708
 7709      /* Falls through. */
7710 case BUILT_IN_MEMCPY:
7711 /* If not optimizing, call the library function. */
98aad286 7712 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9 7713 break;
e7c33f54 7714
7715 if (arglist == 0
7716 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7717 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7718 || TREE_CHAIN (arglist) == 0
7719 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7720 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7721 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 7722 break;
ca695ac9 7723 else
e7c33f54 7724 {
7725 tree dest = TREE_VALUE (arglist);
7726 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7727 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e87b4f3f 7728
7729 int src_align
7730 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7731 int dest_align
7732 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7733 rtx dest_rtx, dest_mem, src_mem;
60bac6ea 7734
7735 /* If either SRC or DEST is not a pointer type, don't do
7736 this operation in-line. */
7737 if (src_align == 0 || dest_align == 0)
7738 {
7739 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7740 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7741 break;
7742 }
7743
88f63c77 7744 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
7745 dest_mem = gen_rtx (MEM, BLKmode,
7746 memory_address (BLKmode, dest_rtx));
7747 src_mem = gen_rtx (MEM, BLKmode,
7748 memory_address (BLKmode,
7749 expand_expr (src, NULL_RTX,
7750 ptr_mode,
7751 EXPAND_SUM)));
7752
7753 /* Copy word part most expediently. */
7754 emit_block_move (dest_mem, src_mem,
7755 expand_expr (len, NULL_RTX, VOIDmode, 0),
7756 MIN (src_align, dest_align));
7757 return dest_rtx;
7758 }
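/* Editor's sketch (illustrative, not part of the original source): the
   source-level effect of the strcpy-to-memcpy drop-through above when
   the source length is known.  The count is c_strlen plus one, so the
   terminating NUL is copied too.  */
static void
example_strcpy_expansion (char *d)
{
  /* strcpy (d, "abc") is expanded as if it were written: */
  __builtin_memcpy (d, "abc", 4);	/* strlen ("abc") + 1 bytes */
}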
7759
7760/* These comparison functions need an instruction that returns an actual
7761 index. An ordinary compare that just sets the condition codes
7762 is not enough. */
7763#ifdef HAVE_cmpstrsi
7764 case BUILT_IN_STRCMP:
7765 /* If not optimizing, call the library function. */
98aad286 7766 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7767 break;
7768
7769 if (arglist == 0
7770 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7771 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7772 || TREE_CHAIN (arglist) == 0
7773 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 7774 break;
7775 else if (!HAVE_cmpstrsi)
7776 break;
7777 {
7778 tree arg1 = TREE_VALUE (arglist);
7779 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7780 tree offset;
7781 tree len, len2;
7782
7783 len = c_strlen (arg1);
7784 if (len)
7785 len = size_binop (PLUS_EXPR, integer_one_node, len);
7786 len2 = c_strlen (arg2);
7787 if (len2)
7788 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7789
7790 /* If we don't have a constant length for the first, use the length
7791 of the second, if we know it. We don't require a constant for
7792 this case; some cost analysis could be done if both are available
7793 but neither is constant. For now, assume they're equally cheap.
7794
7795 If both strings have constant lengths, use the smaller. This
 7796	   could arise if optimization results in strcmp being called with
7797 two fixed strings, or if the code was machine-generated. We should
7798 add some code to the `memcmp' handler below to deal with such
7799 situations, someday. */
7800 if (!len || TREE_CODE (len) != INTEGER_CST)
7801 {
7802 if (len2)
7803 len = len2;
7804 else if (len == 0)
7805 break;
7806 }
7807 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7808 {
7809 if (tree_int_cst_lt (len2, len))
7810 len = len2;
7811 }
7812
7813 chainon (arglist, build_tree_list (NULL_TREE, len));
7814 }
7815
 7816      /* Falls through. */
7817 case BUILT_IN_MEMCMP:
7818 /* If not optimizing, call the library function. */
98aad286 7819 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7820 break;
7821
7822 if (arglist == 0
7823 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7824 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7825 || TREE_CHAIN (arglist) == 0
7826 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7827 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7828 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 7829 break;
7830 else if (!HAVE_cmpstrsi)
7831 break;
7832 {
7833 tree arg1 = TREE_VALUE (arglist);
7834 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7835 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7836 rtx result;
7837
7838 int arg1_align
7839 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7840 int arg2_align
7841 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7842 enum machine_mode insn_mode
7843 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
60bac6ea 7844
7845 /* If we don't have POINTER_TYPE, call the function. */
7846 if (arg1_align == 0 || arg2_align == 0)
7847 {
7848 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7849 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7850 break;
7851 }
60bac6ea 7852
7853 /* Make a place to write the result of the instruction. */
7854 result = target;
7855 if (! (result != 0
7856 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7857 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7858 result = gen_reg_rtx (insn_mode);
60bac6ea 7859
7860 emit_insn (gen_cmpstrsi (result,
7861 gen_rtx (MEM, BLKmode,
7862 expand_expr (arg1, NULL_RTX,
7863 ptr_mode,
7864 EXPAND_NORMAL)),
7865 gen_rtx (MEM, BLKmode,
7866 expand_expr (arg2, NULL_RTX,
7867 ptr_mode,
7868 EXPAND_NORMAL)),
7869 expand_expr (len, NULL_RTX, VOIDmode, 0),
7870 GEN_INT (MIN (arg1_align, arg2_align))));
60bac6ea 7871
7872 /* Return the value in the proper mode for this function. */
7873 mode = TYPE_MODE (TREE_TYPE (exp));
7874 if (GET_MODE (result) == mode)
7875 return result;
7876 else if (target != 0)
7877 {
7878 convert_move (target, result, 0);
7879 return target;
60bac6ea 7880 }
7881 else
7882 return convert_to_mode (mode, result, 0);
7883 }
60bac6ea 7884#else
7885 case BUILT_IN_STRCMP:
7886 case BUILT_IN_MEMCMP:
7887 break;
7888#endif
7889
7890 default: /* just do library call, if unknown builtin */
7891 error ("built-in function `%s' not currently supported",
7892 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7893 }
e87b4f3f 7894
7895 /* The switch statement above can drop through to cause the function
7896 to be called normally. */
e7c33f54 7897
7898 return expand_call (exp, target, ignore);
7899}
7900\f
7901/* Built-in functions to perform an untyped call and return. */
0006469d 7902
7903/* For each register that may be used for calling a function, this
7904 gives a mode used to copy the register's value. VOIDmode indicates
7905 the register is not used for calling a function. If the machine
7906 has register windows, this gives only the outbound registers.
7907 INCOMING_REGNO gives the corresponding inbound register. */
7908static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 7909
7910/* For each register that may be used for returning values, this gives
7911 a mode used to copy the register's value. VOIDmode indicates the
7912 register is not used for returning values. If the machine has
7913 register windows, this gives only the outbound registers.
7914 INCOMING_REGNO gives the corresponding inbound register. */
7915static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 7916
7917/* For each register that may be used for calling a function, this
7918 gives the offset of that register into the block returned by
 7919   __builtin_apply_args.  0 indicates that the register is not
7920 used for calling a function. */
7921static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
0006469d 7922
7923/* Return the offset of register REGNO into the block returned by
7924 __builtin_apply_args. This is not declared static, since it is
7925 needed in objc-act.c. */
0006469d 7926
7927int
7928apply_args_register_offset (regno)
7929 int regno;
7930{
7931 apply_args_size ();
0006469d 7932
7933 /* Arguments are always put in outgoing registers (in the argument
 7934     block) when that makes sense. */
7935#ifdef OUTGOING_REGNO
 7936  regno = OUTGOING_REGNO (regno);
7937#endif
7938 return apply_args_reg_offset[regno];
7939}
0006469d 7940
7941/* Return the size required for the block returned by __builtin_apply_args,
7942 and initialize apply_args_mode. */
0006469d 7943
7944static int
7945apply_args_size ()
7946{
7947 static int size = -1;
7948 int align, regno;
7949 enum machine_mode mode;
bbf6f052 7950
7951 /* The values computed by this function never change. */
7952 if (size < 0)
7953 {
7954 /* The first value is the incoming arg-pointer. */
7955 size = GET_MODE_SIZE (Pmode);
bbf6f052 7956
7957 /* The second value is the structure value address unless this is
7958 passed as an "invisible" first argument. */
7959 if (struct_value_rtx)
7960 size += GET_MODE_SIZE (Pmode);
7961
7962 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7963 if (FUNCTION_ARG_REGNO_P (regno))
bbf6f052 7964 {
7965 /* Search for the proper mode for copying this register's
7966 value. I'm not sure this is right, but it works so far. */
7967 enum machine_mode best_mode = VOIDmode;
7968
7969 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7970 mode != VOIDmode;
7971 mode = GET_MODE_WIDER_MODE (mode))
7972 if (HARD_REGNO_MODE_OK (regno, mode)
7973 && HARD_REGNO_NREGS (regno, mode) == 1)
7974 best_mode = mode;
7975
7976 if (best_mode == VOIDmode)
7977 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7978 mode != VOIDmode;
7979 mode = GET_MODE_WIDER_MODE (mode))
7980 if (HARD_REGNO_MODE_OK (regno, mode)
7981 && (mov_optab->handlers[(int) mode].insn_code
7982 != CODE_FOR_nothing))
7983 best_mode = mode;
7984
7985 mode = best_mode;
7986 if (mode == VOIDmode)
7987 abort ();
7988
7989 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7990 if (size % align != 0)
7991 size = CEIL (size, align) * align;
7992 apply_args_reg_offset[regno] = size;
7993 size += GET_MODE_SIZE (mode);
7994 apply_args_mode[regno] = mode;
7995 }
7996 else
7997 {
7998 apply_args_mode[regno] = VOIDmode;
7999 apply_args_reg_offset[regno] = 0;
bbf6f052 8000 }
8001 }
8002 return size;
8003}
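/* Editor's sketch (illustrative, not part of the original source): the
   rounding idiom used for each register slot above.  CEIL (size,
   align) * align rounds SIZE up to the next multiple of ALIGN, so a
   slot for a mode of that alignment may start at the new SIZE.  */
static int
example_round_up (int size, int align)
{
  if (size % align != 0)
    size = ((size + align - 1) / align) * align;
  return size;
}
/* example_round_up (6, 4) == 8; example_round_up (8, 4) == 8.  */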
bbf6f052 8004
8005/* Return the size required for the block returned by __builtin_apply,
8006 and initialize apply_result_mode. */
bbf6f052 8007
8008static int
8009apply_result_size ()
8010{
8011 static int size = -1;
8012 int align, regno;
8013 enum machine_mode mode;
bbf6f052 8014
8015 /* The values computed by this function never change. */
8016 if (size < 0)
8017 {
8018 size = 0;
bbf6f052 8019
8020 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8021 if (FUNCTION_VALUE_REGNO_P (regno))
8022 {
8023 /* Search for the proper mode for copying this register's
8024 value. I'm not sure this is right, but it works so far. */
8025 enum machine_mode best_mode = VOIDmode;
bbf6f052 8026
8027 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8028 mode != TImode;
8029 mode = GET_MODE_WIDER_MODE (mode))
8030 if (HARD_REGNO_MODE_OK (regno, mode))
8031 best_mode = mode;
bbf6f052 8032
8033 if (best_mode == VOIDmode)
8034 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8035 mode != VOIDmode;
8036 mode = GET_MODE_WIDER_MODE (mode))
8037 if (HARD_REGNO_MODE_OK (regno, mode)
8038 && (mov_optab->handlers[(int) mode].insn_code
8039 != CODE_FOR_nothing))
8040 best_mode = mode;
bbf6f052 8041
8042 mode = best_mode;
8043 if (mode == VOIDmode)
8044 abort ();
bbf6f052 8045
8046 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8047 if (size % align != 0)
8048 size = CEIL (size, align) * align;
8049 size += GET_MODE_SIZE (mode);
8050 apply_result_mode[regno] = mode;
8051 }
8052 else
ca695ac9 8053 apply_result_mode[regno] = VOIDmode;
bbf6f052 8054
8055 /* Allow targets that use untyped_call and untyped_return to override
8056 the size so that machine-specific information can be stored here. */
8057#ifdef APPLY_RESULT_SIZE
8058 size = APPLY_RESULT_SIZE;
8059#endif
8060 }
8061 return size;
8062}
bbf6f052 8063
8064#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8065/* Create a vector describing the result block RESULT. If SAVEP is true,
8066 the result block is used to save the values; otherwise it is used to
8067 restore the values. */
bbf6f052 8068
8069static rtx
8070result_vector (savep, result)
8071 int savep;
8072 rtx result;
8073{
8074 int regno, size, align, nelts;
8075 enum machine_mode mode;
8076 rtx reg, mem;
8077 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8078
8079 size = nelts = 0;
8080 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8081 if ((mode = apply_result_mode[regno]) != VOIDmode)
8082 {
8083 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8084 if (size % align != 0)
8085 size = CEIL (size, align) * align;
18992995 8086 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8087 mem = change_address (result, mode,
8088 plus_constant (XEXP (result, 0), size));
8089 savevec[nelts++] = (savep
8090 ? gen_rtx (SET, VOIDmode, mem, reg)
8091 : gen_rtx (SET, VOIDmode, reg, mem));
8092 size += GET_MODE_SIZE (mode);
bbf6f052 8093 }
8094 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8095}
8096#endif /* HAVE_untyped_call or HAVE_untyped_return */
bbf6f052 8097
8098/* Save the state required to perform an untyped call with the same
8099 arguments as were passed to the current function. */
8100
8101static rtx
8102expand_builtin_apply_args ()
8103{
8104 rtx registers;
8105 int size, align, regno;
8106 enum machine_mode mode;
8107
8108 /* Create a block where the arg-pointer, structure value address,
8109 and argument registers can be saved. */
8110 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8111
8112 /* Walk past the arg-pointer and structure value address. */
8113 size = GET_MODE_SIZE (Pmode);
8114 if (struct_value_rtx)
8115 size += GET_MODE_SIZE (Pmode);
8116
8117 /* Save each register used in calling a function to the block. */
8118 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8119 if ((mode = apply_args_mode[regno]) != VOIDmode)
bbf6f052 8120 {
8121 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8122 if (size % align != 0)
8123 size = CEIL (size, align) * align;
8124 emit_move_insn (change_address (registers, mode,
8125 plus_constant (XEXP (registers, 0),
8126 size)),
8127 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
8128 size += GET_MODE_SIZE (mode);
8129 }
8130
8131 /* Save the arg pointer to the block. */
8132 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8133 copy_to_reg (virtual_incoming_args_rtx));
8134 size = GET_MODE_SIZE (Pmode);
bbf6f052 8135
8136 /* Save the structure value address unless this is passed as an
8137 "invisible" first argument. */
8138 if (struct_value_incoming_rtx)
8139 {
8140 emit_move_insn (change_address (registers, Pmode,
8141 plus_constant (XEXP (registers, 0),
8142 size)),
8143 copy_to_reg (struct_value_incoming_rtx));
8144 size += GET_MODE_SIZE (Pmode);
8145 }
8146
8147 /* Return the address of the block. */
8148 return copy_addr_to_reg (XEXP (registers, 0));
8149}
8150
8151/* Perform an untyped call and save the state required to perform an
8152 untyped return of whatever value was returned by the given function. */
8153
8154static rtx
8155expand_builtin_apply (function, arguments, argsize)
8156 rtx function, arguments, argsize;
8157{
8158 int size, align, regno;
8159 enum machine_mode mode;
8160 rtx incoming_args, result, reg, dest, call_insn;
8161 rtx old_stack_level = 0;
b3f8cf4a 8162 rtx call_fusage = 0;
bbf6f052 8163
8164 /* Create a block where the return registers can be saved. */
8165 result = assign_stack_local (BLKmode, apply_result_size (), -1);
bbf6f052 8166
ca695ac9 8167 /* ??? The argsize value should be adjusted here. */
bbf6f052 8168
8169 /* Fetch the arg pointer from the ARGUMENTS block. */
8170 incoming_args = gen_reg_rtx (Pmode);
8171 emit_move_insn (incoming_args,
8172 gen_rtx (MEM, Pmode, arguments));
8173#ifndef STACK_GROWS_DOWNWARD
8174 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8175 incoming_args, 0, OPTAB_LIB_WIDEN);
8176#endif
8177
8178 /* Perform postincrements before actually calling the function. */
8179 emit_queue ();
46b68a37 8180
8181 /* Push a new argument block and copy the arguments. */
8182 do_pending_stack_adjust ();
8183 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
bbf6f052 8184
8185 /* Push a block of memory onto the stack to store the memory arguments.
8186 Save the address in a register, and copy the memory arguments. ??? I
 8187     haven't figured out how the calling convention macros affect this,
8188 but it's likely that the source and/or destination addresses in
8189 the block copy will need updating in machine specific ways. */
8190 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8191 emit_block_move (gen_rtx (MEM, BLKmode, dest),
8192 gen_rtx (MEM, BLKmode, incoming_args),
8193 argsize,
8194 PARM_BOUNDARY / BITS_PER_UNIT);
bbf6f052 8195
8196 /* Refer to the argument block. */
8197 apply_args_size ();
8198 arguments = gen_rtx (MEM, BLKmode, arguments);
8199
8200 /* Walk past the arg-pointer and structure value address. */
8201 size = GET_MODE_SIZE (Pmode);
8202 if (struct_value_rtx)
8203 size += GET_MODE_SIZE (Pmode);
8204
8205 /* Restore each of the registers previously saved. Make USE insns
8206 for each of these registers for use in making the call. */
8207 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8208 if ((mode = apply_args_mode[regno]) != VOIDmode)
8209 {
8210 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8211 if (size % align != 0)
8212 size = CEIL (size, align) * align;
8213 reg = gen_rtx (REG, mode, regno);
8214 emit_move_insn (reg,
8215 change_address (arguments, mode,
8216 plus_constant (XEXP (arguments, 0),
8217 size)));
8218
b3f8cf4a 8219 use_reg (&call_fusage, reg);
8220 size += GET_MODE_SIZE (mode);
8221 }
8222
8223 /* Restore the structure value address unless this is passed as an
8224 "invisible" first argument. */
8225 size = GET_MODE_SIZE (Pmode);
8226 if (struct_value_rtx)
8227 {
8228 rtx value = gen_reg_rtx (Pmode);
8229 emit_move_insn (value,
8230 change_address (arguments, Pmode,
8231 plus_constant (XEXP (arguments, 0),
8232 size)));
8233 emit_move_insn (struct_value_rtx, value);
8234 if (GET_CODE (struct_value_rtx) == REG)
b3f8cf4a 8235 use_reg (&call_fusage, struct_value_rtx);
8236 size += GET_MODE_SIZE (Pmode);
8237 }
bbf6f052 8238
ca695ac9 8239 /* All arguments and registers used for the call are set up by now! */
b3f8cf4a 8240 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
bbf6f052 8241
 8242 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
 8243 need be done, and we don't want to load it into a register as an
 8244 optimization, because prepare_call_address already did that if appropriate. */
8245 if (GET_CODE (function) != SYMBOL_REF)
8246 function = memory_address (FUNCTION_MODE, function);
bbf6f052 8247
ca695ac9
JB
8248 /* Generate the actual call instruction and save the return value. */
8249#ifdef HAVE_untyped_call
8250 if (HAVE_untyped_call)
8251 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
8252 result, result_vector (1, result)));
8253 else
8254#endif
8255#ifdef HAVE_call_value
8256 if (HAVE_call_value)
8257 {
8258 rtx valreg = 0;
bbf6f052 8259
ca695ac9
JB
8260 /* Locate the unique return register. It is not possible to
8261 express a call that sets more than one return register using
8262 call_value; use untyped_call for that. In fact, untyped_call
8263 only needs to save the return registers in the given block. */
8264 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8265 if ((mode = apply_result_mode[regno]) != VOIDmode)
8266 {
8267 if (valreg)
8268 abort (); /* HAVE_untyped_call required. */
8269 valreg = gen_rtx (REG, mode, regno);
8270 }
bbf6f052 8271
ca695ac9
JB
8272 emit_call_insn (gen_call_value (valreg,
8273 gen_rtx (MEM, FUNCTION_MODE, function),
8274 const0_rtx, NULL_RTX, const0_rtx));
bbf6f052 8275
ca695ac9
JB
8276 emit_move_insn (change_address (result, GET_MODE (valreg),
8277 XEXP (result, 0)),
8278 valreg);
8279 }
8280 else
8281#endif
8282 abort ();
bbf6f052 8283
b3f8cf4a 8284 /* Find the CALL insn we just emitted. */
ca695ac9
JB
8285 for (call_insn = get_last_insn ();
8286 call_insn && GET_CODE (call_insn) != CALL_INSN;
8287 call_insn = PREV_INSN (call_insn))
8288 ;
bbf6f052 8289
ca695ac9
JB
8290 if (! call_insn)
8291 abort ();
bbf6f052 8292
6d100794
RK
8293 /* Put the register usage information on the CALL. If there is already
8294 some usage information, put ours at the end. */
8295 if (CALL_INSN_FUNCTION_USAGE (call_insn))
8296 {
8297 rtx link;
8298
8299 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
8300 link = XEXP (link, 1))
8301 ;
8302
8303 XEXP (link, 1) = call_fusage;
8304 }
8305 else
8306 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
e7c33f54 8307
ca695ac9
JB
8308 /* Restore the stack. */
8309 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
e7c33f54 8310
ca695ac9
JB
8311 /* Return the address of the result block. */
8312 return copy_addr_to_reg (XEXP (result, 0));
8313}
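/* An illustrative sketch only, not compiled here: the kind of user code
   expand_builtin_apply implements.  `target_fn' and the 64-byte
   argument-block size are hypothetical.  */
#if 0
void target_fn ();

void *
apply_example ()
{
  /* Copy up to 64 bytes of our own incoming arguments into a fresh
     argument block, call `target_fn' with it, and return the address
     of the block in which its return registers were saved.  */
  return __builtin_apply (target_fn, __builtin_apply_args (), 64);
}
#endif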
e7c33f54 8314
ca695ac9 8315/* Perform an untyped return. */
e7c33f54 8316
ca695ac9
JB
8317static void
8318expand_builtin_return (result)
8319 rtx result;
8320{
8321 int size, align, regno;
8322 enum machine_mode mode;
8323 rtx reg;
b3f8cf4a 8324 rtx call_fusage = 0;
e7c33f54 8325
ca695ac9
JB
8326 apply_result_size ();
8327 result = gen_rtx (MEM, BLKmode, result);
e7c33f54 8328
ca695ac9
JB
8329#ifdef HAVE_untyped_return
8330 if (HAVE_untyped_return)
8331 {
8332 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
8333 emit_barrier ();
8334 return;
8335 }
8336#endif
e7c33f54 8337
ca695ac9
JB
8338 /* Restore the return value and note that each value is used. */
8339 size = 0;
8340 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8341 if ((mode = apply_result_mode[regno]) != VOIDmode)
8342 {
8343 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8344 if (size % align != 0)
8345 size = CEIL (size, align) * align;
8346 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8347 emit_move_insn (reg,
8348 change_address (result, mode,
8349 plus_constant (XEXP (result, 0),
8350 size)));
e7c33f54 8351
b3f8cf4a 8352 push_to_sequence (call_fusage);
ca695ac9 8353 emit_insn (gen_rtx (USE, VOIDmode, reg));
b3f8cf4a 8354 call_fusage = get_insns ();
ca695ac9
JB
8355 end_sequence ();
8356 size += GET_MODE_SIZE (mode);
8357 }
e7c33f54 8358
ca695ac9 8359 /* Put the USE insns before the return. */
b3f8cf4a 8360 emit_insns (call_fusage);
e7c33f54 8361
ca695ac9
JB
8362 /* Return whatever values was restored by jumping directly to the end
8363 of the function. */
8364 expand_null_return ();
8365}
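/* An illustrative sketch only: __builtin_return, which the function above
   expands, re-returns the register values saved in a block produced by
   __builtin_apply.  `report' and the 64-byte size are hypothetical.  */
#if 0
double report ();

double
report_wrapper ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) report, args, 64);

  /* Return to our caller exactly what `report' returned, in whatever
     registers it used.  */
  __builtin_return (result);
}
#endif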
8366\f
8367/* Expand code for a post- or pre- increment or decrement
8368 and return the RTX for the result.
8369 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
e7c33f54 8370
ca695ac9
JB
8371static rtx
8372expand_increment (exp, post)
8373 register tree exp;
8374 int post;
8375{
8376 register rtx op0, op1;
8377 register rtx temp, value;
8378 register tree incremented = TREE_OPERAND (exp, 0);
8379 optab this_optab = add_optab;
8380 int icode;
8381 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8382 int op0_is_copy = 0;
8383 int single_insn = 0;
a97f5a86
RS
8384 /* 1 means we can't store into OP0 directly,
8385 because it is a subreg narrower than a word,
8386 and we don't dare clobber the rest of the word. */
8387 int bad_subreg = 0;
e7c33f54 8388
ca695ac9 8389 if (output_bytecode)
c02bd5d9
JB
8390 {
8391 bc_expand_expr (exp);
8392 return NULL_RTX;
8393 }
e7c33f54 8394
ca695ac9
JB
8395 /* Stabilize any component ref that might need to be
8396 evaluated more than once below. */
8397 if (!post
8398 || TREE_CODE (incremented) == BIT_FIELD_REF
8399 || (TREE_CODE (incremented) == COMPONENT_REF
8400 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8401 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8402 incremented = stabilize_reference (incremented);
8403 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8404 ones into save exprs so that they don't accidentally get evaluated
8405 more than once by the code below. */
8406 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8407 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8408 incremented = save_expr (incremented);
bbf6f052 8409
ca695ac9
JB
8410 /* Compute the operands as RTX.
8411 Note whether OP0 is the actual lvalue or a copy of it:
8412 I believe it is a copy iff it is a register or subreg
8413 and insns were generated in computing it. */
bbf6f052 8414
ca695ac9
JB
8415 temp = get_last_insn ();
8416 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
bbf6f052 8417
ca695ac9
JB
8418 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
 8419 in place but instead must do sign- or zero-extension during assignment,
8420 so we copy it into a new register and let the code below use it as
8421 a copy.
bbf6f052 8422
ca695ac9
JB
 8423 Note that we can safely modify this SUBREG since it is known not to be
8424 shared (it was made by the expand_expr call above). */
bbf6f052 8425
ca695ac9 8426 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
3e073e72
RK
8427 {
8428 if (post)
8429 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8430 else
8431 bad_subreg = 1;
8432 }
a97f5a86
RS
8433 else if (GET_CODE (op0) == SUBREG
8434 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
79777b79
RK
8435 {
8436 /* We cannot increment this SUBREG in place. If we are
8437 post-incrementing, get a copy of the old value. Otherwise,
8438 just mark that we cannot increment in place. */
8439 if (post)
8440 op0 = copy_to_reg (op0);
8441 else
8442 bad_subreg = 1;
8443 }
bbf6f052 8444
ca695ac9
JB
8445 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8446 && temp != get_last_insn ());
8447 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052 8448
ca695ac9
JB
8449 /* Decide whether incrementing or decrementing. */
8450 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8451 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8452 this_optab = sub_optab;
bbf6f052 8453
ca695ac9
JB
8454 /* Convert decrement by a constant into a negative increment. */
8455 if (this_optab == sub_optab
8456 && GET_CODE (op1) == CONST_INT)
8457 {
8458 op1 = GEN_INT (- INTVAL (op1));
8459 this_optab = add_optab;
8460 }
bbf6f052 8461
ca695ac9
JB
8462 /* For a preincrement, see if we can do this with a single instruction. */
8463 if (!post)
8464 {
8465 icode = (int) this_optab->handlers[(int) mode].insn_code;
8466 if (icode != (int) CODE_FOR_nothing
8467 /* Make sure that OP0 is valid for operands 0 and 1
8468 of the insn we want to queue. */
8469 && (*insn_operand_predicate[icode][0]) (op0, mode)
8470 && (*insn_operand_predicate[icode][1]) (op0, mode)
8471 && (*insn_operand_predicate[icode][2]) (op1, mode))
8472 single_insn = 1;
8473 }
bbf6f052 8474
ca695ac9
JB
8475 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8476 then we cannot just increment OP0. We must therefore contrive to
8477 increment the original value. Then, for postincrement, we can return
8478 OP0 since it is a copy of the old value. For preincrement, expand here
a97f5a86
RS
8479 unless we can do it with a single insn.
8480
8481 Likewise if storing directly into OP0 would clobber high bits
8482 we need to preserve (bad_subreg). */
8483 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
ca695ac9
JB
8484 {
8485 /* This is the easiest way to increment the value wherever it is.
8486 Problems with multiple evaluation of INCREMENTED are prevented
8487 because either (1) it is a component_ref or preincrement,
8488 in which case it was stabilized above, or (2) it is an array_ref
8489 with constant index in an array in a register, which is
8490 safe to reevaluate. */
8491 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8492 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8493 ? MINUS_EXPR : PLUS_EXPR),
8494 TREE_TYPE (exp),
8495 incremented,
8496 TREE_OPERAND (exp, 1));
8497 temp = expand_assignment (incremented, newexp, ! post, 0);
8498 return post ? op0 : temp;
8499 }
bbf6f052 8500
ca695ac9
JB
8501 if (post)
8502 {
8503 /* We have a true reference to the value in OP0.
8504 If there is an insn to add or subtract in this mode, queue it.
8505 Queueing the increment insn avoids the register shuffling
8506 that often results if we must increment now and first save
8507 the old value for subsequent use. */
bbf6f052 8508
ca695ac9
JB
8509#if 0 /* Turned off to avoid making extra insn for indexed memref. */
8510 op0 = stabilize (op0);
8511#endif
bbf6f052 8512
ca695ac9
JB
8513 icode = (int) this_optab->handlers[(int) mode].insn_code;
8514 if (icode != (int) CODE_FOR_nothing
8515 /* Make sure that OP0 is valid for operands 0 and 1
8516 of the insn we want to queue. */
8517 && (*insn_operand_predicate[icode][0]) (op0, mode)
8518 && (*insn_operand_predicate[icode][1]) (op0, mode))
8519 {
8520 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8521 op1 = force_reg (mode, op1);
bbf6f052 8522
ca695ac9
JB
8523 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8524 }
8525 }
bbf6f052 8526
ca695ac9
JB
8527 /* Preincrement, or we can't increment with one simple insn. */
8528 if (post)
8529 /* Save a copy of the value before inc or dec, to return it later. */
8530 temp = value = copy_to_reg (op0);
8531 else
8532 /* Arrange to return the incremented value. */
8533 /* Copy the rtx because expand_binop will protect from the queue,
8534 and the results of that would be invalid for us to return
8535 if our caller does emit_queue before using our result. */
8536 temp = copy_rtx (value = op0);
bbf6f052 8537
ca695ac9
JB
8538 /* Increment however we can. */
8539 op1 = expand_binop (mode, this_optab, value, op1, op0,
8540 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8541 /* Make sure the value is stored into OP0. */
8542 if (op1 != op0)
8543 emit_move_insn (op0, op1);
bbf6f052 8544
ca695ac9
JB
8545 return temp;
8546}
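/* An illustrative sketch only: the value-vs-side-effect distinction the
   function above implements, written as plain C.  The helper name is
   hypothetical.  */
#if 0
static int
post_increment_value (xp)
     int *xp;
{
  int old = *xp;	/* temp = value = copy_to_reg (op0);  */
  *xp = old + 1;	/* expand_binop + emit_move_insn;  */
  return old;		/* `(*xp)++' yields OLD; `++*xp' would
			   yield OLD + 1.  */
}
#endif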
8547\f
8548/* Expand all function calls contained within EXP, innermost ones first.
8549 But don't look within expressions that have sequence points.
8550 For each CALL_EXPR, record the rtx for its value
8551 in the CALL_EXPR_RTL field. */
bbf6f052 8552
ca695ac9
JB
8553static void
8554preexpand_calls (exp)
8555 tree exp;
8556{
8557 register int nops, i;
8558 int type = TREE_CODE_CLASS (TREE_CODE (exp));
bbf6f052 8559
ca695ac9
JB
8560 if (! do_preexpand_calls)
8561 return;
bbf6f052 8562
ca695ac9 8563 /* Only expressions and references can contain calls. */
bbf6f052 8564
ca695ac9
JB
8565 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8566 return;
bbf6f052 8567
ca695ac9
JB
8568 switch (TREE_CODE (exp))
8569 {
8570 case CALL_EXPR:
8571 /* Do nothing if already expanded. */
8572 if (CALL_EXPR_RTL (exp) != 0)
8573 return;
bbf6f052 8574
ca695ac9
JB
8575 /* Do nothing to built-in functions. */
8576 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8577 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6676e72f
RK
8578 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8579 /* Do nothing if the call returns a variable-sized object. */
 8580 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
ca695ac9
JB
8581 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8582 return;
bbf6f052 8583
ca695ac9
JB
8584 case COMPOUND_EXPR:
8585 case COND_EXPR:
8586 case TRUTH_ANDIF_EXPR:
8587 case TRUTH_ORIF_EXPR:
8588 /* If we find one of these, then we can be sure
8589 the adjust will be done for it (since it makes jumps).
8590 Do it now, so that if this is inside an argument
8591 of a function, we don't get the stack adjustment
8592 after some other args have already been pushed. */
8593 do_pending_stack_adjust ();
8594 return;
bbf6f052 8595
ca695ac9
JB
8596 case BLOCK:
8597 case RTL_EXPR:
8598 case WITH_CLEANUP_EXPR:
8599 return;
bbf6f052 8600
ca695ac9
JB
8601 case SAVE_EXPR:
8602 if (SAVE_EXPR_RTL (exp) != 0)
8603 return;
8604 }
bbf6f052 8605
ca695ac9
JB
8606 nops = tree_code_length[(int) TREE_CODE (exp)];
8607 for (i = 0; i < nops; i++)
8608 if (TREE_OPERAND (exp, i) != 0)
8609 {
8610 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8611 if (type == 'e' || type == '<' || type == '1' || type == '2'
8612 || type == 'r')
8613 preexpand_calls (TREE_OPERAND (exp, i));
8614 }
bbf6f052
RK
8615}
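/* An illustrative sketch only: which calls the walk above pre-expands.
   The expressions are hypothetical.  In

	a[f (i)] + g (j)

   both calls are expanded, innermost first, before the addition, so
   their stack adjustments cannot fall between pushed arguments.  In

	(f (i), g (j))

   the COMPOUND_EXPR carries a sequence point, so the walk stops there
   and the calls are expanded in source order later.  */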
8616\f
ca695ac9
JB
8617/* At the start of a function, record that we have no previously-pushed
8618 arguments waiting to be popped. */
0006469d 8619
ca695ac9
JB
8620void
8621init_pending_stack_adjust ()
8622{
8623 pending_stack_adjust = 0;
8624}
fb2ca25a 8625
ca695ac9
JB
 8626/* When exiting from a function, if safe, clear out any pending stack adjust
8627 so the adjustment won't get done. */
904762c8 8628
ca695ac9
JB
8629void
8630clear_pending_stack_adjust ()
fb2ca25a 8631{
ca695ac9
JB
8632#ifdef EXIT_IGNORE_STACK
8633 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8634 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8635 && ! flag_inline_functions)
8636 pending_stack_adjust = 0;
fb2ca25a 8637#endif
fb2ca25a
KKT
8638}
8639
ca695ac9
JB
8640/* Pop any previously-pushed arguments that have not been popped yet. */
8641
8642void
8643do_pending_stack_adjust ()
8644{
8645 if (inhibit_defer_pop == 0)
8646 {
8647 if (pending_stack_adjust != 0)
8648 adjust_stack (GEN_INT (pending_stack_adjust));
8649 pending_stack_adjust = 0;
8650 }
8651}
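/* An illustrative sketch only: the saving from deferring pops.  For the
   hypothetical source

	f (1);
	g (2);

   the arguments of `f' need not be popped before `g' is called;
   pending_stack_adjust accumulates both argument sizes, and a single

	adjust_stack (GEN_INT (8));

   (assuming two 4-byte arguments) is emitted here in place of two
   separate pops.  */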
8652
5dab5552
MS
 8653/* Defer the expansion of all cleanups up to OLD_CLEANUPS.
8654 Returns the cleanups to be performed. */
8655
8656static tree
8657defer_cleanups_to (old_cleanups)
8658 tree old_cleanups;
8659{
8660 tree new_cleanups = NULL_TREE;
8661 tree cleanups = cleanups_this_call;
8662 tree last = NULL_TREE;
8663
8664 while (cleanups_this_call != old_cleanups)
8665 {
61d6b1cc 8666 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
4ea8537b 8667 last = cleanups_this_call;
5dab5552
MS
8668 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8669 }
8670
8671 if (last)
8672 {
8673 /* Remove the list from the chain of cleanups. */
8674 TREE_CHAIN (last) = NULL_TREE;
8675
 8676 /* Reverse them so that we can build them in the right order. */
8677 cleanups = nreverse (cleanups);
8678
8679 while (cleanups)
8680 {
8681 if (new_cleanups)
8682 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
8683 TREE_VALUE (cleanups), new_cleanups);
8684 else
8685 new_cleanups = TREE_VALUE (cleanups);
8686
8687 cleanups = TREE_CHAIN (cleanups);
8688 }
8689 }
8690
8691 return new_cleanups;
8692}
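/* An illustrative sketch only: why cleanups are deferred and made
   conditional.  For a hypothetical C++ condition

	if (a && f (T ()))

   the temporary T is constructed only when `a' is true, so its cleanup
   may run only on that path.  The TRUTH_ANDIF_EXPR case of do_jump,
   below, uses defer_cleanups_to and an RTL_EXPR flag to arrange,
   roughly, the equivalent of

	flag = 0;
	if (! a) goto out;
	flag = 1;
	if (! f (temp)) goto out;
	...
      out:
	if (flag) cleanup (temp);
  */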
8693
ca695ac9
JB
8694/* Expand all cleanups up to OLD_CLEANUPS.
8695 Needed here, and also for language-dependent calls. */
904762c8 8696
ca695ac9
JB
8697void
8698expand_cleanups_to (old_cleanups)
8699 tree old_cleanups;
0006469d 8700{
ca695ac9 8701 while (cleanups_this_call != old_cleanups)
0006469d 8702 {
61d6b1cc 8703 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
d3158f1a 8704 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
ca695ac9
JB
8705 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8706 }
8707}
8708\f
8709/* Expand conditional expressions. */
0006469d 8710
ca695ac9
JB
8711/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8712 LABEL is an rtx of code CODE_LABEL, in this function and all the
8713 functions here. */
0006469d 8714
ca695ac9
JB
8715void
8716jumpifnot (exp, label)
8717 tree exp;
8718 rtx label;
8719{
8720 do_jump (exp, label, NULL_RTX);
8721}
0006469d 8722
ca695ac9 8723/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
0006469d 8724
ca695ac9
JB
8725void
8726jumpif (exp, label)
8727 tree exp;
8728 rtx label;
8729{
8730 do_jump (exp, NULL_RTX, label);
8731}
0006469d 8732
ca695ac9
JB
8733/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8734 the result is zero, or IF_TRUE_LABEL if the result is one.
8735 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8736 meaning fall through in that case.
0006469d 8737
ca695ac9
JB
8738 do_jump always does any pending stack adjust except when it does not
8739 actually perform a jump. An example where there is no jump
8740 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
0006469d 8741
ca695ac9
JB
8742 This function is responsible for optimizing cases such as
8743 &&, || and comparison operators in EXP. */
904762c8 8744
ca695ac9
JB
8745void
8746do_jump (exp, if_false_label, if_true_label)
8747 tree exp;
8748 rtx if_false_label, if_true_label;
0006469d 8749{
ca695ac9
JB
8750 register enum tree_code code = TREE_CODE (exp);
8751 /* Some cases need to create a label to jump to
8752 in order to properly fall through.
8753 These cases set DROP_THROUGH_LABEL nonzero. */
8754 rtx drop_through_label = 0;
8755 rtx temp;
8756 rtx comparison = 0;
8757 int i;
8758 tree type;
2f6e6d22 8759 enum machine_mode mode;
0006469d 8760
ca695ac9 8761 emit_queue ();
0006469d 8762
ca695ac9
JB
8763 switch (code)
8764 {
8765 case ERROR_MARK:
8766 break;
0006469d 8767
ca695ac9
JB
8768 case INTEGER_CST:
8769 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8770 if (temp)
8771 emit_jump (temp);
8772 break;
0006469d 8773
ca695ac9
JB
8774#if 0
 8775 /* This is not true with #pragma weak. */
8776 case ADDR_EXPR:
8777 /* The address of something can never be zero. */
8778 if (if_true_label)
8779 emit_jump (if_true_label);
8780 break;
8781#endif
0006469d 8782
ca695ac9
JB
8783 case NOP_EXPR:
8784 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8785 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8786 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8787 goto normal;
8788 case CONVERT_EXPR:
8789 /* If we are narrowing the operand, we have to do the compare in the
8790 narrower mode. */
8791 if ((TYPE_PRECISION (TREE_TYPE (exp))
8792 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8793 goto normal;
8794 case NON_LVALUE_EXPR:
8795 case REFERENCE_EXPR:
8796 case ABS_EXPR:
8797 case NEGATE_EXPR:
8798 case LROTATE_EXPR:
8799 case RROTATE_EXPR:
8800 /* These cannot change zero->non-zero or vice versa. */
8801 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8802 break;
0006469d 8803
ca695ac9
JB
8804#if 0
 8805 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
8806 a test and can be longer if the test is eliminated. */
8807 case PLUS_EXPR:
8808 /* Reduce to minus. */
8809 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8810 TREE_OPERAND (exp, 0),
8811 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8812 TREE_OPERAND (exp, 1))));
8813 /* Process as MINUS. */
0006469d 8814#endif
0006469d 8815
ca695ac9
JB
8816 case MINUS_EXPR:
8817 /* Non-zero iff operands of minus differ. */
8818 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8819 TREE_OPERAND (exp, 0),
8820 TREE_OPERAND (exp, 1)),
8821 NE, NE);
8822 break;
904762c8 8823
ca695ac9
JB
8824 case BIT_AND_EXPR:
8825 /* If we are AND'ing with a small constant, do this comparison in the
8826 smallest type that fits. If the machine doesn't have comparisons
8827 that small, it will be converted back to the wider comparison.
8828 This helps if we are testing the sign bit of a narrower object.
8829 combine can't do this for us because it can't know whether a
8830 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
0006469d 8831
ca695ac9
JB
8832 if (! SLOW_BYTE_ACCESS
8833 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8834 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8835 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
2f6e6d22
RK
8836 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8837 && (type = type_for_mode (mode, 1)) != 0
ca695ac9
JB
8838 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8839 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8840 != CODE_FOR_nothing))
8841 {
8842 do_jump (convert (type, exp), if_false_label, if_true_label);
8843 break;
8844 }
8845 goto normal;
904762c8 8846
ca695ac9
JB
8847 case TRUTH_NOT_EXPR:
8848 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8849 break;
0006469d 8850
ca695ac9 8851 case TRUTH_ANDIF_EXPR:
7ee055f4
MS
8852 {
8853 rtx seq1, seq2;
8854 tree cleanups, old_cleanups;
8855
8856 if (if_false_label == 0)
8857 if_false_label = drop_through_label = gen_label_rtx ();
8858 start_sequence ();
8859 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8860 seq1 = get_insns ();
8861 end_sequence ();
8862
8863 old_cleanups = cleanups_this_call;
8864 start_sequence ();
8865 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8866 seq2 = get_insns ();
8867 end_sequence ();
8868
8869 cleanups = defer_cleanups_to (old_cleanups);
8870 if (cleanups)
8871 {
8872 rtx flag = gen_reg_rtx (word_mode);
8873 tree new_cleanups;
8874 tree cond;
8875
8876 /* Flag cleanups as not needed. */
8877 emit_move_insn (flag, const0_rtx);
8878 emit_insns (seq1);
8879
8880 /* Flag cleanups as needed. */
8881 emit_move_insn (flag, const1_rtx);
8882 emit_insns (seq2);
8883
 8884 /* Convert FLAG, which is an rtx, into a tree. */
8885 cond = make_node (RTL_EXPR);
8886 TREE_TYPE (cond) = integer_type_node;
8887 RTL_EXPR_RTL (cond) = flag;
8888 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
8889
8890 new_cleanups = build (COND_EXPR, void_type_node,
8891 truthvalue_conversion (cond),
8892 cleanups, integer_zero_node);
8893 new_cleanups = fold (new_cleanups);
8894
8895 /* Now add in the conditionalized cleanups. */
8896 cleanups_this_call
8897 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
8898 (*interim_eh_hook) (NULL_TREE);
8899 }
8900 else
8901 {
8902 emit_insns (seq1);
8903 emit_insns (seq2);
8904 }
8905 }
ca695ac9 8906 break;
0006469d 8907
ca695ac9 8908 case TRUTH_ORIF_EXPR:
7ee055f4
MS
8909 {
8910 rtx seq1, seq2;
8911 tree cleanups, old_cleanups;
8912
8913 if (if_true_label == 0)
8914 if_true_label = drop_through_label = gen_label_rtx ();
8915 start_sequence ();
8916 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8917 seq1 = get_insns ();
8918 end_sequence ();
8919
8920 old_cleanups = cleanups_this_call;
8921 start_sequence ();
8922 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8923 seq2 = get_insns ();
8924 end_sequence ();
8925
8926 cleanups = defer_cleanups_to (old_cleanups);
8927 if (cleanups)
8928 {
8929 rtx flag = gen_reg_rtx (word_mode);
8930 tree new_cleanups;
8931 tree cond;
8932
8933 /* Flag cleanups as not needed. */
8934 emit_move_insn (flag, const0_rtx);
8935 emit_insns (seq1);
8936
8937 /* Flag cleanups as needed. */
8938 emit_move_insn (flag, const1_rtx);
8939 emit_insns (seq2);
8940
 8941 /* Convert FLAG, which is an rtx, into a tree. */
8942 cond = make_node (RTL_EXPR);
8943 TREE_TYPE (cond) = integer_type_node;
8944 RTL_EXPR_RTL (cond) = flag;
8945 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
8946
8947 new_cleanups = build (COND_EXPR, void_type_node,
8948 truthvalue_conversion (cond),
8949 cleanups, integer_zero_node);
8950 new_cleanups = fold (new_cleanups);
8951
8952 /* Now add in the conditionalized cleanups. */
8953 cleanups_this_call
8954 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
8955 (*interim_eh_hook) (NULL_TREE);
8956 }
8957 else
8958 {
8959 emit_insns (seq1);
8960 emit_insns (seq2);
8961 }
8962 }
ca695ac9 8963 break;
0006469d 8964
ca695ac9 8965 case COMPOUND_EXPR:
0088fcb1 8966 push_temp_slots ();
ca695ac9
JB
8967 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8968 free_temp_slots ();
0088fcb1 8969 pop_temp_slots ();
ca695ac9
JB
8970 emit_queue ();
8971 do_pending_stack_adjust ();
8972 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8973 break;
0006469d 8974
ca695ac9
JB
8975 case COMPONENT_REF:
8976 case BIT_FIELD_REF:
8977 case ARRAY_REF:
8978 {
8979 int bitsize, bitpos, unsignedp;
8980 enum machine_mode mode;
8981 tree type;
8982 tree offset;
8983 int volatilep = 0;
0006469d 8984
ca695ac9
JB
8985 /* Get description of this reference. We don't actually care
8986 about the underlying object here. */
8987 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8988 &mode, &unsignedp, &volatilep);
0006469d 8989
ca695ac9
JB
8990 type = type_for_size (bitsize, unsignedp);
8991 if (! SLOW_BYTE_ACCESS
8992 && type != 0 && bitsize >= 0
8993 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8994 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8995 != CODE_FOR_nothing))
8996 {
8997 do_jump (convert (type, exp), if_false_label, if_true_label);
8998 break;
8999 }
9000 goto normal;
9001 }
0006469d 9002
ca695ac9
JB
9003 case COND_EXPR:
9004 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9005 if (integer_onep (TREE_OPERAND (exp, 1))
9006 && integer_zerop (TREE_OPERAND (exp, 2)))
9007 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
904762c8 9008
ca695ac9
JB
9009 else if (integer_zerop (TREE_OPERAND (exp, 1))
9010 && integer_onep (TREE_OPERAND (exp, 2)))
9011 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0006469d 9012
ca695ac9
JB
9013 else
9014 {
9015 register rtx label1 = gen_label_rtx ();
9016 drop_through_label = gen_label_rtx ();
9017 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9018 /* Now the THEN-expression. */
9019 do_jump (TREE_OPERAND (exp, 1),
9020 if_false_label ? if_false_label : drop_through_label,
9021 if_true_label ? if_true_label : drop_through_label);
9022 /* In case the do_jump just above never jumps. */
9023 do_pending_stack_adjust ();
9024 emit_label (label1);
9025 /* Now the ELSE-expression. */
9026 do_jump (TREE_OPERAND (exp, 2),
9027 if_false_label ? if_false_label : drop_through_label,
9028 if_true_label ? if_true_label : drop_through_label);
9029 }
9030 break;
0006469d 9031
ca695ac9
JB
9032 case EQ_EXPR:
9033 if (integer_zerop (TREE_OPERAND (exp, 1)))
9034 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0766f239
RS
9035 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9036 == MODE_INT)
9037 &&
9038 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9039 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
9040 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
ca695ac9
JB
9041 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9042 else
9043 comparison = compare (exp, EQ, EQ);
9044 break;
0006469d 9045
ca695ac9
JB
9046 case NE_EXPR:
9047 if (integer_zerop (TREE_OPERAND (exp, 1)))
9048 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
0766f239
RS
9049 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9050 == MODE_INT)
9051 &&
9052 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9053 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
9054 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
ca695ac9
JB
9055 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9056 else
9057 comparison = compare (exp, NE, NE);
9058 break;
0006469d 9059
ca695ac9
JB
9060 case LT_EXPR:
9061 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9062 == MODE_INT)
9063 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9064 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9065 else
9066 comparison = compare (exp, LT, LTU);
9067 break;
0006469d 9068
ca695ac9
JB
9069 case LE_EXPR:
9070 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9071 == MODE_INT)
9072 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9073 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9074 else
9075 comparison = compare (exp, LE, LEU);
9076 break;
0006469d 9077
ca695ac9
JB
9078 case GT_EXPR:
9079 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9080 == MODE_INT)
9081 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9082 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9083 else
9084 comparison = compare (exp, GT, GTU);
9085 break;
0006469d 9086
ca695ac9
JB
9087 case GE_EXPR:
9088 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9089 == MODE_INT)
9090 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9091 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9092 else
9093 comparison = compare (exp, GE, GEU);
9094 break;
0006469d 9095
ca695ac9
JB
9096 default:
9097 normal:
9098 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9099#if 0
 9100 /* This is no longer needed and produces poor code, since it makes
 9101 comparisons and tests from non-SI objects use different code
 9102 sequences. */
9103 /* Copy to register to avoid generating bad insns by cse
9104 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9105 if (!cse_not_expected && GET_CODE (temp) == MEM)
9106 temp = copy_to_reg (temp);
9107#endif
9108 do_pending_stack_adjust ();
9109 if (GET_CODE (temp) == CONST_INT)
9110 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9111 else if (GET_CODE (temp) == LABEL_REF)
9112 comparison = const_true_rtx;
9113 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9114 && !can_compare_p (GET_MODE (temp)))
9115 /* Note swapping the labels gives us not-equal. */
9116 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9117 else if (GET_MODE (temp) != VOIDmode)
9118 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9119 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9120 GET_MODE (temp), NULL_RTX, 0);
9121 else
9122 abort ();
9123 }
0006469d 9124
ca695ac9
JB
9125 /* Do any postincrements in the expression that was tested. */
9126 emit_queue ();
0006469d 9127
ca695ac9
JB
9128 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9129 straight into a conditional jump instruction as the jump condition.
9130 Otherwise, all the work has been done already. */
0006469d 9131
ca695ac9 9132 if (comparison == const_true_rtx)
0006469d 9133 {
ca695ac9
JB
9134 if (if_true_label)
9135 emit_jump (if_true_label);
0006469d 9136 }
ca695ac9
JB
9137 else if (comparison == const0_rtx)
9138 {
9139 if (if_false_label)
9140 emit_jump (if_false_label);
9141 }
9142 else if (comparison)
9143 do_jump_for_compare (comparison, if_false_label, if_true_label);
0006469d 9144
ca695ac9 9145 if (drop_through_label)
0006469d 9146 {
ca695ac9
JB
9147 /* If do_jump produces code that might be jumped around,
9148 do any stack adjusts from that code, before the place
9149 where control merges in. */
9150 do_pending_stack_adjust ();
9151 emit_label (drop_through_label);
9152 }
9153}
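/* An illustrative sketch only: the short-circuit control flow do_jump
   produces.  For the hypothetical source

	if (a && b)
	  body ();

   jumpifnot on the TRUTH_ANDIF_EXPR yields the equivalent of

	if (! a) goto false_label;
	if (! b) goto false_label;
	body ();
      false_label:;

   so no boolean value for `a && b' is ever materialized.  */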
9154\f
9155/* Given a comparison expression EXP for values too wide to be compared
9156 with one insn, test the comparison and jump to the appropriate label.
9157 The code of EXP is ignored; we always test GT if SWAP is 0,
9158 and LT if SWAP is 1. */
0006469d 9159
ca695ac9
JB
9160static void
9161do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9162 tree exp;
9163 int swap;
9164 rtx if_false_label, if_true_label;
9165{
9166 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9167 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9168 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9169 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9170 rtx drop_through_label = 0;
9171 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9172 int i;
0006469d 9173
ca695ac9
JB
9174 if (! if_true_label || ! if_false_label)
9175 drop_through_label = gen_label_rtx ();
9176 if (! if_true_label)
9177 if_true_label = drop_through_label;
9178 if (! if_false_label)
9179 if_false_label = drop_through_label;
0006469d 9180
ca695ac9
JB
9181 /* Compare a word at a time, high order first. */
9182 for (i = 0; i < nwords; i++)
9183 {
9184 rtx comp;
9185 rtx op0_word, op1_word;
0006469d 9186
ca695ac9
JB
9187 if (WORDS_BIG_ENDIAN)
9188 {
9189 op0_word = operand_subword_force (op0, i, mode);
9190 op1_word = operand_subword_force (op1, i, mode);
9191 }
9192 else
9193 {
9194 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9195 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9196 }
0006469d 9197
ca695ac9
JB
9198 /* All but high-order word must be compared as unsigned. */
9199 comp = compare_from_rtx (op0_word, op1_word,
9200 (unsignedp || i > 0) ? GTU : GT,
9201 unsignedp, word_mode, NULL_RTX, 0);
9202 if (comp == const_true_rtx)
9203 emit_jump (if_true_label);
9204 else if (comp != const0_rtx)
9205 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 9206
ca695ac9
JB
9207 /* Consider lower words only if these are equal. */
9208 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9209 NULL_RTX, 0);
9210 if (comp == const_true_rtx)
9211 emit_jump (if_false_label);
9212 else if (comp != const0_rtx)
9213 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9214 }
0006469d 9215
ca695ac9
JB
9216 if (if_false_label)
9217 emit_jump (if_false_label);
9218 if (drop_through_label)
9219 emit_label (drop_through_label);
0006469d
TW
9220}
9221
ca695ac9
JB
9222/* Compare OP0 with OP1, word at a time, in mode MODE.
9223 UNSIGNEDP says to do unsigned comparison.
9224 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
904762c8 9225
2e5ec6cf 9226void
ca695ac9
JB
9227do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9228 enum machine_mode mode;
9229 int unsignedp;
9230 rtx op0, op1;
9231 rtx if_false_label, if_true_label;
0006469d 9232{
ca695ac9
JB
9233 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9234 rtx drop_through_label = 0;
9235 int i;
0006469d 9236
ca695ac9
JB
9237 if (! if_true_label || ! if_false_label)
9238 drop_through_label = gen_label_rtx ();
9239 if (! if_true_label)
9240 if_true_label = drop_through_label;
9241 if (! if_false_label)
9242 if_false_label = drop_through_label;
0006469d 9243
ca695ac9
JB
9244 /* Compare a word at a time, high order first. */
9245 for (i = 0; i < nwords; i++)
0006469d 9246 {
ca695ac9
JB
9247 rtx comp;
9248 rtx op0_word, op1_word;
0006469d 9249
ca695ac9
JB
9250 if (WORDS_BIG_ENDIAN)
9251 {
9252 op0_word = operand_subword_force (op0, i, mode);
9253 op1_word = operand_subword_force (op1, i, mode);
9254 }
9255 else
9256 {
9257 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9258 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9259 }
0006469d 9260
ca695ac9
JB
9261 /* All but high-order word must be compared as unsigned. */
9262 comp = compare_from_rtx (op0_word, op1_word,
9263 (unsignedp || i > 0) ? GTU : GT,
9264 unsignedp, word_mode, NULL_RTX, 0);
9265 if (comp == const_true_rtx)
9266 emit_jump (if_true_label);
9267 else if (comp != const0_rtx)
9268 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 9269
ca695ac9
JB
9270 /* Consider lower words only if these are equal. */
9271 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9272 NULL_RTX, 0);
9273 if (comp == const_true_rtx)
9274 emit_jump (if_false_label);
9275 else if (comp != const0_rtx)
9276 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9277 }
0006469d 9278
ca695ac9
JB
9279 if (if_false_label)
9280 emit_jump (if_false_label);
9281 if (drop_through_label)
9282 emit_label (drop_through_label);
0006469d 9283}
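/* An illustrative sketch only: the word-at-a-time algorithm above,
   written as plain C for a hypothetical two-word unsigned comparison,
   high-order word tested first.  */
#if 0
static int
double_word_greater (op0_hi, op0_lo, op1_hi, op1_lo)
     unsigned long op0_hi, op0_lo, op1_hi, op1_lo;
{
  if (op0_hi > op1_hi)		/* GTU comp: jump to if_true_label.  */
    return 1;
  if (op0_hi != op1_hi)		/* NE comp: jump to if_false_label.  */
    return 0;
  /* Lower words are considered only when all higher words are equal,
     and are always compared unsigned.  */
  return op0_lo > op1_lo;
}
#endif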
bbf6f052 9284
ca695ac9
JB
9285/* Given an EQ_EXPR expression EXP for values too wide to be compared
9286 with one insn, test the comparison and jump to the appropriate label. */
9287
9288static void
9289do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9290 tree exp;
9291 rtx if_false_label, if_true_label;
bbf6f052 9292{
ca695ac9
JB
9293 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9294 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9295 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9296 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9297 int i;
9298 rtx drop_through_label = 0;
bbf6f052 9299
ca695ac9
JB
9300 if (! if_false_label)
9301 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 9302
ca695ac9
JB
9303 for (i = 0; i < nwords; i++)
9304 {
9305 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
9306 operand_subword_force (op1, i, mode),
9307 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9308 word_mode, NULL_RTX, 0);
9309 if (comp == const_true_rtx)
9310 emit_jump (if_false_label);
9311 else if (comp != const0_rtx)
9312 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9313 }
1499e0a8 9314
ca695ac9
JB
9315 if (if_true_label)
9316 emit_jump (if_true_label);
9317 if (drop_through_label)
9318 emit_label (drop_through_label);
9319}
9320\f
9321/* Jump according to whether OP0 is 0.
9322 We assume that OP0 has an integer mode that is too wide
9323 for the available compare insns. */
1499e0a8 9324
ca695ac9
JB
9325static void
9326do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9327 rtx op0;
9328 rtx if_false_label, if_true_label;
9329{
9330 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9331 int i;
9332 rtx drop_through_label = 0;
1499e0a8 9333
ca695ac9
JB
9334 if (! if_false_label)
9335 drop_through_label = if_false_label = gen_label_rtx ();
1499e0a8 9336
ca695ac9
JB
9337 for (i = 0; i < nwords; i++)
9338 {
9339 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
9340 GET_MODE (op0)),
9341 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
9342 if (comp == const_true_rtx)
9343 emit_jump (if_false_label);
9344 else if (comp != const0_rtx)
9345 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9346 }
1499e0a8 9347
ca695ac9
JB
9348 if (if_true_label)
9349 emit_jump (if_true_label);
9350 if (drop_through_label)
9351 emit_label (drop_through_label);
9352}
bbf6f052 9353
ca695ac9
JB
9354/* Given a comparison expression in rtl form, output conditional branches to
9355 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
bbf6f052 9356
ca695ac9
JB
9357static void
9358do_jump_for_compare (comparison, if_false_label, if_true_label)
9359 rtx comparison, if_false_label, if_true_label;
9360{
9361 if (if_true_label)
a358cee0 9362 {
ca695ac9
JB
9363 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9364 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
9365 else
9366 abort ();
a358cee0 9367
ca695ac9
JB
9368 if (if_false_label)
9369 emit_jump (if_false_label);
c980ac49 9370 }
ca695ac9 9371 else if (if_false_label)
bbf6f052 9372 {
ca695ac9 9373 rtx insn;
f12f485a 9374 rtx prev = get_last_insn ();
ca695ac9 9375 rtx branch = 0;
bbf6f052 9376
ca695ac9
JB
9377 /* Output the branch with the opposite condition. Then try to invert
9378 what is generated. If more than one insn is a branch, or if the
9379 branch is not the last insn written, abort. If we can't invert
 9380 the branch, make a true label, redirect this jump to that,
9381 emit a jump to the false label and define the true label. */
bbf6f052 9382
ca695ac9 9383 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
34661f5c 9384 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
ca695ac9
JB
9385 else
9386 abort ();
bbf6f052 9387
41dfd40c
RK
9388 /* Here we get the first insn that was just emitted. It used to be the
9389 case that, on some machines, emitting the branch would discard
9390 the previous compare insn and emit a replacement. This isn't
9391 done anymore, but abort if we see that PREV is deleted. */
9392
ca695ac9 9393 if (prev == 0)
ca695ac9 9394 insn = get_insns ();
41dfd40c
RK
9395 else if (INSN_DELETED_P (prev))
9396 abort ();
ca695ac9 9397 else
41dfd40c 9398 insn = NEXT_INSN (prev);
bbf6f052 9399
34661f5c 9400 for (; insn; insn = NEXT_INSN (insn))
ca695ac9
JB
9401 if (GET_CODE (insn) == JUMP_INSN)
9402 {
9403 if (branch)
9404 abort ();
9405 branch = insn;
9406 }
9407
9408 if (branch != get_last_insn ())
9409 abort ();
9410
127e4d19 9411 JUMP_LABEL (branch) = if_false_label;
ca695ac9
JB
9412 if (! invert_jump (branch, if_false_label))
9413 {
9414 if_true_label = gen_label_rtx ();
9415 redirect_jump (branch, if_true_label);
9416 emit_jump (if_false_label);
9417 emit_label (if_true_label);
bbf6f052
RK
9418 }
9419 }
ca695ac9
JB
9420}
9421\f
9422/* Generate code for a comparison expression EXP
9423 (including code to compute the values to be compared)
9424 and set (CC0) according to the result.
9425 SIGNED_CODE should be the rtx operation for this comparison for
9426 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9427
9428 We force a stack adjustment unless there are currently
9429 things pushed on the stack that aren't yet used. */
9430
9431static rtx
9432compare (exp, signed_code, unsigned_code)
9433 register tree exp;
9434 enum rtx_code signed_code, unsigned_code;
9435{
9436 register rtx op0
9437 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9438 register rtx op1
9439 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9440 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
9441 register enum machine_mode mode = TYPE_MODE (type);
9442 int unsignedp = TREE_UNSIGNED (type);
9443 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
bbf6f052 9444
ca695ac9
JB
9445 return compare_from_rtx (op0, op1, code, unsignedp, mode,
9446 ((mode == BLKmode)
9447 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9448 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
9449}
bbf6f052 9450
ca695ac9
JB
9451/* Like compare but expects the values to compare as two rtx's.
9452 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 9453
ca695ac9
JB
9454 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9455 compared.
bbf6f052 9456
ca695ac9
JB
9457 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9458 size of MODE should be used. */
bbf6f052 9459
ca695ac9
JB
9460rtx
9461compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9462 register rtx op0, op1;
9463 enum rtx_code code;
9464 int unsignedp;
9465 enum machine_mode mode;
9466 rtx size;
9467 int align;
9468{
9469 rtx tem;
bbf6f052 9470
ca695ac9
JB
9471 /* If one operand is constant, make it the second one. Only do this
9472 if the other operand is not constant as well. */
bbf6f052 9473
ca695ac9
JB
9474 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9475 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9476 {
9477 tem = op0;
9478 op0 = op1;
9479 op1 = tem;
9480 code = swap_condition (code);
9481 }
bbf6f052 9482
ca695ac9 9483 if (flag_force_mem)
bbf6f052 9484 {
ca695ac9
JB
9485 op0 = force_not_mem (op0);
9486 op1 = force_not_mem (op1);
9487 }
bbf6f052 9488
ca695ac9 9489 do_pending_stack_adjust ();
bbf6f052 9490
ca695ac9
JB
9491 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9492 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9493 return tem;
bbf6f052 9494
ca695ac9
JB
9495#if 0
9496 /* There's no need to do this now that combine.c can eliminate lots of
9497 sign extensions. This can be less efficient in certain cases on other
9498 machines. */
bbf6f052 9499
ca695ac9
JB
9500 /* If this is a signed equality comparison, we can do it as an
9501 unsigned comparison since zero-extension is cheaper than sign
9502 extension and comparisons with zero are done as unsigned. This is
9503 the case even on machines that can do fast sign extension, since
9504 zero-extension is easier to combine with other operations than
9505 sign-extension is. If we are comparing against a constant, we must
9506 convert it to what it would look like unsigned. */
9507 if ((code == EQ || code == NE) && ! unsignedp
9508 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9509 {
9510 if (GET_CODE (op1) == CONST_INT
9511 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9512 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9513 unsignedp = 1;
bbf6f052 9514 }
ca695ac9
JB
9515#endif
9516
9517 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
bbf6f052 9518
ca695ac9 9519 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
bbf6f052
RK
9520}
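/* An illustrative sketch only: the canonicalization above.  A comparison
   whose first operand is constant is swapped so the constant comes
   second, with the condition reversed to compensate; e.g. (the register
   number is hypothetical)

	(lt (const_int 4) (reg:SI 60))

   becomes

	(gt (reg:SI 60) (const_int 4))

   since swap_condition (LT) is GT.  */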
9521\f
ca695ac9
JB
9522/* Generate code to calculate EXP using a store-flag instruction
9523 and return an rtx for the result. EXP is either a comparison
9524 or a TRUTH_NOT_EXPR whose operand is a comparison.
bbf6f052 9525
ca695ac9 9526 If TARGET is nonzero, store the result there if convenient.
bbf6f052 9527
ca695ac9
JB
9528 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9529 cheap.
bbf6f052 9530
ca695ac9
JB
9531 Return zero if there is no suitable set-flag instruction
9532 available on this machine.
bbf6f052 9533
ca695ac9
JB
9534 Once expand_expr has been called on the arguments of the comparison,
9535 we are committed to doing the store flag, since it is not safe to
9536 re-evaluate the expression. We emit the store-flag insn by calling
9537 emit_store_flag, but only expand the arguments if we have a reason
9538 to believe that emit_store_flag will be successful. If we think that
9539 it will, but it isn't, we have to simulate the store-flag with a
9540 set/jump/set sequence. */
bbf6f052 9541
ca695ac9
JB
9542static rtx
9543do_store_flag (exp, target, mode, only_cheap)
9544 tree exp;
9545 rtx target;
9546 enum machine_mode mode;
9547 int only_cheap;
bbf6f052 9548{
ca695ac9
JB
9549 enum rtx_code code;
9550 tree arg0, arg1, type;
9551 tree tem;
9552 enum machine_mode operand_mode;
9553 int invert = 0;
9554 int unsignedp;
9555 rtx op0, op1;
9556 enum insn_code icode;
9557 rtx subtarget = target;
9558 rtx result, label, pattern, jump_pat;
bbf6f052 9559
ca695ac9
JB
9560 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9561 result at the end. We can't simply invert the test since it would
9562 have already been inverted if it were valid. This case occurs for
9563 some floating-point comparisons. */
9564
9565 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9566 invert = 1, exp = TREE_OPERAND (exp, 0);
9567
9568 arg0 = TREE_OPERAND (exp, 0);
9569 arg1 = TREE_OPERAND (exp, 1);
9570 type = TREE_TYPE (arg0);
9571 operand_mode = TYPE_MODE (type);
9572 unsignedp = TREE_UNSIGNED (type);
9573
9574 /* We won't bother with BLKmode store-flag operations because it would mean
9575 passing a lot of information to emit_store_flag. */
9576 if (operand_mode == BLKmode)
9577 return 0;
9578
9579 STRIP_NOPS (arg0);
9580 STRIP_NOPS (arg1);
9581
9582 /* Get the rtx comparison code to use. We know that EXP is a comparison
9583 operation of some type. Some comparisons against 1 and -1 can be
9584 converted to comparisons with zero. Do so here so that the tests
9585 below will be aware that we have a comparison with zero. These
9586 tests will not catch constants in the first operand, but constants
9587 are rarely passed as the first operand. */
9588
9589 switch (TREE_CODE (exp))
9590 {
9591 case EQ_EXPR:
9592 code = EQ;
9593 break;
9594 case NE_EXPR:
9595 code = NE;
9596 break;
9597 case LT_EXPR:
9598 if (integer_onep (arg1))
9599 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9600 else
9601 code = unsignedp ? LTU : LT;
9602 break;
9603 case LE_EXPR:
9604 if (! unsignedp && integer_all_onesp (arg1))
9605 arg1 = integer_zero_node, code = LT;
9606 else
9607 code = unsignedp ? LEU : LE;
9608 break;
9609 case GT_EXPR:
9610 if (! unsignedp && integer_all_onesp (arg1))
9611 arg1 = integer_zero_node, code = GE;
9612 else
9613 code = unsignedp ? GTU : GT;
9614 break;
9615 case GE_EXPR:
9616 if (integer_onep (arg1))
9617 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9618 else
9619 code = unsignedp ? GEU : GE;
9620 break;
9621 default:
9622 abort ();
9623 }
bbf6f052 9624
ca695ac9
JB
9625 /* Put a constant second. */
9626 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
bbf6f052 9627 {
ca695ac9
JB
9628 tem = arg0; arg0 = arg1; arg1 = tem;
9629 code = swap_condition (code);
bbf6f052 9630 }
bbf6f052 9631
ca695ac9
JB
9632 /* If this is an equality or inequality test of a single bit, we can
9633 do this by shifting the bit being tested to the low-order bit and
9634 masking the result with the constant 1. If the condition was EQ,
9635 we xor it with 1. This does not require an scc insn and is faster
9636 than an scc insn even if we have it. */
bbf6f052 9637
ca695ac9
JB
9638 if ((code == NE || code == EQ)
9639 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9640 && integer_pow2p (TREE_OPERAND (arg0, 1))
9641 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9642 {
9643 tree inner = TREE_OPERAND (arg0, 0);
9644 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9645 NULL_RTX, VOIDmode, 0)));
9646 int ops_unsignedp;
bbf6f052 9647
ca695ac9
JB
9648 /* If INNER is a right shift of a constant and it plus BITNUM does
9649 not overflow, adjust BITNUM and INNER. */
bbf6f052 9650
ca695ac9
JB
9651 if (TREE_CODE (inner) == RSHIFT_EXPR
9652 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9653 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9654 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9655 < TYPE_PRECISION (type)))
9656 {
 9657 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9658 inner = TREE_OPERAND (inner, 0);
9659 }
bbf6f052 9660
ca695ac9
JB
9661 /* If we are going to be able to omit the AND below, we must do our
9662 operations as unsigned. If we must use the AND, we have a choice.
9663 Normally unsigned is faster, but for some machines signed is. */
9664 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
ad92c826
RK
9665#ifdef LOAD_EXTEND_OP
9666 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
ca695ac9
JB
9667#else
9668 : 1
9669#endif
9670 );
bbf6f052 9671
ca695ac9
JB
9672 if (subtarget == 0 || GET_CODE (subtarget) != REG
9673 || GET_MODE (subtarget) != operand_mode
9674 || ! safe_from_p (subtarget, inner))
9675 subtarget = 0;
e7c33f54 9676
ca695ac9 9677 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 9678
ca695ac9
JB
9679 if (bitnum != 0)
9680 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
0c316b20 9681 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 9682
ca695ac9
JB
9683 if (GET_MODE (op0) != mode)
9684 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 9685
ca695ac9 9686 if ((code == EQ && ! invert) || (code == NE && invert))
0c316b20 9687 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
ca695ac9 9688 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 9689
ca695ac9
JB
9690 /* Put the AND last so it can combine with more things. */
9691 if (bitnum != TYPE_PRECISION (type) - 1)
0c316b20 9692 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 9693
ca695ac9
JB
9694 return op0;
9695 }
bbf6f052 9696
ca695ac9
JB
9697 /* Now see if we are likely to be able to do this. Return if not. */
9698 if (! can_compare_p (operand_mode))
9699 return 0;
9700 icode = setcc_gen_code[(int) code];
9701 if (icode == CODE_FOR_nothing
9702 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9703 {
9704 /* We can only do this if it is one of the special cases that
9705 can be handled without an scc insn. */
9706 if ((code == LT && integer_zerop (arg1))
9707 || (! only_cheap && code == GE && integer_zerop (arg1)))
9708 ;
9709 else if (BRANCH_COST >= 0
9710 && ! only_cheap && (code == NE || code == EQ)
9711 && TREE_CODE (type) != REAL_TYPE
9712 && ((abs_optab->handlers[(int) operand_mode].insn_code
9713 != CODE_FOR_nothing)
9714 || (ffs_optab->handlers[(int) operand_mode].insn_code
9715 != CODE_FOR_nothing)))
9716 ;
9717 else
9718 return 0;
9719 }
9720
9721 preexpand_calls (exp);
9722 if (subtarget == 0 || GET_CODE (subtarget) != REG
9723 || GET_MODE (subtarget) != operand_mode
9724 || ! safe_from_p (subtarget, arg1))
9725 subtarget = 0;
bbf6f052 9726
ca695ac9
JB
9727 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9728 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
bbf6f052 9729
ca695ac9
JB
9730 if (target == 0)
9731 target = gen_reg_rtx (mode);
bbf6f052 9732
ca695ac9
JB
9733 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
 9734 because, if emit_store_flag does anything, it will succeed and
9735 OP0 and OP1 will not be used subsequently. */
bbf6f052 9736
ca695ac9
JB
9737 result = emit_store_flag (target, code,
9738 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9739 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9740 operand_mode, unsignedp, 1);
bbf6f052 9741
ca695ac9
JB
9742 if (result)
9743 {
9744 if (invert)
9745 result = expand_binop (mode, xor_optab, result, const1_rtx,
9746 result, 0, OPTAB_LIB_WIDEN);
9747 return result;
9748 }
bbf6f052 9749
ca695ac9
JB
9750 /* If this failed, we have to do this with set/compare/jump/set code. */
9751 if (target == 0 || GET_CODE (target) != REG
9752 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9753 target = gen_reg_rtx (GET_MODE (target));
bbf6f052 9754
ca695ac9
JB
9755 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9756 result = compare_from_rtx (op0, op1, code, unsignedp,
9757 operand_mode, NULL_RTX, 0);
9758 if (GET_CODE (result) == CONST_INT)
9759 return (((result == const0_rtx && ! invert)
9760 || (result != const0_rtx && invert))
9761 ? const0_rtx : const1_rtx);
bbf6f052 9762
ca695ac9
JB
9763 label = gen_label_rtx ();
9764 if (bcc_gen_fctn[(int) code] == 0)
9765 abort ();
bbf6f052 9766
ca695ac9
JB
9767 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9768 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9769 emit_label (label);
bbf6f052 9770
ca695ac9
JB
9771 return target;
9772}
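/* An illustrative sketch only: the single-bit shortcut above, as plain
   C.  The bit position and helper names are hypothetical.  The tested
   bit is shifted to the low-order position and masked; an XOR flips the
   result for the EQ sense.  No scc instruction is needed.  */
#if 0
static int
bit_3_set (x)			/* (x & 0x08) != 0  */
     unsigned int x;
{
  return (x >> 3) & 1;
}

static int
bit_3_clear (x)			/* (x & 0x08) == 0  */
     unsigned int x;
{
  return ((x >> 3) & 1) ^ 1;
}
#endif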
9773\f
9774/* Generate a tablejump instruction (used for switch statements). */
bbf6f052 9775
ca695ac9 9776#ifdef HAVE_tablejump
bbf6f052 9777
ca695ac9
JB
9778/* INDEX is the value being switched on, with the lowest value
9779 in the table already subtracted.
9780 MODE is its expected mode (needed if INDEX is constant).
9781 RANGE is the length of the jump table.
9782 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
bbf6f052 9783
ca695ac9
JB
9784 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9785 index value is out of range. */
bbf6f052 9786
ca695ac9
JB
9787void
9788do_tablejump (index, mode, range, table_label, default_label)
9789 rtx index, range, table_label, default_label;
9790 enum machine_mode mode;
9791{
9792 register rtx temp, vector;
bbf6f052 9793
ca695ac9
JB
9794 /* Do an unsigned comparison (in the proper mode) between the index
9795 expression and the value which represents the length of the range.
9796 Since we just finished subtracting the lower bound of the range
9797 from the index expression, this comparison allows us to simultaneously
9798 check that the original index expression value is both greater than
9799 or equal to the minimum value of the range and less than or equal to
9800 the maximum value of the range. */
bbf6f052 9801
bf500664
RK
9802 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9803 emit_jump_insn (gen_bgtu (default_label));
bbf6f052 9804
ca695ac9
JB
9805 /* If index is in range, it must fit in Pmode.
9806 Convert to Pmode so we can index with it. */
9807 if (mode != Pmode)
9808 index = convert_to_mode (Pmode, index, 1);
bbf6f052 9809
ca695ac9
JB
 9810 /* Don't let a MEM slip through, because then INDEX that comes
9811 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9812 and break_out_memory_refs will go to work on it and mess it up. */
9813#ifdef PIC_CASE_VECTOR_ADDRESS
9814 if (flag_pic && GET_CODE (index) != REG)
9815 index = copy_to_mode_reg (Pmode, index);
9816#endif
bbf6f052 9817
ca695ac9
JB
9818 /* If flag_force_addr were to affect this address
9819 it could interfere with the tricky assumptions made
9820 about addresses that contain label-refs,
9821 which may be valid only very near the tablejump itself. */
9822 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9823 GET_MODE_SIZE, because this indicates how large insns are. The other
9824 uses should all be Pmode, because they are addresses. This code
9825 could fail if addresses and insns are not the same size. */
9826 index = gen_rtx (PLUS, Pmode,
9827 gen_rtx (MULT, Pmode, index,
9828 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9829 gen_rtx (LABEL_REF, Pmode, table_label));
9830#ifdef PIC_CASE_VECTOR_ADDRESS
9831 if (flag_pic)
9832 index = PIC_CASE_VECTOR_ADDRESS (index);
9833 else
9834#endif
9835 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9836 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9837 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9838 RTX_UNCHANGING_P (vector) = 1;
9839 convert_move (temp, vector, 0);
bbf6f052 9840
ca695ac9 9841 emit_jump_insn (gen_tablejump (temp, table_label));
bbf6f052 9842
ca695ac9
JB
9843#ifndef CASE_VECTOR_PC_RELATIVE
9844 /* If we are generating PIC code or if the table is PC-relative, the
9845 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9846 if (! flag_pic)
9847 emit_barrier ();
bbf6f052 9848#endif
ca695ac9 9849}
bbf6f052 9850
ca695ac9 9851#endif /* HAVE_tablejump */
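
/* The dispatch address built above has this shape in the non-PIC case
   (a sketch; SImode table entries of size 4 are just an assumed
   example):

	(mem:SI (plus:SI (mult:SI (reg:SI index) (const_int 4))
			 (label_ref table_label)))

   convert_move then loads the selected vector entry into TEMP, and
   gen_tablejump branches through it.  */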


/* Emit a suitable bytecode to load a value from memory, assuming a pointer
   to that value is on the top of the stack.  The resulting type is TYPE,
   and the source declaration is DECL.  */

void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  /* Bit fields are special.  We only know about signed and
     unsigned ints, and enums.  The latter are treated as
     signed integers.  */

  if (DECL_BIT_FIELD (decl))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE
	  || TREE_CODE (type) == INTEGER_TYPE)
	opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
      else
	abort ();
    }
  else
    /* See corresponding comment in bc_store_memory ().  */
    if (TYPE_MODE (type) == BLKmode
	|| TYPE_MODE (type) == VOIDmode)
      return;
    else
      opcode = mode_to_load_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}


/* Store the contents of the second stack slot to the address in the
   top stack slot.  DECL is the declaration of the destination and is
   used to determine whether we're dealing with a bit field.  */

void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  if (DECL_BIT_FIELD (decl))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE
	  || TREE_CODE (type) == INTEGER_TYPE)
	opcode = sstoreBI;
      else
	abort ();
    }
  else
    if (TYPE_MODE (type) == BLKmode)
      {
	/* Copy structure.  This expands to a block copy instruction,
	   storeBLK.  In addition to the arguments expected by the other
	   store instructions, it also expects a type size (SImode) on
	   top of the stack, which is the structure size in size units
	   (usually bytes).  The first two arguments are already on the
	   stack, so we just put the size on level 1.  For some other
	   languages the size may be variable, which is why we don't
	   encode it as a storeBLK literal, but rather treat it as a
	   full-fledged expression.  */

	bc_expand_expr (TYPE_SIZE (type));
	opcode = storeBLK;
      }
    else
      opcode = mode_to_store_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
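
/* Taken together, a store through these bytecodes works on the
   interpreter stack roughly like this (a sketch; stack top rightmost):

	... value addr		before bc_store_memory
	... value addr size	for BLKmode, after pushing TYPE_SIZE
	...			after the store opcode executes

   where the opcode is storeBLK for BLKmode, sstoreBI for bit fields,
   or the mode-specific entry of mode_to_store_map otherwise.  */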


/* Allocate local stack space sufficient to hold a value of the given
   SIZE at alignment boundary ALIGNMENT bits.  ALIGNMENT must be an
   integral power of 2.  A special case is locals of type VOID, which
   have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
   remapped to the corresponding attribute of SI.  */

rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;

  if (size < 0)
    abort ();

  /* Normalize size and alignment.  */
  if (!size)
    size = UNITS_PER_WORD;

  if (alignment < BITS_PER_UNIT)
    byte_alignment = 1 << (INT_ALIGN - 1);
  else
    byte_alignment = alignment / BITS_PER_UNIT;

  /* Round the current offset up to the requested boundary.  */
  if (local_vars_size & (byte_alignment - 1))
    local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}
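
/* Worked example of the rounding above: with local_vars_size == 6 and
   ALIGNMENT == 32 bits (so byte_alignment == 4), 6 & 3 == 2 and
   6 + (4 - 2) == 8, i.e. the same as (6 + 3) & ~3.  The local then
   occupies offsets 8 through 8 + SIZE - 1.  */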


/* Allocate a variable-sized local array.  Variable-sized arrays are
   actually pointers to the address in memory where they are stored.  */

rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));

  /* Align pointer.  */
  if (local_vars_size & (ptralign - 1))
    local_vars_size += ptralign - (local_vars_size & (ptralign - 1));

  /* Note down local space needed: pointer to block; also return
     dummy rtx.  */

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
  return retval;
}
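
/* Note that only the pointer slot is reserved here; the array block
   itself lives elsewhere, and accesses made through bc_expand_address
   fetch the stored address first (the loadP emitted in the
   variable-size cases below).  */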


/* Push the machine address for the given external variable offset.  */

void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
			 BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}


/* Return a copy of string S, allocated with xmalloc.  */

static char *
bc_strdup (s)
     char *s;
{
  char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
  strcpy (new, s);
  return new;
}


/* Like above, but expects an IDENTIFIER.  */

void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}


/* Push the machine address for the given local variable offset.  */

void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}


/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */

void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
			      / BITS_PER_UNIT));
}


/* Convert a[i] into *(a + i).  */

tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
  tree type = TREE_TYPE (exp);
  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
			   TREE_OPERAND (exp, 0));
  tree index = TREE_OPERAND (exp, 1);

  /* Convert the integer argument to a type the same size as a pointer
     so the multiply won't overflow spuriously.  */

  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
    index = convert (type_for_size (POINTER_SIZE, 0), index);

  /* The array address isn't volatile even if the array is.
     (Of course this isn't terribly relevant since the bytecode
     translator treats nearly everything as volatile anyway.)  */
  TREE_THIS_VOLATILE (array_adr) = 0;

  return build1 (INDIRECT_REF, type,
		 fold (build (PLUS_EXPR,
			      TYPE_POINTER_TO (type),
			      array_adr,
			      fold (build (MULT_EXPR,
					   TYPE_POINTER_TO (type),
					   index,
					   size_in_bytes (type))))));
}
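
/* In other words (a sketch; TYPE is the element type of the array):

	a[i]   ==>   *(&a + (pointer-sized) i * sizeof (TYPE))

   so for "int a[10]" with 4-byte ints, a[3] becomes *(&a + 3*4), and
   the MULT_EXPR folds to the constant 12 when the index is constant.  */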


/* Load the address of the component referenced by the given
   COMPONENT_REF expression.

   Returns innermost lvalue.  */

tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;

  tem = TREE_OPERAND (exp, 1);
  mode = DECL_MODE (tem);

  /* Compute cumulative bit offset for nested component refs
     and array refs, and find the ultimate containing object.  */

  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
	bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else if (TREE_CODE (tem) == ARRAY_REF
	       && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
	bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
		   * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem))));
      else
	break;
    }

  bc_expand_expr (tem);

  /* For bit fields, also push their offset and size.  */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos,
			     TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
  else if ((SIval = bitpos / BITS_PER_UNIT))
    bc_emit_instruction (addconstPSI, SIval);

  return (TREE_OPERAND (exp, 1));
}
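
/* For instance, given "struct { int a; int b; } s;" the reference s.b
   accumulates bitpos == 32 (assuming 32-bit ints and 8-bit units), the
   address of s is pushed, and addconstPSI 4 advances the pointer to
   the member.  For a bit-field member no address arithmetic is
   emitted; the bit offset and size are pushed instead.  */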


/* Emit code to push two SI constants.  */

void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}


/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bit field.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);

  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:
      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:
      return (bc_expand_component_address (exp));

    case INDIRECT_REF:
      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
	 TYPE_SIZE tree is NULL, so guard against that; also make sure
	 we have an operand in the first place.  */

      if (TREE_OPERAND (exp, 0)
	  && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
				 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:
      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			     BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:
      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer.  */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case RESULT_DECL:
      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:
#if 0
      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
#endif

      if (DECL_EXTERNAL (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer.  */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case STRING_CST:
      {
	rtx r;

	bc_emit_bytecode (constP);
	r = output_constant_def (exp);
	bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
	fputc ('\n', stderr);
#endif
      }
      break;

    default:
      abort ();
      break;
    }

  /* Most lvalues don't have components.  */
  return (exp);
}
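
/* Dispatch summary (illustrative): for a VAR_DECL the address comes
   from localP or an external label reference; for INDIRECT_REF the
   pointer expression itself is evaluated; COMPONENT_REF and ARRAY_REF
   are delegated to the two helpers above.  The caller inspects the
   returned innermost component to decide between plain loads/stores
   and the bit-field opcodes.  */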


/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   ORed with the minimal alignment shifted left 8 bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:
      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    case ERROR_MARK:
      val = 0;
      break;

    default:
      abort ();
    }
  return build_int_2 (val, 0);
}
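
/* E.g. assuming (int) TYPE_MODE (type) == 5 and TYPE_ALIGN (type) ==
   32, the encoded value is 5 | (32 << 8) == 8197.  A hypothetical
   decoder on the runtime side (the helper name is illustrative, not
   part of the bytecode runtime, and assumes fewer than 256 modes):  */
#if 0
static void
bc_decode_type_code (val, mode, align)
     int val;
     int *mode, *align;
{
  *mode = val & 0xff;		/* machine mode in the low byte */
  *align = val >> 8;		/* minimal alignment above it */
}
#endif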


/* Generate a constructor label.  */

char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}


/* Evaluate constructor CONSTR and return a pointer to it on level one.
   We expand the constructor data as static data, and push a pointer to
   it.  The pointer is put in the pointer table and is retrieved by a
   constP bytecode instruction.  We then loop and store each constructor
   member in the corresponding component.  Finally, we return the
   original pointer on the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in the proper segment and push a pointer
     to the space on the stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the
     stack.  Generate sequences of store_memory calls for the
     constructor.  */

  /* Constructor type is a structure.  */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
	  != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
	{
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}

      /* Store each element of the constructor into the corresponding
	 field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  mode = DECL_MODE (field);
	  unsignedp = TREE_UNSIGNED (field);

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

	  bc_store_field (elt, bitsize, bitpos, mode,
			  TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires.  */
			  VOIDmode, 0,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
  else
    /* Constructor type is an array.  */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
	register tree elt;
	register int i;
	tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
	int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
	int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
	tree elttype = TREE_TYPE (TREE_TYPE (constr));

	/* If the constructor has fewer elements than the array,
	   clear the whole array first.  */

	if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
	  {
	    bc_emit_instruction (duplicate);
	    bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	    bc_emit_instruction (clearBLK);
	  }

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */

	for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
	     elt;
	     elt = TREE_CHAIN (elt), i++)
	  {
	    register enum machine_mode mode;
	    int bitsize;
	    int bitpos;
	    int unsignedp;

	    mode = TYPE_MODE (elttype);
	    bitsize = GET_MODE_BITSIZE (mode);
	    unsignedp = TREE_UNSIGNED (elttype);

	    bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));

	    bc_store_field (elt, bitsize, bitpos, mode,
			    TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			    /* The alignment of TARGET is
			       at least what its type requires.  */
			    VOIDmode, 0,
			    TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (constr)));
	  }
      }
}
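
/* For example, "struct { int a, b, c; } s = { 1, 2 };" (a sketch,
   assuming 32-bit ints) has fewer initializers than fields, so the
   pointer is duplicated and constSI 12 / clearBLK zero the whole
   object; bc_store_field then stores 1 at bit offset 0 and 2 at bit
   offset 32.  */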


/* Store the value of EXP (an expression tree) into member FIELD of the
   structure at the address on the stack.  The member has type TYPE and
   mode MODE, and occupies BITSIZE bits, starting BITPOS bits from the
   beginning of the structure.

   ALIGN is the alignment the structure is known to have, measured in
   bytes.  TOTAL_SIZE is its size in bytes, or -1 if variable.  */

void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
		value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer.  */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to
     access it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    {
      bc_store_bit_field (bitpos, bitsize, unsignedp);
      return;
    }
  else
    /* Not a bit field.  */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member.  */
      if (offset)
	bc_emit_instruction (addconstPSI, offset);

      /* Store.  */
      bc_store_memory (type, field);
    }
}


/* Store SI/SU in a bit field.  */

void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bit field offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Store.  */
  bc_emit_instruction (sstoreBI);
}


/* Load SI/SU from a bit field.  */

void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bit field offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend.  */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
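
/* For instance, loading a 5-bit signed field at bit offset 3 pushes
   constSI 3 and constSI 5 and then emits sxloadBI; the matching store
   pushes the same pair and emits sstoreBI.  (The offset and size here
   are illustrative only.)  */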


/* Adjust the interpreter stack by NLEVELS.  Positive means drop
   NLEVELS levels (adjust the stack pointer upwards), negative means
   add that number of levels (adjust the stack pointer downwards).
   Only positive values normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);
      /* Fall through to drop a second level.  */

    case 1:
      bc_emit_instruction (drop);
      break;

    default:
      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}
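
/* E.g. bc_adjust_stack (1) and bc_adjust_stack (2) emit one and two
   "drop" instructions respectively, while bc_adjust_stack (5) emits a
   single adjstackSI 5 and books the depth change explicitly.  */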