bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
99310285 2 Copyright (C) 1988, 1992, 1993, 1994 Free Software Foundation, Inc.
bbf6f052
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21#include "config.h"
ca695ac9 22#include "machmode.h"
bbf6f052
RK
23#include "rtl.h"
24#include "tree.h"
ca695ac9 25#include "obstack.h"
bbf6f052
RK
26#include "flags.h"
27#include "function.h"
28#include "insn-flags.h"
29#include "insn-codes.h"
30#include "expr.h"
31#include "insn-config.h"
32#include "recog.h"
33#include "output.h"
bbf6f052
RK
34#include "typeclass.h"
35
ca695ac9
JB
36#include "bytecode.h"
37#include "bc-opcode.h"
38#include "bc-typecd.h"
39#include "bc-optab.h"
40#include "bc-emit.h"
41
42
bbf6f052
RK
43#define CEIL(x,y) (((x) + (y) - 1) / (y))
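/* For example, CEIL (11, 4) expands to (11 + 4 - 1) / 4, i.e. 3:
   the quotient rounded up, computed purely in integer arithmetic.  */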
44
45/* Decide whether a function's arguments should be processed
bbc8a071
RK
46 from first to last or from last to first.
47
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
bbf6f052 50
bbf6f052 51#ifdef PUSH_ROUNDING
bbc8a071 52
3319a347 53#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
bbf6f052
RK
54#define PUSH_ARGS_REVERSED /* If it's last to first */
55#endif
bbc8a071 56
bbf6f052
RK
57#endif
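/* The defined () comparison above acts as an exclusive-or: each
   defined () yields 0 or 1, so the test holds exactly when one of
   STACK_GROWS_DOWNWARD and ARGS_GROW_DOWNWARD is defined without the
   other -- e.g. a stack that grows downward while argument offsets
   grow upward.  */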
58
59#ifndef STACK_PUSH_CODE
60#ifdef STACK_GROWS_DOWNWARD
61#define STACK_PUSH_CODE PRE_DEC
62#else
63#define STACK_PUSH_CODE PRE_INC
64#endif
65#endif
66
67/* Like STACK_BOUNDARY but in units of bytes, not bits. */
68#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
69
70/* If this is nonzero, we do not bother generating VOLATILE
71 around volatile memory references, and we are willing to
72 output indirect addresses. If cse is to follow, we reject
73 indirect addresses so a useful potential cse is generated;
74 if it is used only once, instruction combination will produce
75 the same indirect address eventually. */
76int cse_not_expected;
77
78/* Nonzero to generate code for all the subroutines within an
79 expression before generating the upper levels of the expression.
80 Nowadays this is never zero. */
81int do_preexpand_calls = 1;
82
83/* Number of units that we should eventually pop off the stack.
84 These are the arguments to function calls that have already returned. */
85int pending_stack_adjust;
86
87/* Nonzero means stack pops must not be deferred, and deferred stack
88 pops must not be output. It is nonzero inside a function call,
89 inside a conditional expression, inside a statement expression,
90 and in other cases as well. */
91int inhibit_defer_pop;
92
93/* A list of all cleanups which belong to the arguments of
94 function calls being expanded by expand_call. */
95tree cleanups_this_call;
96
97/* Nonzero means __builtin_saveregs has already been done in this function.
98 The value is the pseudoreg containing the value __builtin_saveregs
99 returned. */
100static rtx saveregs_value;
101
dcf76fff
TW
102/* Similarly for __builtin_apply_args. */
103static rtx apply_args_value;
104
4969d05d
RK
105/* This structure is used by move_by_pieces to describe the move to
106 be performed. */
107
108struct move_by_pieces
109{
110 rtx to;
111 rtx to_addr;
112 int autinc_to;
113 int explicit_inc_to;
114 rtx from;
115 rtx from_addr;
116 int autinc_from;
117 int explicit_inc_from;
118 int len;
119 int offset;
120 int reverse;
121};
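/* As the code that fills this in shows: AUTINC_FROM and AUTINC_TO are
   nonzero when the corresponding address is (or has been rewritten as)
   an automatically stepped {PRE,POST}_{INC,DEC} address;
   EXPLICIT_INC_FROM and EXPLICIT_INC_TO are -1 or +1 when explicit add
   insns must be emitted to step the pointer; REVERSE means the copy
   proceeds from high addresses downward.  */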
122
c02bd5d9
JB
123/* Used to generate bytecodes: keep track of size of local variables,
124 as well as depth of arithmetic stack. (Notice that variables are
125 stored on the machine's stack, not the arithmetic stack.) */
126
186f92ce 127extern int local_vars_size;
c02bd5d9
JB
128extern int stack_depth;
129extern int max_stack_depth;
292b1216 130extern struct obstack permanent_obstack;
c02bd5d9
JB
131
132
4969d05d
RK
133static rtx enqueue_insn PROTO((rtx, rtx));
134static int queued_subexp_p PROTO((rtx));
135static void init_queue PROTO((void));
136static void move_by_pieces PROTO((rtx, rtx, int, int));
137static int move_by_pieces_ninsns PROTO((unsigned int, int));
138static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
139 struct move_by_pieces *));
4969d05d
RK
140static void store_constructor PROTO((tree, rtx));
141static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
142 enum machine_mode, int, int, int));
143static tree save_noncopied_parts PROTO((tree, tree));
144static tree init_noncopied_parts PROTO((tree, tree));
145static int safe_from_p PROTO((rtx, tree));
146static int fixed_type_p PROTO((tree));
147static int get_pointer_alignment PROTO((tree, unsigned));
148static tree string_constant PROTO((tree, tree *));
149static tree c_strlen PROTO((tree));
307b821c
RK
150static rtx expand_builtin PROTO((tree, rtx, rtx,
151 enum machine_mode, int));
0006469d
TW
152static int apply_args_size PROTO((void));
153static int apply_result_size PROTO((void));
154static rtx result_vector PROTO((int, rtx));
155static rtx expand_builtin_apply_args PROTO((void));
156static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
157static void expand_builtin_return PROTO((rtx));
4969d05d 158static rtx expand_increment PROTO((tree, int));
ca695ac9
JB
159rtx bc_expand_increment PROTO((struct increment_operator *, tree));
160tree bc_runtime_type_code PROTO((tree));
161rtx bc_allocate_local PROTO((int, int));
162void bc_store_memory PROTO((tree, tree));
163tree bc_expand_component_address PROTO((tree));
164tree bc_expand_address PROTO((tree));
165void bc_expand_constructor PROTO((tree));
166void bc_adjust_stack PROTO((int));
167tree bc_canonicalize_array_ref PROTO((tree));
168void bc_load_memory PROTO((tree, tree));
169void bc_load_externaddr PROTO((rtx));
170void bc_load_externaddr_id PROTO((tree, int));
171void bc_load_localaddr PROTO((rtx));
172void bc_load_parmaddr PROTO((rtx));
4969d05d
RK
173static void preexpand_calls PROTO((tree));
174static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
f81497d9 175static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
4969d05d
RK
176static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
177static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
178static void do_jump_for_compare PROTO((rtx, rtx, rtx));
179static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
180static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
bbf6f052 181
4fa52007
RK
182/* Record for each mode whether we can move a register directly to or
183 from an object of that mode in memory. If we can't, we won't try
184 to use that mode directly when accessing a field of that mode. */
185
186static char direct_load[NUM_MACHINE_MODES];
187static char direct_store[NUM_MACHINE_MODES];
188
bbf6f052
RK
189/* MOVE_RATIO is the number of move instructions that is better than
190 a block move. */
191
192#ifndef MOVE_RATIO
266007a7 193#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
bbf6f052
RK
194#define MOVE_RATIO 2
195#else
196/* A value of around 6 would minimize code size; infinity would minimize
197 execution time. */
198#define MOVE_RATIO 15
199#endif
200#endif
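/* Concretely (assuming 4-byte registers and word-aligned operands):
   emit_block_move copies piecewise only when move_by_pieces_ninsns
   returns a count below MOVE_RATIO, so with the default of 15 a
   64-byte copy (16 move insns) falls back to a movstr pattern or a
   library call, while a 32-byte copy (8 insns) is expanded inline.  */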
e87b4f3f 201
266007a7 202/* This array records the insn_code of insns to perform block moves. */
e6677db3 203enum insn_code movstr_optab[NUM_MACHINE_MODES];
266007a7 204
e87b4f3f
RS
205/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
206
207#ifndef SLOW_UNALIGNED_ACCESS
208#define SLOW_UNALIGNED_ACCESS 0
209#endif
0006469d
TW
210
211/* Register mappings for target machines without register windows. */
212#ifndef INCOMING_REGNO
213#define INCOMING_REGNO(OUT) (OUT)
214#endif
215#ifndef OUTGOING_REGNO
216#define OUTGOING_REGNO(IN) (IN)
217#endif
bbf6f052 218\f
ca695ac9
JB
219/* Maps used to convert modes to const, load, and store bytecodes. */
220enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
221enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
222enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
223
224/* Initialize maps used to convert modes to const, load, and store
225 bytecodes. */
226void
227bc_init_mode_to_opcode_maps ()
228{
229 int mode;
230
6bd6178d 231 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
ca695ac9
JB
232 mode_to_const_map[mode] =
233 mode_to_load_map[mode] =
234 mode_to_store_map[mode] = neverneverland;
235
236#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
6bd6178d
RK
237 mode_to_const_map[(int) SYM] = CONST; \
238 mode_to_load_map[(int) SYM] = LOAD; \
239 mode_to_store_map[(int) SYM] = STORE;
ca695ac9
JB
240
241#include "modemap.def"
242#undef DEF_MODEMAP
243}
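/* A sketch of the expansion, with hypothetical opcode names (see
   modemap.def for the real entries): an entry such as

	DEF_MODEMAP (SImode, CODE, UCODE, constSI, loadSI, storeSI)

   expands under the #define above into the three assignments

	mode_to_const_map[(int) SImode] = constSI;
	mode_to_load_map[(int) SImode] = loadSI;
	mode_to_store_map[(int) SImode] = storeSI;

   (CODE and UCODE are unused here), so each map can then be indexed
   directly by machine mode.  */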
244\f
4fa52007 245/* This is run once per compilation to set up which modes can be used
266007a7 246 directly in memory and to initialize the block move optab. */
4fa52007
RK
247
248void
249init_expr_once ()
250{
251 rtx insn, pat;
252 enum machine_mode mode;
e2549997
RS
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
4fa52007 256 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
e2549997 257 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
4fa52007
RK
258
259 start_sequence ();
260 insn = emit_insn (gen_rtx (SET, 0, 0));
261 pat = PATTERN (insn);
262
263 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
264 mode = (enum machine_mode) ((int) mode + 1))
265 {
266 int regno;
267 rtx reg;
268 int num_clobbers;
269
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
e2549997 272 PUT_MODE (mem1, mode);
4fa52007 273
e6fe56a4
RK
274 /* See if there is some register that can be used in this mode and
275 directly loaded or stored from memory. */
276
7308a047
RS
277 if (mode != VOIDmode && mode != BLKmode)
278 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
279 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
280 regno++)
281 {
282 if (! HARD_REGNO_MODE_OK (regno, mode))
283 continue;
e6fe56a4 284
7308a047 285 reg = gen_rtx (REG, mode, regno);
e6fe56a4 286
7308a047
RS
287 SET_SRC (pat) = mem;
288 SET_DEST (pat) = reg;
289 if (recog (pat, insn, &num_clobbers) >= 0)
290 direct_load[(int) mode] = 1;
e6fe56a4 291
e2549997
RS
292 SET_SRC (pat) = mem1;
293 SET_DEST (pat) = reg;
294 if (recog (pat, insn, &num_clobbers) >= 0)
295 direct_load[(int) mode] = 1;
296
7308a047
RS
297 SET_SRC (pat) = reg;
298 SET_DEST (pat) = mem;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_store[(int) mode] = 1;
e2549997
RS
301
302 SET_SRC (pat) = reg;
303 SET_DEST (pat) = mem1;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_store[(int) mode] = 1;
7308a047 306 }
4fa52007
RK
307 }
308
309 end_sequence ();
310}
311
bbf6f052
RK
312/* This is run at the start of compiling a function. */
313
314void
315init_expr ()
316{
317 init_queue ();
318
319 pending_stack_adjust = 0;
320 inhibit_defer_pop = 0;
321 cleanups_this_call = 0;
322 saveregs_value = 0;
0006469d 323 apply_args_value = 0;
e87b4f3f 324 forced_labels = 0;
bbf6f052
RK
325}
326
327/* Save all variables describing the current status into the structure *P.
328 This is used before starting a nested function. */
329
330void
331save_expr_status (p)
332 struct function *p;
333{
334 /* Instead of saving the postincrement queue, empty it. */
335 emit_queue ();
336
337 p->pending_stack_adjust = pending_stack_adjust;
338 p->inhibit_defer_pop = inhibit_defer_pop;
339 p->cleanups_this_call = cleanups_this_call;
340 p->saveregs_value = saveregs_value;
0006469d 341 p->apply_args_value = apply_args_value;
e87b4f3f 342 p->forced_labels = forced_labels;
bbf6f052
RK
343
344 pending_stack_adjust = 0;
345 inhibit_defer_pop = 0;
346 cleanups_this_call = 0;
347 saveregs_value = 0;
0006469d 348 apply_args_value = 0;
e87b4f3f 349 forced_labels = 0;
bbf6f052
RK
350}
351
352/* Restore all variables describing the current status from the structure *P.
353 This is used after a nested function. */
354
355void
356restore_expr_status (p)
357 struct function *p;
358{
359 pending_stack_adjust = p->pending_stack_adjust;
360 inhibit_defer_pop = p->inhibit_defer_pop;
361 cleanups_this_call = p->cleanups_this_call;
362 saveregs_value = p->saveregs_value;
0006469d 363 apply_args_value = p->apply_args_value;
e87b4f3f 364 forced_labels = p->forced_labels;
bbf6f052
RK
365}
366\f
367/* Manage the queue of increment instructions to be output
368 for POSTINCREMENT_EXPR expressions, etc. */
369
370static rtx pending_chain;
371
372/* Queue up to increment (or change) VAR later. BODY says how:
373 BODY should be the same thing you would pass to emit_insn
374 to increment right away. It will go to emit_insn later on.
375
376 The value is a QUEUED expression to be used in place of VAR
377 where you want to guarantee the pre-incrementation value of VAR. */
378
379static rtx
380enqueue_insn (var, body)
381 rtx var, body;
382{
383 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
906c4e36 384 var, NULL_RTX, NULL_RTX, body, pending_chain);
bbf6f052
RK
385 return pending_chain;
386}
387
388/* Use protect_from_queue to convert a QUEUED expression
389 into something that you can put immediately into an instruction.
390 If the queued incrementation has not happened yet,
391 protect_from_queue returns the variable itself.
392 If the incrementation has happened, protect_from_queue returns a temp
393 that contains a copy of the old value of the variable.
394
395 Any time an rtx which might possibly be a QUEUED is to be put
396 into an instruction, it must be passed through protect_from_queue first.
397 QUEUED expressions are not meaningful in instructions.
398
399 Do not pass a value through protect_from_queue and then hold
400 on to it for a while before putting it in an instruction!
401 If the queue is flushed in between, incorrect code will result. */
402
403rtx
404protect_from_queue (x, modify)
405 register rtx x;
406 int modify;
407{
408 register RTX_CODE code = GET_CODE (x);
409
410#if 0 /* A QUEUED can hang around after the queue is forced out. */
411 /* Shortcut for most common case. */
412 if (pending_chain == 0)
413 return x;
414#endif
415
416 if (code != QUEUED)
417 {
e9baa644
RK
418 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
419 use of autoincrement. Make a copy of the contents of the memory
420 location rather than a copy of the address, but not if the value is
421 of mode BLKmode. Don't modify X in place since it might be
422 shared. */
bbf6f052
RK
423 if (code == MEM && GET_MODE (x) != BLKmode
424 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
425 {
426 register rtx y = XEXP (x, 0);
e9baa644
RK
427 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
428
429 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
430 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
431 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
432
bbf6f052
RK
433 if (QUEUED_INSN (y))
434 {
e9baa644
RK
435 register rtx temp = gen_reg_rtx (GET_MODE (new));
436 emit_insn_before (gen_move_insn (temp, new),
bbf6f052
RK
437 QUEUED_INSN (y));
438 return temp;
439 }
e9baa644 440 return new;
bbf6f052
RK
441 }
442 /* Otherwise, recursively protect the subexpressions of all
443 the kinds of rtx's that can contain a QUEUED. */
444 if (code == MEM)
3f15938e
RS
445 {
446 rtx tem = protect_from_queue (XEXP (x, 0), 0);
447 if (tem != XEXP (x, 0))
448 {
449 x = copy_rtx (x);
450 XEXP (x, 0) = tem;
451 }
452 }
bbf6f052
RK
453 else if (code == PLUS || code == MULT)
454 {
3f15938e
RS
455 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
456 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
457 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
458 {
459 x = copy_rtx (x);
460 XEXP (x, 0) = new0;
461 XEXP (x, 1) = new1;
462 }
bbf6f052
RK
463 }
464 return x;
465 }
466 /* If the increment has not happened, use the variable itself. */
467 if (QUEUED_INSN (x) == 0)
468 return QUEUED_VAR (x);
469 /* If the increment has happened and a pre-increment copy exists,
470 use that copy. */
471 if (QUEUED_COPY (x) != 0)
472 return QUEUED_COPY (x);
473 /* The increment has happened but we haven't set up a pre-increment copy.
474 Set one up now, and use it. */
475 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
476 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
477 QUEUED_INSN (x));
478 return QUEUED_COPY (x);
479}
480
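/* A minimal usage sketch (hypothetical rtx's VALUE and TARGET; not
   part of the original file): filter the operand, use it at once,
   then flush the queue.  */
#if 0
  rtx op = protect_from_queue (value, 0);
  emit_insn (gen_move_insn (target, op));	/* use OP immediately */
  emit_queue ();	/* only now let the queued increments come out */
#endif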
481/* Return nonzero if X contains a QUEUED expression:
482 if it contains anything that will be altered by a queued increment.
483 We handle only combinations of MEM, PLUS, MINUS and MULT operators
484 since memory addresses generally contain only those. */
485
486static int
487queued_subexp_p (x)
488 rtx x;
489{
490 register enum rtx_code code = GET_CODE (x);
491 switch (code)
492 {
493 case QUEUED:
494 return 1;
495 case MEM:
496 return queued_subexp_p (XEXP (x, 0));
497 case MULT:
498 case PLUS:
499 case MINUS:
500 return queued_subexp_p (XEXP (x, 0))
501 || queued_subexp_p (XEXP (x, 1));
502 }
503 return 0;
504}
505
506/* Perform all the pending incrementations. */
507
508void
509emit_queue ()
510{
511 register rtx p;
512 while (p = pending_chain)
513 {
514 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
515 pending_chain = QUEUED_NEXT (p);
516 }
517}
518
519static void
520init_queue ()
521{
522 if (pending_chain)
523 abort ();
524}
525\f
526/* Copy data from FROM to TO, where the machine modes are not the same.
527 Both modes may be integer, or both may be floating.
528 UNSIGNEDP should be nonzero if FROM is an unsigned type.
529 This causes zero-extension instead of sign-extension. */
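/* A minimal usage sketch (hypothetical pseudos; not part of the
   original file): sign-extend a QImode value into an SImode register.  */
#if 0
  rtx qi = gen_reg_rtx (QImode);
  rtx si = gen_reg_rtx (SImode);
  convert_move (si, qi, 0);	/* UNSIGNEDP == 0: sign-extension */
#endif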
530
531void
532convert_move (to, from, unsignedp)
533 register rtx to, from;
534 int unsignedp;
535{
536 enum machine_mode to_mode = GET_MODE (to);
537 enum machine_mode from_mode = GET_MODE (from);
538 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
539 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
540 enum insn_code code;
541 rtx libcall;
542
543 /* rtx code for making an equivalent value. */
544 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
545
546 to = protect_from_queue (to, 1);
547 from = protect_from_queue (from, 0);
548
549 if (to_real != from_real)
550 abort ();
551
1499e0a8
RK
552 /* If FROM is a SUBREG that indicates that we have already done at least
553 the required extension, strip it. We don't handle such SUBREGs as
554 TO here. */
555
556 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
557 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
558 >= GET_MODE_SIZE (to_mode))
559 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
560 from = gen_lowpart (to_mode, from), from_mode = to_mode;
561
562 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
563 abort ();
564
bbf6f052
RK
565 if (to_mode == from_mode
566 || (from_mode == VOIDmode && CONSTANT_P (from)))
567 {
568 emit_move_insn (to, from);
569 return;
570 }
571
572 if (to_real)
573 {
81d79e2c
RS
574 rtx value;
575
b424402e
RS
576#ifdef HAVE_extendqfhf2
 577 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
 578 {
 579 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
580 return;
581 }
582#endif
583#ifdef HAVE_extendqfsf2
584 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
585 {
586 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
587 return;
588 }
589#endif
590#ifdef HAVE_extendqfdf2
591 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
592 {
593 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
594 return;
595 }
596#endif
597#ifdef HAVE_extendqfxf2
598 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
599 {
600 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
601 return;
602 }
603#endif
604#ifdef HAVE_extendqftf2
605 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
606 {
607 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
608 return;
609 }
610#endif
611
612#ifdef HAVE_extendhfsf2
613 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
614 {
615 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
616 return;
617 }
618#endif
619#ifdef HAVE_extendhfdf2
620 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
621 {
622 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
623 return;
624 }
625#endif
626#ifdef HAVE_extendhfxf2
627 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
628 {
629 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
630 return;
631 }
632#endif
633#ifdef HAVE_extendhftf2
634 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
635 {
636 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
637 return;
638 }
639#endif
640
bbf6f052
RK
641#ifdef HAVE_extendsfdf2
642 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
643 {
644 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
645 return;
646 }
647#endif
b092b471
JW
648#ifdef HAVE_extendsfxf2
649 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
650 {
651 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
652 return;
653 }
654#endif
bbf6f052
RK
655#ifdef HAVE_extendsftf2
656 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
657 {
658 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
659 return;
660 }
661#endif
b092b471
JW
662#ifdef HAVE_extenddfxf2
663 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
664 {
665 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
666 return;
667 }
668#endif
bbf6f052
RK
669#ifdef HAVE_extenddftf2
670 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
671 {
672 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
673 return;
674 }
675#endif
b424402e
RS
676
677#ifdef HAVE_trunchfqf2
678 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
679 {
680 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
681 return;
682 }
683#endif
684#ifdef HAVE_truncsfqf2
685 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
686 {
687 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
688 return;
689 }
690#endif
691#ifdef HAVE_truncdfqf2
692 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
693 {
694 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
695 return;
696 }
697#endif
698#ifdef HAVE_truncxfqf2
699 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
700 {
701 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
702 return;
703 }
704#endif
705#ifdef HAVE_trunctfqf2
706 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
707 {
708 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
709 return;
710 }
711#endif
712#ifdef HAVE_truncsfhf2
713 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
714 {
715 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
716 return;
717 }
718#endif
719#ifdef HAVE_truncdfhf2
720 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
721 {
722 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
723 return;
724 }
725#endif
726#ifdef HAVE_truncxfhf2
727 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
728 {
729 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
730 return;
731 }
732#endif
733#ifdef HAVE_trunctfhf2
734 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
735 {
736 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
737 return;
738 }
739#endif
bbf6f052
RK
740#ifdef HAVE_truncdfsf2
741 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
742 {
743 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
744 return;
745 }
746#endif
b092b471
JW
747#ifdef HAVE_truncxfsf2
748 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
749 {
750 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
751 return;
752 }
753#endif
bbf6f052
RK
754#ifdef HAVE_trunctfsf2
755 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
756 {
757 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
758 return;
759 }
760#endif
b092b471
JW
761#ifdef HAVE_truncxfdf2
762 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
763 {
764 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
765 return;
766 }
767#endif
bbf6f052
RK
768#ifdef HAVE_trunctfdf2
769 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
770 {
771 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
772 return;
773 }
774#endif
775
b092b471
JW
776 libcall = (rtx) 0;
777 switch (from_mode)
778 {
779 case SFmode:
780 switch (to_mode)
781 {
782 case DFmode:
783 libcall = extendsfdf2_libfunc;
784 break;
785
786 case XFmode:
787 libcall = extendsfxf2_libfunc;
788 break;
789
790 case TFmode:
791 libcall = extendsftf2_libfunc;
792 break;
793 }
794 break;
795
796 case DFmode:
797 switch (to_mode)
798 {
799 case SFmode:
800 libcall = truncdfsf2_libfunc;
801 break;
802
803 case XFmode:
804 libcall = extenddfxf2_libfunc;
805 break;
806
807 case TFmode:
808 libcall = extenddftf2_libfunc;
809 break;
810 }
811 break;
812
813 case XFmode:
814 switch (to_mode)
815 {
816 case SFmode:
817 libcall = truncxfsf2_libfunc;
818 break;
819
820 case DFmode:
821 libcall = truncxfdf2_libfunc;
822 break;
823 }
824 break;
825
826 case TFmode:
827 switch (to_mode)
828 {
829 case SFmode:
830 libcall = trunctfsf2_libfunc;
831 break;
832
833 case DFmode:
834 libcall = trunctfdf2_libfunc;
835 break;
836 }
837 break;
838 }
839
840 if (libcall == (rtx) 0)
841 /* This conversion is not implemented yet. */
bbf6f052
RK
842 abort ();
843
81d79e2c
RS
844 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
845 1, from, from_mode);
846 emit_move_insn (to, value);
bbf6f052
RK
847 return;
848 }
849
850 /* Now both modes are integers. */
851
852 /* Handle expanding beyond a word. */
853 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
854 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
855 {
856 rtx insns;
857 rtx lowpart;
858 rtx fill_value;
859 rtx lowfrom;
860 int i;
861 enum machine_mode lowpart_mode;
862 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
863
864 /* Try converting directly if the insn is supported. */
865 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
866 != CODE_FOR_nothing)
867 {
cd1b4b44
RK
868 /* If FROM is a SUBREG, put it into a register. Do this
869 so that we always generate the same set of insns for
870 better cse'ing; if an intermediate assignment occurred,
871 we won't be doing the operation directly on the SUBREG. */
872 if (optimize > 0 && GET_CODE (from) == SUBREG)
873 from = force_reg (from_mode, from);
bbf6f052
RK
874 emit_unop_insn (code, to, from, equiv_code);
875 return;
876 }
877 /* Next, try converting via full word. */
878 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
879 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
880 != CODE_FOR_nothing))
881 {
a81fee56
RS
882 if (GET_CODE (to) == REG)
883 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
bbf6f052
RK
884 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
885 emit_unop_insn (code, to,
886 gen_lowpart (word_mode, to), equiv_code);
887 return;
888 }
889
890 /* No special multiword conversion insn; do it by hand. */
891 start_sequence ();
892
893 /* Get a copy of FROM widened to a word, if necessary. */
894 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
895 lowpart_mode = word_mode;
896 else
897 lowpart_mode = from_mode;
898
899 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
900
901 lowpart = gen_lowpart (lowpart_mode, to);
902 emit_move_insn (lowpart, lowfrom);
903
904 /* Compute the value to put in each remaining word. */
905 if (unsignedp)
906 fill_value = const0_rtx;
907 else
908 {
909#ifdef HAVE_slt
910 if (HAVE_slt
911 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
912 && STORE_FLAG_VALUE == -1)
913 {
906c4e36
RK
914 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
915 lowpart_mode, 0, 0);
bbf6f052
RK
916 fill_value = gen_reg_rtx (word_mode);
917 emit_insn (gen_slt (fill_value));
918 }
919 else
920#endif
921 {
922 fill_value
923 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
924 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 925 NULL_RTX, 0);
bbf6f052
RK
926 fill_value = convert_to_mode (word_mode, fill_value, 1);
927 }
928 }
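	  /* Either way: with a 32-bit LOWPART_MODE, for instance, the
	     arithmetic right shift by 31 above yields 0 for a
	     nonnegative value and -1 for a negative one -- exactly the
	     word that must be stored into every remaining word below.  */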
929
930 /* Fill the remaining words. */
931 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
932 {
933 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
934 rtx subword = operand_subword (to, index, 1, to_mode);
935
936 if (subword == 0)
937 abort ();
938
939 if (fill_value != subword)
940 emit_move_insn (subword, fill_value);
941 }
942
943 insns = get_insns ();
944 end_sequence ();
945
906c4e36 946 emit_no_conflict_block (insns, to, from, NULL_RTX,
2abec1b7 947 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
948 return;
949 }
950
d3c64ee3
RS
951 /* Truncating multi-word to a word or less. */
952 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
953 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 954 {
431a6eca
JW
955 if (!((GET_CODE (from) == MEM
956 && ! MEM_VOLATILE_P (from)
957 && direct_load[(int) to_mode]
958 && ! mode_dependent_address_p (XEXP (from, 0)))
959 || GET_CODE (from) == REG
960 || GET_CODE (from) == SUBREG))
961 from = force_reg (from_mode, from);
bbf6f052
RK
962 convert_move (to, gen_lowpart (word_mode, from), 0);
963 return;
964 }
965
 966 /* Handle pointer conversion. */ /* SPEE 900220 */
967 if (to_mode == PSImode)
968 {
969 if (from_mode != SImode)
970 from = convert_to_mode (SImode, from, unsignedp);
971
1f584163
DE
972#ifdef HAVE_truncsipsi2
973 if (HAVE_truncsipsi2)
bbf6f052 974 {
1f584163 975 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
bbf6f052
RK
976 return;
977 }
1f584163 978#endif /* HAVE_truncsipsi2 */
bbf6f052
RK
979 abort ();
980 }
981
982 if (from_mode == PSImode)
983 {
984 if (to_mode != SImode)
985 {
986 from = convert_to_mode (SImode, from, unsignedp);
987 from_mode = SImode;
988 }
989 else
990 {
1f584163
DE
991#ifdef HAVE_extendpsisi2
992 if (HAVE_extendpsisi2)
bbf6f052 993 {
1f584163 994 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
bbf6f052
RK
995 return;
996 }
1f584163 997#endif /* HAVE_extendpsisi2 */
bbf6f052
RK
998 abort ();
999 }
1000 }
1001
1002 /* Now follow all the conversions between integers
1003 no more than a word long. */
1004
1005 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1006 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1007 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 1008 GET_MODE_BITSIZE (from_mode)))
bbf6f052 1009 {
d3c64ee3
RS
1010 if (!((GET_CODE (from) == MEM
1011 && ! MEM_VOLATILE_P (from)
1012 && direct_load[(int) to_mode]
1013 && ! mode_dependent_address_p (XEXP (from, 0)))
1014 || GET_CODE (from) == REG
1015 || GET_CODE (from) == SUBREG))
1016 from = force_reg (from_mode, from);
bbf6f052
RK
1017 emit_move_insn (to, gen_lowpart (to_mode, from));
1018 return;
1019 }
1020
d3c64ee3 1021 /* Handle extension. */
bbf6f052
RK
1022 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1023 {
1024 /* Convert directly if that works. */
1025 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1026 != CODE_FOR_nothing)
1027 {
3dc4195c
RK
1028 /* If FROM is a SUBREG, put it into a register. Do this
1029 so that we always generate the same set of insns for
1030 better cse'ing; if an intermediate assignment occurred,
1031 we won't be doing the operation directly on the SUBREG. */
1032 if (optimize > 0 && GET_CODE (from) == SUBREG)
1033 from = force_reg (from_mode, from);
bbf6f052
RK
1034 emit_unop_insn (code, to, from, equiv_code);
1035 return;
1036 }
1037 else
1038 {
1039 enum machine_mode intermediate;
1040
1041 /* Search for a mode to convert via. */
1042 for (intermediate = from_mode; intermediate != VOIDmode;
1043 intermediate = GET_MODE_WIDER_MODE (intermediate))
1044 if ((can_extend_p (to_mode, intermediate, unsignedp)
1045 != CODE_FOR_nothing)
1046 && (can_extend_p (intermediate, from_mode, unsignedp)
1047 != CODE_FOR_nothing))
1048 {
1049 convert_move (to, convert_to_mode (intermediate, from,
1050 unsignedp), unsignedp);
1051 return;
1052 }
1053
1054 /* No suitable intermediate mode. */
1055 abort ();
1056 }
1057 }
1058
1059 /* Support special truncate insns for certain modes. */
1060
1061 if (from_mode == DImode && to_mode == SImode)
1062 {
1063#ifdef HAVE_truncdisi2
1064 if (HAVE_truncdisi2)
1065 {
1066 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1067 return;
1068 }
1069#endif
1070 convert_move (to, force_reg (from_mode, from), unsignedp);
1071 return;
1072 }
1073
1074 if (from_mode == DImode && to_mode == HImode)
1075 {
1076#ifdef HAVE_truncdihi2
1077 if (HAVE_truncdihi2)
1078 {
1079 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1080 return;
1081 }
1082#endif
1083 convert_move (to, force_reg (from_mode, from), unsignedp);
1084 return;
1085 }
1086
1087 if (from_mode == DImode && to_mode == QImode)
1088 {
1089#ifdef HAVE_truncdiqi2
1090 if (HAVE_truncdiqi2)
1091 {
1092 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1093 return;
1094 }
1095#endif
1096 convert_move (to, force_reg (from_mode, from), unsignedp);
1097 return;
1098 }
1099
1100 if (from_mode == SImode && to_mode == HImode)
1101 {
1102#ifdef HAVE_truncsihi2
1103 if (HAVE_truncsihi2)
1104 {
1105 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1106 return;
1107 }
1108#endif
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1110 return;
1111 }
1112
1113 if (from_mode == SImode && to_mode == QImode)
1114 {
1115#ifdef HAVE_truncsiqi2
1116 if (HAVE_truncsiqi2)
1117 {
1118 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1119 return;
1120 }
1121#endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1124 }
1125
1126 if (from_mode == HImode && to_mode == QImode)
1127 {
1128#ifdef HAVE_trunchiqi2
1129 if (HAVE_trunchiqi2)
1130 {
1131 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1132 return;
1133 }
1134#endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1137 }
1138
1139 /* Handle truncation of volatile memrefs, and so on;
1140 the things that couldn't be truncated directly,
1141 and for which there was no special instruction. */
1142 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1143 {
1144 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1145 emit_move_insn (to, temp);
1146 return;
1147 }
1148
1149 /* Mode combination is not recognized. */
1150 abort ();
1151}
1152
1153/* Return an rtx for a value that would result
1154 from converting X to mode MODE.
1155 Both X and MODE may be floating, or both integer.
1156 UNSIGNEDP is nonzero if X is an unsigned value.
1157 This can be done by referring to a part of X in place
5d901c31
RS
1158 or by copying to a new temporary with conversion.
1159
1160 This function *must not* call protect_from_queue
1161 except when putting X into an insn (in which case convert_move does it). */
bbf6f052
RK
1162
1163rtx
1164convert_to_mode (mode, x, unsignedp)
1165 enum machine_mode mode;
1166 rtx x;
1167 int unsignedp;
5ffe63ed
RS
1168{
1169 return convert_modes (mode, VOIDmode, x, unsignedp);
1170}
1171
1172/* Return an rtx for a value that would result
1173 from converting X from mode OLDMODE to mode MODE.
1174 Both modes may be floating, or both integer.
1175 UNSIGNEDP is nonzero if X is an unsigned value.
1176
1177 This can be done by referring to a part of X in place
1178 or by copying to a new temporary with conversion.
1179
1180 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1181
1182 This function *must not* call protect_from_queue
1183 except when putting X into an insn (in which case convert_move does it). */
1184
1185rtx
1186convert_modes (mode, oldmode, x, unsignedp)
1187 enum machine_mode mode, oldmode;
1188 rtx x;
1189 int unsignedp;
bbf6f052
RK
1190{
1191 register rtx temp;
5ffe63ed 1192
1499e0a8
RK
1193 /* If FROM is a SUBREG that indicates that we have already done at least
1194 the required extension, strip it. */
1195
1196 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1197 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1198 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1199 x = gen_lowpart (mode, x);
bbf6f052 1200
64791b18
RK
1201 if (GET_MODE (x) != VOIDmode)
1202 oldmode = GET_MODE (x);
1203
5ffe63ed 1204 if (mode == oldmode)
bbf6f052
RK
1205 return x;
1206
1207 /* There is one case that we must handle specially: If we are converting
906c4e36 1208 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
1209 we are to interpret the constant as unsigned, gen_lowpart will do
 1210 the wrong thing if the constant appears negative. What we want to do is
1211 make the high-order word of the constant zero, not all ones. */
1212
1213 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 1214 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 1215 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
906c4e36 1216 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
bbf6f052
RK
1217
1218 /* We can do this with a gen_lowpart if both desired and current modes
1219 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
1220 non-volatile MEM. Except for the constant case where MODE is no
1221 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 1222
ba2e110c
RK
1223 if ((GET_CODE (x) == CONST_INT
1224 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 1225 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 1226 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 1227 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 1228 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
d57c66da
JW
1229 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1230 && direct_load[(int) mode])
2bf29316
JW
1231 || (GET_CODE (x) == REG
1232 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1233 GET_MODE_BITSIZE (GET_MODE (x)))))))))
ba2e110c
RK
1234 {
1235 /* ?? If we don't know OLDMODE, we have to assume here that
1236 X does not need sign- or zero-extension. This may not be
1237 the case, but it's the best we can do. */
1238 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1239 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1240 {
1241 HOST_WIDE_INT val = INTVAL (x);
1242 int width = GET_MODE_BITSIZE (oldmode);
1243
1244 /* We must sign or zero-extend in this case. Start by
1245 zero-extending, then sign extend if we need to. */
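	  /* Worked example: converting (const_int 255) from QImode
	     (WIDTH == 8) as signed keeps the low byte, 0xff; bit 0x80
	     is set, so the high bits are filled with ones and the
	     value returned is (const_int -1).  */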
1246 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1247 if (! unsignedp
1248 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1249 val |= (HOST_WIDE_INT) (-1) << width;
1250
1251 return GEN_INT (val);
1252 }
1253
1254 return gen_lowpart (mode, x);
1255 }
bbf6f052
RK
1256
1257 temp = gen_reg_rtx (mode);
1258 convert_move (temp, x, unsignedp);
1259 return temp;
1260}
1261\f
1262/* Generate several move instructions to copy LEN bytes
1263 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1264 The caller must pass FROM and TO
1265 through protect_from_queue before calling.
1266 ALIGN (in bytes) is maximum alignment we can assume. */
1267
bbf6f052
RK
1268static void
1269move_by_pieces (to, from, len, align)
1270 rtx to, from;
1271 int len, align;
1272{
1273 struct move_by_pieces data;
1274 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
e87b4f3f 1275 int max_size = MOVE_MAX + 1;
bbf6f052
RK
1276
1277 data.offset = 0;
1278 data.to_addr = to_addr;
1279 data.from_addr = from_addr;
1280 data.to = to;
1281 data.from = from;
1282 data.autinc_to
1283 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1284 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1285 data.autinc_from
1286 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1287 || GET_CODE (from_addr) == POST_INC
1288 || GET_CODE (from_addr) == POST_DEC);
1289
1290 data.explicit_inc_from = 0;
1291 data.explicit_inc_to = 0;
1292 data.reverse
1293 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1294 if (data.reverse) data.offset = len;
1295 data.len = len;
1296
1297 /* If copying requires more than two move insns,
1298 copy addresses to registers (to make displacements shorter)
1299 and use post-increment if available. */
1300 if (!(data.autinc_from && data.autinc_to)
1301 && move_by_pieces_ninsns (len, align) > 2)
1302 {
1303#ifdef HAVE_PRE_DECREMENT
1304 if (data.reverse && ! data.autinc_from)
1305 {
1306 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1307 data.autinc_from = 1;
1308 data.explicit_inc_from = -1;
1309 }
1310#endif
1311#ifdef HAVE_POST_INCREMENT
1312 if (! data.autinc_from)
1313 {
1314 data.from_addr = copy_addr_to_reg (from_addr);
1315 data.autinc_from = 1;
1316 data.explicit_inc_from = 1;
1317 }
1318#endif
1319 if (!data.autinc_from && CONSTANT_P (from_addr))
1320 data.from_addr = copy_addr_to_reg (from_addr);
1321#ifdef HAVE_PRE_DECREMENT
1322 if (data.reverse && ! data.autinc_to)
1323 {
1324 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1325 data.autinc_to = 1;
1326 data.explicit_inc_to = -1;
1327 }
1328#endif
1329#ifdef HAVE_POST_INCREMENT
1330 if (! data.reverse && ! data.autinc_to)
1331 {
1332 data.to_addr = copy_addr_to_reg (to_addr);
1333 data.autinc_to = 1;
1334 data.explicit_inc_to = 1;
1335 }
1336#endif
1337 if (!data.autinc_to && CONSTANT_P (to_addr))
1338 data.to_addr = copy_addr_to_reg (to_addr);
1339 }
1340
e87b4f3f
RS
1341 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1342 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1343 align = MOVE_MAX;
bbf6f052
RK
1344
1345 /* First move what we can in the largest integer mode, then go to
1346 successively smaller modes. */
1347
1348 while (max_size > 1)
1349 {
1350 enum machine_mode mode = VOIDmode, tmode;
1351 enum insn_code icode;
1352
e7c33f54
RK
1353 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1354 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1355 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1356 mode = tmode;
1357
1358 if (mode == VOIDmode)
1359 break;
1360
1361 icode = mov_optab->handlers[(int) mode].insn_code;
1362 if (icode != CODE_FOR_nothing
1363 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1364 GET_MODE_SIZE (mode)))
1365 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1366
1367 max_size = GET_MODE_SIZE (mode);
1368 }
1369
1370 /* The code above should have handled everything. */
1371 if (data.len != 0)
1372 abort ();
1373}
1374
1375/* Return number of insns required to move L bytes by pieces.
1376 ALIGN (in bytes) is maximum alignment we can assume. */
1377
1378static int
1379move_by_pieces_ninsns (l, align)
1380 unsigned int l;
1381 int align;
1382{
1383 register int n_insns = 0;
e87b4f3f 1384 int max_size = MOVE_MAX + 1;
bbf6f052 1385
e87b4f3f
RS
1386 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1387 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1388 align = MOVE_MAX;
bbf6f052
RK
1389
1390 while (max_size > 1)
1391 {
1392 enum machine_mode mode = VOIDmode, tmode;
1393 enum insn_code icode;
1394
e7c33f54
RK
1395 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1396 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1397 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1398 mode = tmode;
1399
1400 if (mode == VOIDmode)
1401 break;
1402
1403 icode = mov_optab->handlers[(int) mode].insn_code;
1404 if (icode != CODE_FOR_nothing
1405 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1406 GET_MODE_SIZE (mode)))
1407 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1408
1409 max_size = GET_MODE_SIZE (mode);
1410 }
1411
1412 return n_insns;
1413}
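/* Worked example (assuming MOVE_MAX == 4 and the usual mov patterns):
   for L == 11 and ALIGN == 4 the loop counts 2 SImode moves (8 bytes),
   then 1 HImode move, then 1 QImode move -- 4 insns in all.  */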
1414
1415/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1416 with move instructions for mode MODE. GENFUN is the gen_... function
1417 to make a move insn for that mode. DATA has all the other info. */
1418
1419static void
1420move_by_pieces_1 (genfun, mode, data)
1421 rtx (*genfun) ();
1422 enum machine_mode mode;
1423 struct move_by_pieces *data;
1424{
1425 register int size = GET_MODE_SIZE (mode);
1426 register rtx to1, from1;
1427
1428 while (data->len >= size)
1429 {
1430 if (data->reverse) data->offset -= size;
1431
1432 to1 = (data->autinc_to
1433 ? gen_rtx (MEM, mode, data->to_addr)
1434 : change_address (data->to, mode,
1435 plus_constant (data->to_addr, data->offset)));
1436 from1 =
1437 (data->autinc_from
1438 ? gen_rtx (MEM, mode, data->from_addr)
1439 : change_address (data->from, mode,
1440 plus_constant (data->from_addr, data->offset)));
1441
1442#ifdef HAVE_PRE_DECREMENT
1443 if (data->explicit_inc_to < 0)
906c4e36 1444 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
bbf6f052 1445 if (data->explicit_inc_from < 0)
906c4e36 1446 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
bbf6f052
RK
1447#endif
1448
1449 emit_insn ((*genfun) (to1, from1));
1450#ifdef HAVE_POST_INCREMENT
1451 if (data->explicit_inc_to > 0)
906c4e36 1452 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
bbf6f052 1453 if (data->explicit_inc_from > 0)
906c4e36 1454 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052
RK
1455#endif
1456
1457 if (! data->reverse) data->offset += size;
1458
1459 data->len -= size;
1460 }
1461}
1462\f
1463/* Emit code to move a block Y to a block X.
1464 This may be done with string-move instructions,
1465 with multiple scalar move instructions, or with a library call.
1466
1467 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1468 with mode BLKmode.
1469 SIZE is an rtx that says how long they are.
1470 ALIGN is the maximum alignment we can assume they have,
1471 measured in bytes. */
1472
1473void
1474emit_block_move (x, y, size, align)
1475 rtx x, y;
1476 rtx size;
1477 int align;
1478{
1479 if (GET_MODE (x) != BLKmode)
1480 abort ();
1481
1482 if (GET_MODE (y) != BLKmode)
1483 abort ();
1484
1485 x = protect_from_queue (x, 1);
1486 y = protect_from_queue (y, 0);
5d901c31 1487 size = protect_from_queue (size, 0);
bbf6f052
RK
1488
1489 if (GET_CODE (x) != MEM)
1490 abort ();
1491 if (GET_CODE (y) != MEM)
1492 abort ();
1493 if (size == 0)
1494 abort ();
1495
1496 if (GET_CODE (size) == CONST_INT
906c4e36 1497 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
bbf6f052
RK
1498 move_by_pieces (x, y, INTVAL (size), align);
1499 else
1500 {
1501 /* Try the most limited insn first, because there's no point
1502 including more than one in the machine description unless
1503 the more limited one has some advantage. */
266007a7 1504
0bba3f6f 1505 rtx opalign = GEN_INT (align);
266007a7
RK
1506 enum machine_mode mode;
1507
1508 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1509 mode = GET_MODE_WIDER_MODE (mode))
bbf6f052 1510 {
266007a7 1511 enum insn_code code = movstr_optab[(int) mode];
266007a7
RK
1512
1513 if (code != CODE_FOR_nothing
803090c4
RK
 1514 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1515 here because if SIZE is less than the mode mask, as it is
8008b228 1516 returned by the macro, it will definitely be less than the
803090c4 1517 actual mode mask. */
f85b95d1 1518 && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
0bba3f6f
RK
1519 && (insn_operand_predicate[(int) code][0] == 0
1520 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1521 && (insn_operand_predicate[(int) code][1] == 0
1522 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1523 && (insn_operand_predicate[(int) code][3] == 0
1524 || (*insn_operand_predicate[(int) code][3]) (opalign,
1525 VOIDmode)))
bbf6f052 1526 {
1ba1e2a8 1527 rtx op2;
266007a7
RK
1528 rtx last = get_last_insn ();
1529 rtx pat;
1530
1ba1e2a8 1531 op2 = convert_to_mode (mode, size, 1);
0bba3f6f
RK
1532 if (insn_operand_predicate[(int) code][2] != 0
1533 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
266007a7
RK
1534 op2 = copy_to_mode_reg (mode, op2);
1535
1536 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1537 if (pat)
1538 {
1539 emit_insn (pat);
1540 return;
1541 }
1542 else
1543 delete_insns_since (last);
bbf6f052
RK
1544 }
1545 }
bbf6f052
RK
1546
1547#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1548 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
1549 VOIDmode, 3, XEXP (x, 0), Pmode,
1550 XEXP (y, 0), Pmode,
0fa83258
RK
1551 convert_to_mode (TYPE_MODE (sizetype), size,
1552 TREE_UNSIGNED (sizetype)),
1553 TYPE_MODE (sizetype));
bbf6f052 1554#else
d562e42e 1555 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
1556 VOIDmode, 3, XEXP (y, 0), Pmode,
1557 XEXP (x, 0), Pmode,
0fa83258
RK
1558 convert_to_mode (TYPE_MODE (sizetype), size,
1559 TREE_UNSIGNED (sizetype)),
1560 TYPE_MODE (sizetype));
bbf6f052
RK
1561#endif
1562 }
1563}
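/* A minimal usage sketch (hypothetical BLKmode MEMs X and Y; not part
   of the original file): copy 64 bytes, promising only byte alignment,
   and let the strategy above pick the cheapest expansion.  */
#if 0
  emit_block_move (x, y, GEN_INT (64), 1);
#endif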
1564\f
1565/* Copy all or part of a value X into registers starting at REGNO.
1566 The number of registers to be filled is NREGS. */
1567
1568void
1569move_block_to_reg (regno, x, nregs, mode)
1570 int regno;
1571 rtx x;
1572 int nregs;
1573 enum machine_mode mode;
1574{
1575 int i;
1576 rtx pat, last;
1577
1578 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1579 x = validize_mem (force_const_mem (mode, x));
1580
1581 /* See if the machine can do this with a load multiple insn. */
1582#ifdef HAVE_load_multiple
c3a02afe 1583 if (HAVE_load_multiple)
bbf6f052 1584 {
c3a02afe
RK
1585 last = get_last_insn ();
1586 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1587 GEN_INT (nregs));
1588 if (pat)
1589 {
1590 emit_insn (pat);
1591 return;
1592 }
1593 else
1594 delete_insns_since (last);
bbf6f052 1595 }
bbf6f052
RK
1596#endif
1597
1598 for (i = 0; i < nregs; i++)
1599 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1600 operand_subword_force (x, i, mode));
1601}
1602
1603/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1604 The number of registers to be filled is NREGS. SIZE indicates the number
1605 of bytes in the object X. */
1606
bbf6f052
RK
1607
1608void
0040593d 1609move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1610 int regno;
1611 rtx x;
1612 int nregs;
0040593d 1613 int size;
bbf6f052
RK
1614{
1615 int i;
1616 rtx pat, last;
1617
0040593d
JW
1618 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1619 to the left before storing to memory. */
1620 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1621 {
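 /* For instance, with 4-byte words and SIZE == 3, the value occupies
 the low-order 3 bytes of the register; shifting left by
 (4 - 3) * BITS_PER_UNIT == 8 bits moves it to the high-order end,
 which on a big-endian machine supplies the first 3 bytes stored
 to memory. */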
1622 rtx tem = operand_subword (x, 0, 1, BLKmode);
1623 rtx shift;
1624
1625 if (tem == 0)
1626 abort ();
1627
1628 shift = expand_shift (LSHIFT_EXPR, word_mode,
1629 gen_rtx (REG, word_mode, regno),
1630 build_int_2 ((UNITS_PER_WORD - size)
1631 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1632 emit_move_insn (tem, shift);
1633 return;
1634 }
1635
bbf6f052
RK
1636 /* See if the machine can do this with a store multiple insn. */
1637#ifdef HAVE_store_multiple
c3a02afe 1638 if (HAVE_store_multiple)
bbf6f052 1639 {
c3a02afe
RK
1640 last = get_last_insn ();
1641 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1642 GEN_INT (nregs));
1643 if (pat)
1644 {
1645 emit_insn (pat);
1646 return;
1647 }
1648 else
1649 delete_insns_since (last);
bbf6f052 1650 }
bbf6f052
RK
1651#endif
1652
1653 for (i = 0; i < nregs; i++)
1654 {
1655 rtx tem = operand_subword (x, i, 1, BLKmode);
1656
1657 if (tem == 0)
1658 abort ();
1659
1660 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1661 }
1662}
1663
1664/* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1665
1666void
1667use_regs (regno, nregs)
1668 int regno;
1669 int nregs;
1670{
1671 int i;
1672
1673 for (i = 0; i < nregs; i++)
1674 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1675}
1676\f
1677/* Write zeros through the storage of OBJECT.
1678 If OBJECT has BLKmode, SIZE is its length in bytes. */
1679
1680void
1681clear_storage (object, size)
1682 rtx object;
1683 int size;
1684{
1685 if (GET_MODE (object) == BLKmode)
1686 {
1687#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1688 emit_library_call (memset_libfunc, 0,
bbf6f052
RK
1689 VOIDmode, 3,
1690 XEXP (object, 0), Pmode, const0_rtx, Pmode,
906c4e36 1691 GEN_INT (size), Pmode);
bbf6f052 1692#else
d562e42e 1693 emit_library_call (bzero_libfunc, 0,
bbf6f052
RK
1694 VOIDmode, 2,
1695 XEXP (object, 0), Pmode,
906c4e36 1696 GEN_INT (size), Pmode);
bbf6f052
RK
1697#endif
1698 }
1699 else
1700 emit_move_insn (object, const0_rtx);
1701}
1702
1703/* Generate code to copy Y into X.
1704 Both Y and X must have the same mode, except that
1705 Y can be a constant with VOIDmode.
1706 This mode cannot be BLKmode; use emit_block_move for that.
1707
1708 Return the last instruction emitted. */
1709
1710rtx
1711emit_move_insn (x, y)
1712 rtx x, y;
1713{
1714 enum machine_mode mode = GET_MODE (x);
bbf6f052
RK
1715
1716 x = protect_from_queue (x, 1);
1717 y = protect_from_queue (y, 0);
1718
1719 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1720 abort ();
1721
1722 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1723 y = force_const_mem (mode, y);
1724
1725 /* If X or Y are memory references, verify that their addresses are valid
1726 for the machine. */
1727 if (GET_CODE (x) == MEM
1728 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1729 && ! push_operand (x, GET_MODE (x)))
1730 || (flag_force_addr
1731 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1732 x = change_address (x, VOIDmode, XEXP (x, 0));
1733
1734 if (GET_CODE (y) == MEM
1735 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1736 || (flag_force_addr
1737 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1738 y = change_address (y, VOIDmode, XEXP (y, 0));
1739
1740 if (mode == BLKmode)
1741 abort ();
1742
261c4230
RS
1743 return emit_move_insn_1 (x, y);
1744}
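/* A minimal usage sketch (hypothetical SImode pseudo TARGET; not part
   of the original file): a VOIDmode constant is a legitimate source.  */
#if 0
  emit_move_insn (target, GEN_INT (42));
#endif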
1745
1746/* Low level part of emit_move_insn.
1747 Called just like emit_move_insn, but assumes X and Y
1748 are basically valid. */
1749
1750rtx
1751emit_move_insn_1 (x, y)
1752 rtx x, y;
1753{
1754 enum machine_mode mode = GET_MODE (x);
1755 enum machine_mode submode;
1756 enum mode_class class = GET_MODE_CLASS (mode);
1757 int i;
1758
bbf6f052
RK
1759 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1760 return
1761 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1762
89742723 1763 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 1764 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
1765 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
1766 * BITS_PER_UNIT),
1767 (class == MODE_COMPLEX_INT
1768 ? MODE_INT : MODE_FLOAT),
1769 0))
7308a047
RS
1770 && (mov_optab->handlers[(int) submode].insn_code
1771 != CODE_FOR_nothing))
1772 {
1773 /* Don't split destination if it is a stack push. */
1774 int stack = push_operand (x, GET_MODE (x));
6551fa4d 1775 rtx insns;
7308a047 1776
6551fa4d 1777 start_sequence ();
7308a047
RS
1778
1779 /* If this is a stack, push the highpart first, so it
1780 will be in the argument order.
1781
1782 In that case, change_address is used only to convert
1783 the mode, not to change the address. */
c937357e
RS
1784 if (stack)
1785 {
e33c0d66
RS
1786 /* Note that the real part always precedes the imag part in memory
1787 regardless of machine's endianness. */
c937357e
RS
1788#ifdef STACK_GROWS_DOWNWARD
1789 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1790 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1791 gen_imagpart (submode, y)));
c937357e
RS
1792 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1793 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1794 gen_realpart (submode, y)));
c937357e
RS
1795#else
1796 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1797 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1798 gen_realpart (submode, y)));
c937357e
RS
1799 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1800 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1801 gen_imagpart (submode, y)));
c937357e
RS
1802#endif
1803 }
1804 else
1805 {
1806 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 1807 (gen_realpart (submode, x), gen_realpart (submode, y)));
c937357e 1808 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 1809 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
c937357e 1810 }
7308a047 1811
6551fa4d
JW
1812 insns = get_insns ();
1813 end_sequence ();
1814
1815 /* If X is a CONCAT, we got insns like RD = RS, ID = IS,
1816 each with a separate pseudo as destination.
1817 It's not correct for flow to treat them as a unit. */
6d6e61ce 1818 if (GET_CODE (x) != CONCAT)
6551fa4d
JW
1819 emit_no_conflict_block (insns, x, y, NULL_RTX, NULL_RTX);
1820 else
1821 emit_insns (insns);
7a1ab50a
RS
1822
1823 return get_last_insn ();
7308a047
RS
1824 }
1825
bbf6f052
RK
1826 /* This will handle any multi-word mode that lacks a move_insn pattern.
1827 However, you will get better code if you define such patterns,
1828 even if they must turn into multiple assembler instructions. */
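  /* Illustrative example: on a 32-bit target with no DImode move
     pattern, a DImode copy falls through to this case and is emitted
     as two SImode moves, one per word, via operand_subword below.  */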
a4320483 1829 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
1830 {
1831 rtx last_insn = 0;
6551fa4d
JW
1832 rtx insns;
1833
1834 start_sequence ();
bbf6f052
RK
1835
1836 for (i = 0;
1837 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1838 i++)
1839 {
1840 rtx xpart = operand_subword (x, i, 1, mode);
1841 rtx ypart = operand_subword (y, i, 1, mode);
1842
1843 /* If we can't get a part of Y, put Y into memory if it is a
1844 constant. Otherwise, force it into a register. If we still
1845 can't get a part of Y, abort. */
1846 if (ypart == 0 && CONSTANT_P (y))
1847 {
1848 y = force_const_mem (mode, y);
1849 ypart = operand_subword (y, i, 1, mode);
1850 }
1851 else if (ypart == 0)
1852 ypart = operand_subword_force (y, i, mode);
1853
1854 if (xpart == 0 || ypart == 0)
1855 abort ();
1856
1857 last_insn = emit_move_insn (xpart, ypart);
1858 }
6551fa4d
JW
1859
1860 insns = get_insns ();
1861 end_sequence ();
1862 emit_no_conflict_block (insns, x, y, NULL_RTX, NULL_RTX);
7308a047 1863
bbf6f052
RK
1864 return last_insn;
1865 }
1866 else
1867 abort ();
1868}
1869\f
1870/* Pushing data onto the stack. */
1871
1872/* Push a block of length SIZE (perhaps variable)
1873 and return an rtx to address the beginning of the block.
1874 Note that it is not possible for the value returned to be a QUEUED.
1875 The value may be virtual_outgoing_args_rtx.
1876
1877 EXTRA is the number of bytes of padding to push in addition to SIZE.
1878 BELOW nonzero means this padding comes at low addresses;
1879 otherwise, the padding comes at high addresses. */
1880
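/* Illustrative caller (not in the original): reserving 16 bytes of
   argument space with no extra padding might be written

	rtx addr = push_block (GEN_INT (16), 0, 0);

   after which ADDR addresses the start of the newly pushed block.  */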
1881rtx
1882push_block (size, extra, below)
1883 rtx size;
1884 int extra, below;
1885{
1886 register rtx temp;
1887 if (CONSTANT_P (size))
1888 anti_adjust_stack (plus_constant (size, extra));
1889 else if (GET_CODE (size) == REG && extra == 0)
1890 anti_adjust_stack (size);
1891 else
1892 {
1893 rtx temp = copy_to_mode_reg (Pmode, size);
1894 if (extra != 0)
906c4e36 1895 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
1896 temp, 0, OPTAB_LIB_WIDEN);
1897 anti_adjust_stack (temp);
1898 }
1899
1900#ifdef STACK_GROWS_DOWNWARD
1901 temp = virtual_outgoing_args_rtx;
1902 if (extra != 0 && below)
1903 temp = plus_constant (temp, extra);
1904#else
1905 if (GET_CODE (size) == CONST_INT)
1906 temp = plus_constant (virtual_outgoing_args_rtx,
1907 - INTVAL (size) - (below ? 0 : extra));
1908 else if (extra != 0 && !below)
1909 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1910 negate_rtx (Pmode, plus_constant (size, extra)));
1911 else
1912 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1913 negate_rtx (Pmode, size));
1914#endif
1915
1916 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1917}
1918
87e38d84 1919rtx
bbf6f052
RK
1920gen_push_operand ()
1921{
1922 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1923}
1924
1925/* Generate code to push X onto the stack, assuming it has mode MODE and
1926 type TYPE.
1927 MODE is redundant except when X is a CONST_INT (since they don't
1928 carry mode info).
1929 SIZE is an rtx for the size of data to be copied (in bytes),
1930 needed only if X is BLKmode.
1931
1932 ALIGN (in bytes) is maximum alignment we can assume.
1933
cd048831
RK
1934 If PARTIAL and REG are both nonzero, then copy that many of the first
1935 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
1936 The amount of space pushed is decreased by PARTIAL words,
1937 rounded *down* to a multiple of PARM_BOUNDARY.
1938 REG must be a hard register in this case.
cd048831
RK
 1939	 If REG is zero but PARTIAL is not, take all other actions for an
1940 argument partially in registers, but do not actually load any
1941 registers.
bbf6f052
RK
1942
1943 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 1944 This is ignored if an argument block has already been allocated.
bbf6f052
RK
1945
1946 On a machine that lacks real push insns, ARGS_ADDR is the address of
1947 the bottom of the argument block for this call. We use indexing off there
 1948	 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
1949 argument block has not been preallocated.
1950
1951 ARGS_SO_FAR is the size of args previously pushed for this call. */
1952
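/* Illustrative caller (hypothetical, for orientation only): pushing a
   word-sized value X with real push insns, no partial-register part
   and no preallocated argument block might look like

	emit_push_insn (x, SImode, NULL_TREE, NULL_RTX,
			GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT,
			0, NULL_RTX, 0, NULL_RTX, const0_rtx);
   */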
1953void
1954emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1955 args_addr, args_so_far)
1956 register rtx x;
1957 enum machine_mode mode;
1958 tree type;
1959 rtx size;
1960 int align;
1961 int partial;
1962 rtx reg;
1963 int extra;
1964 rtx args_addr;
1965 rtx args_so_far;
1966{
1967 rtx xinner;
1968 enum direction stack_direction
1969#ifdef STACK_GROWS_DOWNWARD
1970 = downward;
1971#else
1972 = upward;
1973#endif
1974
1975 /* Decide where to pad the argument: `downward' for below,
1976 `upward' for above, or `none' for don't pad it.
1977 Default is below for small data on big-endian machines; else above. */
1978 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1979
1980 /* Invert direction if stack is post-update. */
1981 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1982 if (where_pad != none)
1983 where_pad = (where_pad == downward ? upward : downward);
1984
1985 xinner = x = protect_from_queue (x, 0);
1986
1987 if (mode == BLKmode)
1988 {
1989 /* Copy a block into the stack, entirely or partially. */
1990
1991 register rtx temp;
1992 int used = partial * UNITS_PER_WORD;
1993 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1994 int skip;
1995
1996 if (size == 0)
1997 abort ();
1998
1999 used -= offset;
2000
2001 /* USED is now the # of bytes we need not copy to the stack
2002 because registers will take care of them. */
2003
2004 if (partial != 0)
2005 xinner = change_address (xinner, BLKmode,
2006 plus_constant (XEXP (xinner, 0), used));
2007
2008 /* If the partial register-part of the arg counts in its stack size,
2009 skip the part of stack space corresponding to the registers.
2010 Otherwise, start copying to the beginning of the stack space,
2011 by setting SKIP to 0. */
2012#ifndef REG_PARM_STACK_SPACE
2013 skip = 0;
2014#else
2015 skip = used;
2016#endif
2017
2018#ifdef PUSH_ROUNDING
2019 /* Do it with several push insns if that doesn't take lots of insns
2020 and if there is no difficulty with push insns that skip bytes
2021 on the stack for alignment purposes. */
2022 if (args_addr == 0
2023 && GET_CODE (size) == CONST_INT
2024 && skip == 0
2025 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2026 < MOVE_RATIO)
bbf6f052
RK
2027 /* Here we avoid the case of a structure whose weak alignment
2028 forces many pushes of a small amount of data,
 2029	     and the rounding done on such small pushes causes trouble.  */
e87b4f3f
RS
2030 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
2031 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2032 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
2033 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2034 {
2035 /* Push padding now if padding above and stack grows down,
2036 or if padding below and stack grows up.
2037 But if space already allocated, this has already been done. */
2038 if (extra && args_addr == 0
2039 && where_pad != none && where_pad != stack_direction)
906c4e36 2040 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2041
2042 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2043 INTVAL (size) - used, align);
2044 }
2045 else
2046#endif /* PUSH_ROUNDING */
2047 {
2048 /* Otherwise make space on the stack and copy the data
2049 to the address of that space. */
2050
2051 /* Deduct words put into registers from the size we must copy. */
2052 if (partial != 0)
2053 {
2054 if (GET_CODE (size) == CONST_INT)
906c4e36 2055 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
2056 else
2057 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
2058 GEN_INT (used), NULL_RTX, 0,
2059 OPTAB_LIB_WIDEN);
bbf6f052
RK
2060 }
2061
2062 /* Get the address of the stack space.
2063 In this case, we do not deal with EXTRA separately.
2064 A single stack adjust will do. */
2065 if (! args_addr)
2066 {
2067 temp = push_block (size, extra, where_pad == downward);
2068 extra = 0;
2069 }
2070 else if (GET_CODE (args_so_far) == CONST_INT)
2071 temp = memory_address (BLKmode,
2072 plus_constant (args_addr,
2073 skip + INTVAL (args_so_far)));
2074 else
2075 temp = memory_address (BLKmode,
2076 plus_constant (gen_rtx (PLUS, Pmode,
2077 args_addr, args_so_far),
2078 skip));
2079
2080 /* TEMP is the address of the block. Copy the data there. */
2081 if (GET_CODE (size) == CONST_INT
2082 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2083 < MOVE_RATIO))
2084 {
2085 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2086 INTVAL (size), align);
2087 goto ret;
2088 }
2089 /* Try the most limited insn first, because there's no point
2090 including more than one in the machine description unless
2091 the more limited one has some advantage. */
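	  /* For instance, movstrqi is usable here only for constant
	     sizes below (1 << (GET_MODE_BITSIZE (QImode) - 1)), i.e.
	     128 bytes; movstrhi, movstrsi and movstrdi follow in
	     order of increasing generality.  */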
2092#ifdef HAVE_movstrqi
2093 if (HAVE_movstrqi
2094 && GET_CODE (size) == CONST_INT
2095 && ((unsigned) INTVAL (size)
2096 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2097 {
c841050e
RS
2098 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2099 xinner, size, GEN_INT (align));
2100 if (pat != 0)
2101 {
2102 emit_insn (pat);
2103 goto ret;
2104 }
bbf6f052
RK
2105 }
2106#endif
2107#ifdef HAVE_movstrhi
2108 if (HAVE_movstrhi
2109 && GET_CODE (size) == CONST_INT
2110 && ((unsigned) INTVAL (size)
2111 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2112 {
c841050e
RS
2113 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2114 xinner, size, GEN_INT (align));
2115 if (pat != 0)
2116 {
2117 emit_insn (pat);
2118 goto ret;
2119 }
bbf6f052
RK
2120 }
2121#endif
2122#ifdef HAVE_movstrsi
2123 if (HAVE_movstrsi)
2124 {
c841050e
RS
2125 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2126 xinner, size, GEN_INT (align));
2127 if (pat != 0)
2128 {
2129 emit_insn (pat);
2130 goto ret;
2131 }
bbf6f052
RK
2132 }
2133#endif
2134#ifdef HAVE_movstrdi
2135 if (HAVE_movstrdi)
2136 {
c841050e
RS
2137 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2138 xinner, size, GEN_INT (align));
2139 if (pat != 0)
2140 {
2141 emit_insn (pat);
2142 goto ret;
2143 }
bbf6f052
RK
2144 }
2145#endif
2146
2147#ifndef ACCUMULATE_OUTGOING_ARGS
2148 /* If the source is referenced relative to the stack pointer,
2149 copy it to another register to stabilize it. We do not need
2150 to do this if we know that we won't be changing sp. */
2151
2152 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2153 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2154 temp = copy_to_reg (temp);
2155#endif
2156
2157 /* Make inhibit_defer_pop nonzero around the library call
2158 to force it to pop the bcopy-arguments right away. */
2159 NO_DEFER_POP;
2160#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2161 emit_library_call (memcpy_libfunc, 0,
bbf6f052 2162 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
2163 convert_to_mode (TYPE_MODE (sizetype),
2164 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2165 TYPE_MODE (sizetype));
bbf6f052 2166#else
d562e42e 2167 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2168 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
0fa83258
RK
2169 convert_to_mode (TYPE_MODE (sizetype),
2170 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2171 TYPE_MODE (sizetype));
bbf6f052
RK
2172#endif
2173 OK_DEFER_POP;
2174 }
2175 }
2176 else if (partial > 0)
2177 {
2178 /* Scalar partly in registers. */
2179
2180 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2181 int i;
2182 int not_stack;
2183 /* # words of start of argument
2184 that we must make space for but need not store. */
2185 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2186 int args_offset = INTVAL (args_so_far);
2187 int skip;
2188
2189 /* Push padding now if padding above and stack grows down,
2190 or if padding below and stack grows up.
2191 But if space already allocated, this has already been done. */
2192 if (extra && args_addr == 0
2193 && where_pad != none && where_pad != stack_direction)
906c4e36 2194 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2195
2196 /* If we make space by pushing it, we might as well push
2197 the real data. Otherwise, we can leave OFFSET nonzero
2198 and leave the space uninitialized. */
2199 if (args_addr == 0)
2200 offset = 0;
2201
2202 /* Now NOT_STACK gets the number of words that we don't need to
2203 allocate on the stack. */
2204 not_stack = partial - offset;
2205
2206 /* If the partial register-part of the arg counts in its stack size,
2207 skip the part of stack space corresponding to the registers.
2208 Otherwise, start copying to the beginning of the stack space,
2209 by setting SKIP to 0. */
2210#ifndef REG_PARM_STACK_SPACE
2211 skip = 0;
2212#else
2213 skip = not_stack;
2214#endif
2215
2216 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2217 x = validize_mem (force_const_mem (mode, x));
2218
2219 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2220 SUBREGs of such registers are not allowed. */
2221 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2222 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2223 x = copy_to_reg (x);
2224
2225 /* Loop over all the words allocated on the stack for this arg. */
2226 /* We can do it by words, because any scalar bigger than a word
2227 has a size a multiple of a word. */
2228#ifndef PUSH_ARGS_REVERSED
2229 for (i = not_stack; i < size; i++)
2230#else
2231 for (i = size - 1; i >= not_stack; i--)
2232#endif
2233 if (i >= not_stack + offset)
2234 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
2235 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2236 0, args_addr,
2237 GEN_INT (args_offset + ((i - not_stack + skip)
bbf6f052
RK
2238 * UNITS_PER_WORD)));
2239 }
2240 else
2241 {
2242 rtx addr;
2243
2244 /* Push padding now if padding above and stack grows down,
2245 or if padding below and stack grows up.
2246 But if space already allocated, this has already been done. */
2247 if (extra && args_addr == 0
2248 && where_pad != none && where_pad != stack_direction)
906c4e36 2249 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2250
2251#ifdef PUSH_ROUNDING
2252 if (args_addr == 0)
2253 addr = gen_push_operand ();
2254 else
2255#endif
2256 if (GET_CODE (args_so_far) == CONST_INT)
2257 addr
2258 = memory_address (mode,
2259 plus_constant (args_addr, INTVAL (args_so_far)));
2260 else
2261 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2262 args_so_far));
2263
2264 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2265 }
2266
2267 ret:
2268 /* If part should go in registers, copy that part
2269 into the appropriate registers. Do this now, at the end,
2270 since mem-to-mem copies above may do function calls. */
cd048831 2271 if (partial > 0 && reg != 0)
bbf6f052
RK
2272 move_block_to_reg (REGNO (reg), x, partial, mode);
2273
2274 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 2275 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2276}
2277\f
bbf6f052
RK
2278/* Expand an assignment that stores the value of FROM into TO.
2279 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
2280 (This may contain a QUEUED rtx;
2281 if the value is constant, this rtx is a constant.)
2282 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
2283
2284 SUGGEST_REG is no longer actually used.
2285 It used to mean, copy the value through a register
2286 and return that register, if that is possible.
709f5be1 2287 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
2288
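/* Illustrative note: for an assignment such as `s.f = v', the
   COMPONENT_REF case below decomposes `s.f' with get_inner_reference
   and stores through store_field; a plain `x = y' falls through to
   store_expr at the end of the function.  */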
2289rtx
2290expand_assignment (to, from, want_value, suggest_reg)
2291 tree to, from;
2292 int want_value;
2293 int suggest_reg;
2294{
2295 register rtx to_rtx = 0;
2296 rtx result;
2297
2298 /* Don't crash if the lhs of the assignment was erroneous. */
2299
2300 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
2301 {
2302 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2303 return want_value ? result : NULL_RTX;
2304 }
bbf6f052 2305
ca695ac9
JB
2306 if (output_bytecode)
2307 {
2308 tree dest_innermost;
2309
2310 bc_expand_expr (from);
6d6e61ce 2311 bc_emit_instruction (duplicate);
ca695ac9
JB
2312
2313 dest_innermost = bc_expand_address (to);
2314
2315 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2316 take care of it here. */
2317
2318 bc_store_memory (TREE_TYPE (to), dest_innermost);
2319 return NULL;
2320 }
2321
bbf6f052
RK
2322 /* Assignment of a structure component needs special treatment
2323 if the structure component's rtx is not simply a MEM.
2324 Assignment of an array element at a constant index
2325 has the same problem. */
2326
2327 if (TREE_CODE (to) == COMPONENT_REF
2328 || TREE_CODE (to) == BIT_FIELD_REF
2329 || (TREE_CODE (to) == ARRAY_REF
2330 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2331 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2332 {
2333 enum machine_mode mode1;
2334 int bitsize;
2335 int bitpos;
7bb0943f 2336 tree offset;
bbf6f052
RK
2337 int unsignedp;
2338 int volatilep = 0;
0088fcb1 2339 tree tem;
d78d243c 2340 int alignment;
0088fcb1
RK
2341
2342 push_temp_slots ();
2343 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
bbf6f052
RK
2344 &mode1, &unsignedp, &volatilep);
2345
2346 /* If we are going to use store_bit_field and extract_bit_field,
2347 make sure to_rtx will be safe for multiple use. */
2348
2349 if (mode1 == VOIDmode && want_value)
2350 tem = stabilize_reference (tem);
2351
d78d243c 2352 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
906c4e36 2353 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2354 if (offset != 0)
2355 {
906c4e36 2356 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2357
2358 if (GET_CODE (to_rtx) != MEM)
2359 abort ();
2360 to_rtx = change_address (to_rtx, VOIDmode,
2361 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2362 force_reg (Pmode, offset_rtx)));
d78d243c
RS
2363 /* If we have a variable offset, the known alignment
2364 is only that of the innermost structure containing the field.
2365 (Actually, we could sometimes do better by using the
2366 align of an element of the innermost array, but no need.) */
2367 if (TREE_CODE (to) == COMPONENT_REF
2368 || TREE_CODE (to) == BIT_FIELD_REF)
2369 alignment
2370 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
7bb0943f 2371 }
bbf6f052
RK
2372 if (volatilep)
2373 {
2374 if (GET_CODE (to_rtx) == MEM)
2375 MEM_VOLATILE_P (to_rtx) = 1;
2376#if 0 /* This was turned off because, when a field is volatile
2377 in an object which is not volatile, the object may be in a register,
2378 and then we would abort over here. */
2379 else
2380 abort ();
2381#endif
2382 }
2383
2384 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2385 (want_value
2386 /* Spurious cast makes HPUX compiler happy. */
2387 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2388 : VOIDmode),
2389 unsignedp,
2390 /* Required alignment of containing datum. */
d78d243c 2391 alignment,
bbf6f052
RK
2392 int_size_in_bytes (TREE_TYPE (tem)));
2393 preserve_temp_slots (result);
2394 free_temp_slots ();
0088fcb1 2395 pop_temp_slots ();
bbf6f052 2396
709f5be1
RS
2397 /* If the value is meaningful, convert RESULT to the proper mode.
2398 Otherwise, return nothing. */
5ffe63ed
RS
2399 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2400 TYPE_MODE (TREE_TYPE (from)),
2401 result,
2402 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 2403 : NULL_RTX);
bbf6f052
RK
2404 }
2405
cd1db108
RS
2406 /* If the rhs is a function call and its value is not an aggregate,
2407 call the function before we start to compute the lhs.
2408 This is needed for correct code for cases such as
2409 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
2410 requires loading up part of an address in a separate insn.
2411
2412 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
 2413	 a promoted variable where the zero- or sign-extension needs to be done.
2414 Handling this in the normal way is safe because no computation is done
2415 before the call. */
2416 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2417 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 2418 {
0088fcb1
RK
2419 rtx value;
2420
2421 push_temp_slots ();
2422 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108
RS
2423 if (to_rtx == 0)
2424 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2425 emit_move_insn (to_rtx, value);
2426 preserve_temp_slots (to_rtx);
2427 free_temp_slots ();
0088fcb1 2428 pop_temp_slots ();
709f5be1 2429 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
2430 }
2431
bbf6f052
RK
2432 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2433 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2434
2435 if (to_rtx == 0)
906c4e36 2436 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
bbf6f052 2437
86d38d25
RS
2438 /* Don't move directly into a return register. */
2439 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2440 {
0088fcb1
RK
2441 rtx temp;
2442
2443 push_temp_slots ();
2444 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
2445 emit_move_insn (to_rtx, temp);
2446 preserve_temp_slots (to_rtx);
2447 free_temp_slots ();
0088fcb1 2448 pop_temp_slots ();
709f5be1 2449 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
2450 }
2451
bbf6f052
RK
2452 /* In case we are returning the contents of an object which overlaps
2453 the place the value is being stored, use a safe function when copying
2454 a value through a pointer into a structure value return block. */
2455 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2456 && current_function_returns_struct
2457 && !current_function_returns_pcc_struct)
2458 {
0088fcb1
RK
2459 rtx from_rtx, size;
2460
2461 push_temp_slots ();
33a20d10
RK
2462 size = expr_size (from);
2463 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
2464
2465#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2466 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
2467 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2468 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
2469 convert_to_mode (TYPE_MODE (sizetype),
2470 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2471 TYPE_MODE (sizetype));
bbf6f052 2472#else
d562e42e 2473 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
2474 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2475 XEXP (to_rtx, 0), Pmode,
0fa83258
RK
2476 convert_to_mode (TYPE_MODE (sizetype),
2477 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2478 TYPE_MODE (sizetype));
bbf6f052
RK
2479#endif
2480
2481 preserve_temp_slots (to_rtx);
2482 free_temp_slots ();
0088fcb1 2483 pop_temp_slots ();
709f5be1 2484 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
2485 }
2486
2487 /* Compute FROM and store the value in the rtx we got. */
2488
0088fcb1 2489 push_temp_slots ();
bbf6f052
RK
2490 result = store_expr (from, to_rtx, want_value);
2491 preserve_temp_slots (result);
2492 free_temp_slots ();
0088fcb1 2493 pop_temp_slots ();
709f5be1 2494 return want_value ? result : NULL_RTX;
bbf6f052
RK
2495}
2496
2497/* Generate code for computing expression EXP,
2498 and storing the value into TARGET.
bbf6f052
RK
2499 TARGET may contain a QUEUED rtx.
2500
709f5be1
RS
2501 If WANT_VALUE is nonzero, return a copy of the value
2502 not in TARGET, so that we can be sure to use the proper
2503 value in a containing expression even if TARGET has something
2504 else stored in it. If possible, we copy the value through a pseudo
2505 and return that pseudo. Or, if the value is constant, we try to
2506 return the constant. In some cases, we return a pseudo
2507 copied *from* TARGET.
2508
2509 If the mode is BLKmode then we may return TARGET itself.
 2510	 It turns out that in BLKmode it doesn't cause a problem,
2511 because C has no operators that could combine two different
2512 assignments into the same BLKmode object with different values
2513 with no sequence point. Will other languages need this to
2514 be more thorough?
2515
2516 If WANT_VALUE is 0, we return NULL, to make sure
2517 to catch quickly any cases where the caller uses the value
2518 and fails to set WANT_VALUE. */
bbf6f052
RK
2519
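/* Illustrative note: expand_assignment above reaches this function as
   store_expr (from, to_rtx, want_value); with WANT_VALUE zero the
   caller gets NULL_RTX back and no copy of the stored value is made.  */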
2520rtx
709f5be1 2521store_expr (exp, target, want_value)
bbf6f052
RK
2522 register tree exp;
2523 register rtx target;
709f5be1 2524 int want_value;
bbf6f052
RK
2525{
2526 register rtx temp;
2527 int dont_return_target = 0;
2528
2529 if (TREE_CODE (exp) == COMPOUND_EXPR)
2530 {
2531 /* Perform first part of compound expression, then assign from second
2532 part. */
2533 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2534 emit_queue ();
709f5be1 2535 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
2536 }
2537 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2538 {
2539 /* For conditional expression, get safe form of the target. Then
2540 test the condition, doing the appropriate assignment on either
2541 side. This avoids the creation of unnecessary temporaries.
2542 For non-BLKmode, it is more efficient not to do this. */
2543
2544 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2545
2546 emit_queue ();
2547 target = protect_from_queue (target, 1);
2548
2549 NO_DEFER_POP;
2550 jumpifnot (TREE_OPERAND (exp, 0), lab1);
709f5be1 2551 store_expr (TREE_OPERAND (exp, 1), target, 0);
bbf6f052
RK
2552 emit_queue ();
2553 emit_jump_insn (gen_jump (lab2));
2554 emit_barrier ();
2555 emit_label (lab1);
709f5be1 2556 store_expr (TREE_OPERAND (exp, 2), target, 0);
bbf6f052
RK
2557 emit_queue ();
2558 emit_label (lab2);
2559 OK_DEFER_POP;
709f5be1 2560 return want_value ? target : NULL_RTX;
bbf6f052 2561 }
709f5be1 2562 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
bbf6f052
RK
2563 && GET_MODE (target) != BLKmode)
2564 /* If target is in memory and caller wants value in a register instead,
2565 arrange that. Pass TARGET as target for expand_expr so that,
709f5be1 2566 if EXP is another assignment, WANT_VALUE will be nonzero for it.
c2e6aff6
RS
2567 We know expand_expr will not use the target in that case.
2568 Don't do this if TARGET is volatile because we are supposed
2569 to write it and then read it. */
bbf6f052 2570 {
906c4e36 2571 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
2572 GET_MODE (target), 0);
2573 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2574 temp = copy_to_reg (temp);
2575 dont_return_target = 1;
2576 }
2577 else if (queued_subexp_p (target))
709f5be1
RS
2578 /* If target contains a postincrement, let's not risk
2579 using it as the place to generate the rhs. */
bbf6f052
RK
2580 {
2581 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2582 {
2583 /* Expand EXP into a new pseudo. */
2584 temp = gen_reg_rtx (GET_MODE (target));
2585 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2586 }
2587 else
906c4e36 2588 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
2589
2590 /* If target is volatile, ANSI requires accessing the value
2591 *from* the target, if it is accessed. So make that happen.
2592 In no case return the target itself. */
2593 if (! MEM_VOLATILE_P (target) && want_value)
2594 dont_return_target = 1;
bbf6f052 2595 }
1499e0a8
RK
2596 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
 2597	  /* If this is a scalar in a register that is stored in a wider mode
2598 than the declared mode, compute the result into its declared mode
2599 and then convert to the wider mode. Our value is the computed
2600 expression. */
2601 {
2602 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 2603
766f36c7
RK
2604 /* If TEMP is a volatile MEM and we want a result value, make
2605 the access now so it gets done only once. */
2606 if (GET_CODE (temp) == MEM && MEM_VOLATILE_P (temp))
2607 temp = copy_to_reg (temp);
2608
b258707c
RS
2609 /* If TEMP is a VOIDmode constant, use convert_modes to make
2610 sure that we properly convert it. */
2611 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2612 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2613 TYPE_MODE (TREE_TYPE (exp)), temp,
2614 SUBREG_PROMOTED_UNSIGNED_P (target));
2615
1499e0a8
RK
2616 convert_move (SUBREG_REG (target), temp,
2617 SUBREG_PROMOTED_UNSIGNED_P (target));
709f5be1 2618 return want_value ? temp : NULL_RTX;
1499e0a8 2619 }
bbf6f052
RK
2620 else
2621 {
2622 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 2623 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
2624 If TARGET is a volatile mem ref, either return TARGET
2625 or return a reg copied *from* TARGET; ANSI requires this.
2626
2627 Otherwise, if TEMP is not TARGET, return TEMP
2628 if it is constant (for efficiency),
2629 or if we really want the correct value. */
bbf6f052
RK
2630 if (!(target && GET_CODE (target) == REG
2631 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1
RS
2632 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2633 && temp != target
2634 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
2635 dont_return_target = 1;
2636 }
2637
b258707c
RS
2638 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2639 the same as that of TARGET, adjust the constant. This is needed, for
2640 example, in case it is a CONST_DOUBLE and we want only a word-sized
2641 value. */
2642 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2643 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2644 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2645 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2646
bbf6f052
RK
2647 /* If value was not generated in the target, store it there.
 2648	 Convert the value to TARGET's type first if necessary.  */
2649
2650 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2651 {
2652 target = protect_from_queue (target, 1);
2653 if (GET_MODE (temp) != GET_MODE (target)
2654 && GET_MODE (temp) != VOIDmode)
2655 {
2656 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2657 if (dont_return_target)
2658 {
2659 /* In this case, we will return TEMP,
2660 so make sure it has the proper mode.
2661 But don't forget to store the value into TARGET. */
2662 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2663 emit_move_insn (target, temp);
2664 }
2665 else
2666 convert_move (target, temp, unsignedp);
2667 }
2668
2669 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2670 {
2671 /* Handle copying a string constant into an array.
2672 The string constant may be shorter than the array.
2673 So copy just the string's actual length, and clear the rest. */
2674 rtx size;
2675
e87b4f3f
RS
2676 /* Get the size of the data type of the string,
2677 which is actually the size of the target. */
2678 size = expr_size (exp);
2679 if (GET_CODE (size) == CONST_INT
2680 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2681 emit_block_move (target, temp, size,
2682 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2683 else
bbf6f052 2684 {
e87b4f3f
RS
2685 /* Compute the size of the data to copy from the string. */
2686 tree copy_size
c03b7665 2687 = size_binop (MIN_EXPR,
b50d17a1 2688 make_tree (sizetype, size),
c03b7665
RK
2689 convert (sizetype,
2690 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
2691 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2692 VOIDmode, 0);
e87b4f3f
RS
2693 rtx label = 0;
2694
2695 /* Copy that much. */
2696 emit_block_move (target, temp, copy_size_rtx,
2697 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2698
2699 /* Figure out how much is left in TARGET
2700 that we have to clear. */
2701 if (GET_CODE (copy_size_rtx) == CONST_INT)
2702 {
2703 temp = plus_constant (XEXP (target, 0),
2704 TREE_STRING_LENGTH (exp));
2705 size = plus_constant (size,
2706 - TREE_STRING_LENGTH (exp));
2707 }
2708 else
2709 {
2710 enum machine_mode size_mode = Pmode;
2711
2712 temp = force_reg (Pmode, XEXP (target, 0));
2713 temp = expand_binop (size_mode, add_optab, temp,
906c4e36
RK
2714 copy_size_rtx, NULL_RTX, 0,
2715 OPTAB_LIB_WIDEN);
e87b4f3f
RS
2716
2717 size = expand_binop (size_mode, sub_optab, size,
906c4e36
RK
2718 copy_size_rtx, NULL_RTX, 0,
2719 OPTAB_LIB_WIDEN);
e87b4f3f 2720
906c4e36 2721 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
2722 GET_MODE (size), 0, 0);
2723 label = gen_label_rtx ();
2724 emit_jump_insn (gen_blt (label));
2725 }
2726
2727 if (size != const0_rtx)
2728 {
bbf6f052 2729#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2730 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
e87b4f3f 2731 temp, Pmode, const0_rtx, Pmode, size, Pmode);
bbf6f052 2732#else
d562e42e 2733 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
e87b4f3f 2734 temp, Pmode, size, Pmode);
bbf6f052 2735#endif
e87b4f3f
RS
2736 }
2737 if (label)
2738 emit_label (label);
bbf6f052
RK
2739 }
2740 }
2741 else if (GET_MODE (temp) == BLKmode)
2742 emit_block_move (target, temp, expr_size (exp),
2743 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2744 else
2745 emit_move_insn (target, temp);
2746 }
709f5be1 2747
766f36c7
RK
2748 /* If we don't want a value, return NULL_RTX. */
2749 if (! want_value)
2750 return NULL_RTX;
2751
2752 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2753 ??? The latter test doesn't seem to make sense. */
2754 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 2755 return temp;
766f36c7
RK
2756
2757 /* Return TARGET itself if it is a hard register. */
2758 else if (want_value && GET_MODE (target) != BLKmode
2759 && ! (GET_CODE (target) == REG
2760 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 2761 return copy_to_reg (target);
766f36c7
RK
2762
2763 else
709f5be1 2764 return target;
bbf6f052
RK
2765}
2766\f
2767/* Store the value of constructor EXP into the rtx TARGET.
2768 TARGET is either a REG or a MEM. */
2769
2770static void
2771store_constructor (exp, target)
2772 tree exp;
2773 rtx target;
2774{
4af3895e
JVA
2775 tree type = TREE_TYPE (exp);
2776
bbf6f052
RK
2777 /* We know our target cannot conflict, since safe_from_p has been called. */
2778#if 0
2779 /* Don't try copying piece by piece into a hard register
2780 since that is vulnerable to being clobbered by EXP.
2781 Instead, construct in a pseudo register and then copy it all. */
2782 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2783 {
2784 rtx temp = gen_reg_rtx (GET_MODE (target));
2785 store_constructor (exp, temp);
2786 emit_move_insn (target, temp);
2787 return;
2788 }
2789#endif
2790
e44842fe
RK
2791 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2792 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
2793 {
2794 register tree elt;
2795
4af3895e 2796 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
2797 if (TREE_CODE (type) == UNION_TYPE
2798 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 2799 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
4af3895e
JVA
2800
2801 /* If we are building a static constructor into a register,
2802 set the initial value as zero so we can fold the value into
2803 a constant. */
2804 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2805 emit_move_insn (target, const0_rtx);
2806
bbf6f052
RK
2807 /* If the constructor has fewer fields than the structure,
2808 clear the whole structure first. */
2809 else if (list_length (CONSTRUCTOR_ELTS (exp))
4af3895e
JVA
2810 != list_length (TYPE_FIELDS (type)))
2811 clear_storage (target, int_size_in_bytes (type));
bbf6f052
RK
2812 else
2813 /* Inform later passes that the old value is dead. */
2814 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2815
2816 /* Store each element of the constructor into
2817 the corresponding field of TARGET. */
2818
2819 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2820 {
2821 register tree field = TREE_PURPOSE (elt);
2822 register enum machine_mode mode;
2823 int bitsize;
b50d17a1 2824 int bitpos = 0;
bbf6f052 2825 int unsignedp;
b50d17a1
RK
2826 tree pos, constant = 0, offset = 0;
2827 rtx to_rtx = target;
bbf6f052 2828
f32fd778
RS
2829 /* Just ignore missing fields.
2830 We cleared the whole structure, above,
2831 if any fields are missing. */
2832 if (field == 0)
2833 continue;
2834
bbf6f052
RK
2835 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2836 unsignedp = TREE_UNSIGNED (field);
2837 mode = DECL_MODE (field);
2838 if (DECL_BIT_FIELD (field))
2839 mode = VOIDmode;
2840
b50d17a1
RK
2841 pos = DECL_FIELD_BITPOS (field);
2842 if (TREE_CODE (pos) == INTEGER_CST)
2843 constant = pos;
2844 else if (TREE_CODE (pos) == PLUS_EXPR
2845 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2846 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
2847 else
2848 offset = pos;
2849
2850 if (constant)
2851 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2852
2853 if (offset)
2854 {
2855 rtx offset_rtx;
2856
2857 if (contains_placeholder_p (offset))
2858 offset = build (WITH_RECORD_EXPR, sizetype,
2859 offset, exp);
bbf6f052 2860
b50d17a1
RK
2861 offset = size_binop (FLOOR_DIV_EXPR, offset,
2862 size_int (BITS_PER_UNIT));
bbf6f052 2863
b50d17a1
RK
2864 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2865 if (GET_CODE (to_rtx) != MEM)
2866 abort ();
2867
2868 to_rtx
2869 = change_address (to_rtx, VOIDmode,
2870 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2871 force_reg (Pmode, offset_rtx)));
2872 }
2873
2874 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
bbf6f052
RK
2875 /* The alignment of TARGET is
2876 at least what its type requires. */
2877 VOIDmode, 0,
4af3895e
JVA
2878 TYPE_ALIGN (type) / BITS_PER_UNIT,
2879 int_size_in_bytes (type));
bbf6f052
RK
2880 }
2881 }
4af3895e 2882 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
2883 {
2884 register tree elt;
2885 register int i;
4af3895e 2886 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
2887 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2888 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 2889 tree elttype = TREE_TYPE (type);
bbf6f052
RK
2890
2891 /* If the constructor has fewer fields than the structure,
4af3895e
JVA
 2892	 clear the whole structure first.  Similarly if this is a
2893 static constructor of a non-BLKmode object. */
bbf6f052 2894
4af3895e
JVA
2895 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2896 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
597bb7f1 2897 clear_storage (target, int_size_in_bytes (type));
bbf6f052
RK
2898 else
2899 /* Inform later passes that the old value is dead. */
2900 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2901
2902 /* Store each element of the constructor into
2903 the corresponding element of TARGET, determined
2904 by counting the elements. */
2905 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2906 elt;
2907 elt = TREE_CHAIN (elt), i++)
2908 {
2909 register enum machine_mode mode;
2910 int bitsize;
2911 int bitpos;
2912 int unsignedp;
03dc44a6
RS
2913 tree index = TREE_PURPOSE (elt);
2914 rtx xtarget = target;
bbf6f052
RK
2915
2916 mode = TYPE_MODE (elttype);
2917 bitsize = GET_MODE_BITSIZE (mode);
2918 unsignedp = TREE_UNSIGNED (elttype);
2919
03dc44a6
RS
2920 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
2921 {
2922 /* We don't currently allow variable indices in a
2923 C initializer, but let's try here to support them. */
2924 rtx pos_rtx, addr, xtarget;
2925 tree position;
2926
2927 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
2928 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
2929 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
2930 xtarget = change_address (target, mode, addr);
2931 store_expr (TREE_VALUE (elt), xtarget, 0);
2932 }
2933 else
2934 {
2935 if (index != 0)
7c314719 2936 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
2937 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2938 else
2939 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2940
2941 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
2942 /* The alignment of TARGET is
2943 at least what its type requires. */
2944 VOIDmode, 0,
2945 TYPE_ALIGN (type) / BITS_PER_UNIT,
2946 int_size_in_bytes (type));
2947 }
bbf6f052
RK
2948 }
2949 }
2950
2951 else
2952 abort ();
2953}
2954
2955/* Store the value of EXP (an expression tree)
2956 into a subfield of TARGET which has mode MODE and occupies
2957 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2958 If MODE is VOIDmode, it means that we are storing into a bit-field.
2959
2960 If VALUE_MODE is VOIDmode, return nothing in particular.
2961 UNSIGNEDP is not used in this case.
2962
2963 Otherwise, return an rtx for the value stored. This rtx
2964 has mode VALUE_MODE if that is convenient to do.
2965 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2966
2967 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2968 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2969
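/* Illustrative example: a store into `struct { unsigned f : 3; }'
   arrives here with MODE == VOIDmode, BITSIZE == 3 and BITPOS giving
   the field's offset in bits, and is handled by the store_bit_field
   path below.  */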
2970static rtx
2971store_field (target, bitsize, bitpos, mode, exp, value_mode,
2972 unsignedp, align, total_size)
2973 rtx target;
2974 int bitsize, bitpos;
2975 enum machine_mode mode;
2976 tree exp;
2977 enum machine_mode value_mode;
2978 int unsignedp;
2979 int align;
2980 int total_size;
2981{
906c4e36 2982 HOST_WIDE_INT width_mask = 0;
bbf6f052 2983
906c4e36
RK
2984 if (bitsize < HOST_BITS_PER_WIDE_INT)
2985 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
2986
2987 /* If we are storing into an unaligned field of an aligned union that is
2988 in a register, we may have the mode of TARGET being an integer mode but
2989 MODE == BLKmode. In that case, get an aligned object whose size and
2990 alignment are the same as TARGET and store TARGET into it (we can avoid
2991 the store if the field being stored is the entire width of TARGET). Then
2992 call ourselves recursively to store the field into a BLKmode version of
2993 that object. Finally, load from the object into TARGET. This is not
2994 very efficient in general, but should only be slightly more expensive
2995 than the otherwise-required unaligned accesses. Perhaps this can be
2996 cleaned up later. */
2997
2998 if (mode == BLKmode
2999 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3000 {
3001 rtx object = assign_stack_temp (GET_MODE (target),
3002 GET_MODE_SIZE (GET_MODE (target)), 0);
3003 rtx blk_object = copy_rtx (object);
3004
3005 PUT_MODE (blk_object, BLKmode);
3006
3007 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3008 emit_move_insn (object, target);
3009
3010 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3011 align, total_size);
3012
46093b97
RS
3013 /* Even though we aren't returning target, we need to
3014 give it the updated value. */
bbf6f052
RK
3015 emit_move_insn (target, object);
3016
46093b97 3017 return blk_object;
bbf6f052
RK
3018 }
3019
3020 /* If the structure is in a register or if the component
3021 is a bit field, we cannot use addressing to access it.
3022 Use bit-field techniques or SUBREG to store in it. */
3023
4fa52007
RK
3024 if (mode == VOIDmode
3025 || (mode != BLKmode && ! direct_store[(int) mode])
3026 || GET_CODE (target) == REG
c980ac49 3027 || GET_CODE (target) == SUBREG
ccc98036
RS
3028 /* If the field isn't aligned enough to store as an ordinary memref,
3029 store it as a bit field. */
3030 || (STRICT_ALIGNMENT
3031 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3032 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 3033 {
906c4e36 3034 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73
RK
3035
3036 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3037 MODE. */
3038 if (mode != VOIDmode && mode != BLKmode
3039 && mode != TYPE_MODE (TREE_TYPE (exp)))
3040 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3041
bbf6f052
RK
3042 /* Store the value in the bitfield. */
3043 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3044 if (value_mode != VOIDmode)
3045 {
3046 /* The caller wants an rtx for the value. */
3047 /* If possible, avoid refetching from the bitfield itself. */
3048 if (width_mask != 0
3049 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 3050 {
9074de27 3051 tree count;
5c4d7cfb 3052 enum machine_mode tmode;
86a2c12a 3053
5c4d7cfb
RS
3054 if (unsignedp)
3055 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3056 tmode = GET_MODE (temp);
86a2c12a
RS
3057 if (tmode == VOIDmode)
3058 tmode = value_mode;
5c4d7cfb
RS
3059 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3060 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3061 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3062 }
bbf6f052 3063 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
3064 NULL_RTX, value_mode, 0, align,
3065 total_size);
bbf6f052
RK
3066 }
3067 return const0_rtx;
3068 }
3069 else
3070 {
3071 rtx addr = XEXP (target, 0);
3072 rtx to_rtx;
3073
3074 /* If a value is wanted, it must be the lhs;
3075 so make the address stable for multiple use. */
3076
3077 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3078 && ! CONSTANT_ADDRESS_P (addr)
3079 /* A frame-pointer reference is already stable. */
3080 && ! (GET_CODE (addr) == PLUS
3081 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3082 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3083 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3084 addr = copy_to_reg (addr);
3085
3086 /* Now build a reference to just the desired component. */
3087
3088 to_rtx = change_address (target, mode,
3089 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3090 MEM_IN_STRUCT_P (to_rtx) = 1;
3091
3092 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3093 }
3094}
3095\f
3096/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3097 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 3098 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
3099
3100 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3101 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
3102 If the position of the field is variable, we store a tree
3103 giving the variable offset (in units) in *POFFSET.
3104 This offset is in addition to the bit position.
3105 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
3106
3107 If any of the extraction expressions is volatile,
3108 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3109
3110 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3111 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
3112 is redundant.
3113
3114 If the field describes a variable-sized object, *PMODE is set to
3115 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3116 this case, but the address of the object can be found. */
bbf6f052
RK
3117
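/* Illustrative caller (this is the call made by expand_assignment
   above):

	tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep);

   For a reference such as `a.b.c' this walks the nested
   COMPONENT_REFs, accumulating the bit position, and returns the
   outermost containing object `a'.  */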
3118tree
4969d05d
RK
3119get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3120 punsignedp, pvolatilep)
bbf6f052
RK
3121 tree exp;
3122 int *pbitsize;
3123 int *pbitpos;
7bb0943f 3124 tree *poffset;
bbf6f052
RK
3125 enum machine_mode *pmode;
3126 int *punsignedp;
3127 int *pvolatilep;
3128{
b50d17a1 3129 tree orig_exp = exp;
bbf6f052
RK
3130 tree size_tree = 0;
3131 enum machine_mode mode = VOIDmode;
742920c7 3132 tree offset = integer_zero_node;
bbf6f052
RK
3133
3134 if (TREE_CODE (exp) == COMPONENT_REF)
3135 {
3136 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3137 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3138 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3139 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3140 }
3141 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3142 {
3143 size_tree = TREE_OPERAND (exp, 1);
3144 *punsignedp = TREE_UNSIGNED (exp);
3145 }
3146 else
3147 {
3148 mode = TYPE_MODE (TREE_TYPE (exp));
3149 *pbitsize = GET_MODE_BITSIZE (mode);
3150 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3151 }
3152
3153 if (size_tree)
3154 {
3155 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
3156 mode = BLKmode, *pbitsize = -1;
3157 else
3158 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
3159 }
3160
3161 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3162 and find the ultimate containing object. */
3163
3164 *pbitpos = 0;
3165
3166 while (1)
3167 {
7bb0943f 3168 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 3169 {
7bb0943f
RS
3170 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3171 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3172 : TREE_OPERAND (exp, 2));
bbf6f052 3173
e7f3c83f
RK
3174 /* If this field hasn't been filled in yet, don't go
3175 past it. This should only happen when folding expressions
3176 made during type construction. */
3177 if (pos == 0)
3178 break;
3179
7bb0943f
RS
3180 if (TREE_CODE (pos) == PLUS_EXPR)
3181 {
3182 tree constant, var;
3183 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3184 {
3185 constant = TREE_OPERAND (pos, 0);
3186 var = TREE_OPERAND (pos, 1);
3187 }
3188 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3189 {
3190 constant = TREE_OPERAND (pos, 1);
3191 var = TREE_OPERAND (pos, 0);
3192 }
3193 else
3194 abort ();
742920c7 3195
7bb0943f 3196 *pbitpos += TREE_INT_CST_LOW (constant);
742920c7
RK
3197 offset = size_binop (PLUS_EXPR, offset,
3198 size_binop (FLOOR_DIV_EXPR, var,
3199 size_int (BITS_PER_UNIT)));
7bb0943f
RS
3200 }
3201 else if (TREE_CODE (pos) == INTEGER_CST)
3202 *pbitpos += TREE_INT_CST_LOW (pos);
3203 else
3204 {
3205 /* Assume here that the offset is a multiple of a unit.
3206 If not, there should be an explicitly added constant. */
742920c7
RK
3207 offset = size_binop (PLUS_EXPR, offset,
3208 size_binop (FLOOR_DIV_EXPR, pos,
3209 size_int (BITS_PER_UNIT)));
7bb0943f 3210 }
bbf6f052 3211 }
bbf6f052 3212
742920c7 3213 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 3214 {
742920c7
RK
3215 /* This code is based on the code in case ARRAY_REF in expand_expr
3216 below. We assume here that the size of an array element is
3217 always an integral multiple of BITS_PER_UNIT. */
3218
3219 tree index = TREE_OPERAND (exp, 1);
3220 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3221 tree low_bound
3222 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3223 tree index_type = TREE_TYPE (index);
3224
3225 if (! integer_zerop (low_bound))
3226 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3227
3228 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3229 {
3230 index = convert (type_for_size (POINTER_SIZE, 0), index);
3231 index_type = TREE_TYPE (index);
3232 }
3233
3234 index = fold (build (MULT_EXPR, index_type, index,
3235 TYPE_SIZE (TREE_TYPE (exp))));
3236
3237 if (TREE_CODE (index) == INTEGER_CST
3238 && TREE_INT_CST_HIGH (index) == 0)
3239 *pbitpos += TREE_INT_CST_LOW (index);
3240 else
3241 offset = size_binop (PLUS_EXPR, offset,
3242 size_binop (FLOOR_DIV_EXPR, index,
3243 size_int (BITS_PER_UNIT)));
bbf6f052
RK
3244 }
3245 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3246 && ! ((TREE_CODE (exp) == NOP_EXPR
3247 || TREE_CODE (exp) == CONVERT_EXPR)
3248 && (TYPE_MODE (TREE_TYPE (exp))
3249 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3250 break;
7bb0943f
RS
3251
3252 /* If any reference in the chain is volatile, the effect is volatile. */
3253 if (TREE_THIS_VOLATILE (exp))
3254 *pvolatilep = 1;
bbf6f052
RK
3255 exp = TREE_OPERAND (exp, 0);
3256 }
3257
3258 /* If this was a bit-field, see if there is a mode that allows direct
3259 access in case EXP is in memory. */
e7f3c83f 3260 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
bbf6f052
RK
3261 {
3262 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3263 if (mode == BLKmode)
3264 mode = VOIDmode;
3265 }
3266
742920c7
RK
3267 if (integer_zerop (offset))
3268 offset = 0;
3269
b50d17a1
RK
3270 if (offset != 0 && contains_placeholder_p (offset))
3271 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3272
bbf6f052 3273 *pmode = mode;
7bb0943f 3274 *poffset = offset;
bbf6f052
RK
3275 return exp;
3276}
3277\f
3278/* Given an rtx VALUE that may contain additions and multiplications,
3279 return an equivalent value that just refers to a register or memory.
3280 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
3281 and returning a pseudo-register containing the value.
3282
3283 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
3284
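/* Illustrative example: given (plus:SI (reg:SI 100) (const_int 4)),
   force_operand emits the addition through expand_binop and returns
   a register (or TARGET) holding the sum.  */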
3285rtx
3286force_operand (value, target)
3287 rtx value, target;
3288{
3289 register optab binoptab = 0;
3290 /* Use a temporary to force order of execution of calls to
3291 `force_operand'. */
3292 rtx tmp;
3293 register rtx op2;
3294 /* Use subtarget as the target for operand 0 of a binary operation. */
3295 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3296
3297 if (GET_CODE (value) == PLUS)
3298 binoptab = add_optab;
3299 else if (GET_CODE (value) == MINUS)
3300 binoptab = sub_optab;
3301 else if (GET_CODE (value) == MULT)
3302 {
3303 op2 = XEXP (value, 1);
3304 if (!CONSTANT_P (op2)
3305 && !(GET_CODE (op2) == REG && op2 != subtarget))
3306 subtarget = 0;
3307 tmp = force_operand (XEXP (value, 0), subtarget);
3308 return expand_mult (GET_MODE (value), tmp,
906c4e36 3309 force_operand (op2, NULL_RTX),
bbf6f052
RK
3310 target, 0);
3311 }
3312
3313 if (binoptab)
3314 {
3315 op2 = XEXP (value, 1);
3316 if (!CONSTANT_P (op2)
3317 && !(GET_CODE (op2) == REG && op2 != subtarget))
3318 subtarget = 0;
3319 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3320 {
3321 binoptab = add_optab;
3322 op2 = negate_rtx (GET_MODE (value), op2);
3323 }
3324
3325 /* Check for an addition with OP2 a constant integer and our first
3326 operand a PLUS of a virtual register and something else. In that
3327 case, we want to emit the sum of the virtual register and the
3328 constant first and then add the other value. This allows virtual
3329 register instantiation to simply modify the constant rather than
3330 creating another one around this addition. */
3331 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3332 && GET_CODE (XEXP (value, 0)) == PLUS
3333 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3334 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3335 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3336 {
3337 rtx temp = expand_binop (GET_MODE (value), binoptab,
3338 XEXP (XEXP (value, 0), 0), op2,
3339 subtarget, 0, OPTAB_LIB_WIDEN);
3340 return expand_binop (GET_MODE (value), binoptab, temp,
3341 force_operand (XEXP (XEXP (value, 0), 1), 0),
3342 target, 0, OPTAB_LIB_WIDEN);
3343 }
3344
3345 tmp = force_operand (XEXP (value, 0), subtarget);
3346 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 3347 force_operand (op2, NULL_RTX),
bbf6f052 3348 target, 0, OPTAB_LIB_WIDEN);
8008b228 3349 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
3350 because the only operations we are expanding here are signed ones. */
3351 }
3352 return value;
3353}
3354\f
3355/* Subroutine of expand_expr:
3356 save the non-copied parts (LIST) of an expr (LHS), and return a list
3357 which can restore these values to their previous values,
3358 should something modify their storage. */
3359
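/* Illustrative note: each saved part becomes a TREE_LIST node whose
   TREE_PURPOSE is a COMPONENT_REF of LHS and whose TREE_VALUE is an
   RTL_EXPR wrapping a stack temporary that receives the old value.  */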
3360static tree
3361save_noncopied_parts (lhs, list)
3362 tree lhs;
3363 tree list;
3364{
3365 tree tail;
3366 tree parts = 0;
3367
3368 for (tail = list; tail; tail = TREE_CHAIN (tail))
3369 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3370 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3371 else
3372 {
3373 tree part = TREE_VALUE (tail);
3374 tree part_type = TREE_TYPE (part);
906c4e36 3375 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
3376 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3377 int_size_in_bytes (part_type), 0);
3378 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 3379 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 3380 parts = tree_cons (to_be_saved,
906c4e36
RK
3381 build (RTL_EXPR, part_type, NULL_TREE,
3382 (tree) target),
bbf6f052
RK
3383 parts);
3384 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3385 }
3386 return parts;
3387}
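
/* Sketch of the list built above, for a hypothetical field F of LHS:
 each node pairs

 TREE_PURPOSE: (COMPONENT_REF lhs f) -- where to restore
 TREE_VALUE: (RTL_EXPR ... (MEM stack-temp)) -- the saved bits

 so a caller can later store each TREE_VALUE back through its
 TREE_PURPOSE if LHS's storage gets clobbered. */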
3388
3389/* Subroutine of expand_expr:
3390 record the non-copied parts (LIST) of an expr (LHS), and return a list
3391 which specifies the initial values of these parts. */
3392
3393static tree
3394init_noncopied_parts (lhs, list)
3395 tree lhs;
3396 tree list;
3397{
3398 tree tail;
3399 tree parts = 0;
3400
3401 for (tail = list; tail; tail = TREE_CHAIN (tail))
3402 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3403 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3404 else
3405 {
3406 tree part = TREE_VALUE (tail);
3407 tree part_type = TREE_TYPE (part);
906c4e36 3408 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
3409 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3410 }
3411 return parts;
3412}
3413
3414/* Subroutine of expand_expr: return nonzero iff there is no way that
3415 EXP can reference X, which is being modified. */
3416
3417static int
3418safe_from_p (x, exp)
3419 rtx x;
3420 tree exp;
3421{
3422 rtx exp_rtl = 0;
3423 int i, nops;
3424
3425 if (x == 0)
3426 return 1;
3427
3428 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3429 find the underlying pseudo. */
3430 if (GET_CODE (x) == SUBREG)
3431 {
3432 x = SUBREG_REG (x);
3433 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3434 return 0;
3435 }
3436
3437 /* If X is a location in the outgoing argument area, it is always safe. */
3438 if (GET_CODE (x) == MEM
3439 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3440 || (GET_CODE (XEXP (x, 0)) == PLUS
3441 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3442 return 1;
3443
3444 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3445 {
3446 case 'd':
3447 exp_rtl = DECL_RTL (exp);
3448 break;
3449
3450 case 'c':
3451 return 1;
3452
3453 case 'x':
3454 if (TREE_CODE (exp) == TREE_LIST)
f32fd778
RS
3455 return ((TREE_VALUE (exp) == 0
3456 || safe_from_p (x, TREE_VALUE (exp)))
bbf6f052
RK
3457 && (TREE_CHAIN (exp) == 0
3458 || safe_from_p (x, TREE_CHAIN (exp))));
3459 else
3460 return 0;
3461
3462 case '1':
3463 return safe_from_p (x, TREE_OPERAND (exp, 0));
3464
3465 case '2':
3466 case '<':
3467 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3468 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3469
3470 case 'e':
3471 case 'r':
3472 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3473 the expression. If it is set, we conflict iff we are that rtx or
3474 both are in memory. Otherwise, we check all operands of the
3475 expression recursively. */
3476
3477 switch (TREE_CODE (exp))
3478 {
3479 case ADDR_EXPR:
e44842fe
RK
3480 return (staticp (TREE_OPERAND (exp, 0))
3481 || safe_from_p (x, TREE_OPERAND (exp, 0)));
bbf6f052
RK
3482
3483 case INDIRECT_REF:
3484 if (GET_CODE (x) == MEM)
3485 return 0;
3486 break;
3487
3488 case CALL_EXPR:
3489 exp_rtl = CALL_EXPR_RTL (exp);
3490 if (exp_rtl == 0)
3491 {
3492 /* Assume that the call will clobber all hard registers and
3493 all of memory. */
3494 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3495 || GET_CODE (x) == MEM)
3496 return 0;
3497 }
3498
3499 break;
3500
3501 case RTL_EXPR:
3502 exp_rtl = RTL_EXPR_RTL (exp);
3503 if (exp_rtl == 0)
3504 /* We don't know what this can modify. */
3505 return 0;
3506
3507 break;
3508
3509 case WITH_CLEANUP_EXPR:
3510 exp_rtl = RTL_EXPR_RTL (exp);
3511 break;
3512
3513 case SAVE_EXPR:
3514 exp_rtl = SAVE_EXPR_RTL (exp);
3515 break;
3516
8129842c
RS
3517 case BIND_EXPR:
3518 /* The only operand we look at is operand 1. The rest aren't
3519 part of the expression. */
3520 return safe_from_p (x, TREE_OPERAND (exp, 1));
3521
bbf6f052
RK
3522 case METHOD_CALL_EXPR:
3523 /* This takes a rtx argument, but shouldn't appear here. */
3524 abort ();
3525 }
3526
3527 /* If we have an rtx, we do not need to scan our operands. */
3528 if (exp_rtl)
3529 break;
3530
3531 nops = tree_code_length[(int) TREE_CODE (exp)];
3532 for (i = 0; i < nops; i++)
3533 if (TREE_OPERAND (exp, i) != 0
3534 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3535 return 0;
3536 }
3537
3538 /* If we have an rtl, find any enclosed object. Then see if we conflict
3539 with it. */
3540 if (exp_rtl)
3541 {
3542 if (GET_CODE (exp_rtl) == SUBREG)
3543 {
3544 exp_rtl = SUBREG_REG (exp_rtl);
3545 if (GET_CODE (exp_rtl) == REG
3546 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3547 return 0;
3548 }
3549
3550 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3551 are memory and EXP is not readonly. */
3552 return ! (rtx_equal_p (x, exp_rtl)
3553 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3554 && ! TREE_READONLY (exp)));
3555 }
3556
3557 /* If we reach here, it is safe. */
3558 return 1;
3559}
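
/* Illustrative consequences of the rules above (operands are
 hypothetical): safe_from_p (reg, a + b) is 1 when neither A nor B
 lives in REG; safe_from_p (mem, *p) is 0, since the INDIRECT_REF
 might alias any MEM; and safe_from_p (mem, f (x)) is 0 while
 CALL_EXPR_RTL is unset, because the call is assumed to clobber
 all of memory. */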
3560
3561/* Subroutine of expand_expr: return nonzero iff EXP is an
3562 expression whose type is statically determinable. */
3563
3564static int
3565fixed_type_p (exp)
3566 tree exp;
3567{
3568 if (TREE_CODE (exp) == PARM_DECL
3569 || TREE_CODE (exp) == VAR_DECL
3570 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3571 || TREE_CODE (exp) == COMPONENT_REF
3572 || TREE_CODE (exp) == ARRAY_REF)
3573 return 1;
3574 return 0;
3575}
3576\f
3577/* expand_expr: generate code for computing expression EXP.
3578 An rtx for the computed value is returned. The value is never null.
3579 In the case of a void EXP, const0_rtx is returned.
3580
3581 The value may be stored in TARGET if TARGET is nonzero.
3582 TARGET is just a suggestion; callers must assume that
3583 the rtx returned may not be the same as TARGET.
3584
3585 If TARGET is CONST0_RTX, it means that the value will be ignored.
3586
3587 If TMODE is not VOIDmode, it suggests generating the
3588 result in mode TMODE. But this is done only when convenient.
 3589 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3590 TMODE is just a suggestion; callers must assume that
3591 the rtx returned may not have mode TMODE.
3592
d6a5ac33
RK
3593 Note that TARGET may have neither TMODE nor MODE. In that case, it
3594 probably will not be used.
bbf6f052
RK
3595
3596 If MODIFIER is EXPAND_SUM then when EXP is an addition
3597 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3598 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3599 products as above, or REG or MEM, or constant.
3600 Ordinarily in such cases we would output mul or add instructions
3601 and then return a pseudo reg containing the sum.
3602
3603 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3604 it also marks a label as absolutely required (it can't be dead).
26fcb35a 3605 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
3606 This is used for outputting expressions used in initializers.
3607
3608 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3609 with a constant address even if that address is not normally legitimate.
3610 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
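
 /* Illustrative EXPAND_SUM result (symbol and register numbers are
 hypothetical): expanding A[I] for use as an address may yield

 (plus:SI (mult:SI (reg:SI 75) (const_int 4)) (symbol_ref:SI "a"))

 instead of mul/add insns and a pseudo, so the caller can fold the
 whole sum into a single addressing mode. */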
bbf6f052
RK
3611
3612rtx
3613expand_expr (exp, target, tmode, modifier)
3614 register tree exp;
3615 rtx target;
3616 enum machine_mode tmode;
3617 enum expand_modifier modifier;
3618{
b50d17a1
RK
3619 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
3620 This is static so it will be accessible to our recursive callees. */
3621 static tree placeholder_list = 0;
bbf6f052
RK
3622 register rtx op0, op1, temp;
3623 tree type = TREE_TYPE (exp);
3624 int unsignedp = TREE_UNSIGNED (type);
3625 register enum machine_mode mode = TYPE_MODE (type);
3626 register enum tree_code code = TREE_CODE (exp);
3627 optab this_optab;
3628 /* Use subtarget as the target for operand 0 of a binary operation. */
3629 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3630 rtx original_target = target;
ca695ac9 3631 /* Maybe defer this until sure not doing bytecode? */
dd27116b
RK
3632 int ignore = (target == const0_rtx
3633 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
3634 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3635 || code == COND_EXPR)
dd27116b 3636 && TREE_CODE (type) == VOID_TYPE));
bbf6f052
RK
3637 tree context;
3638
ca695ac9 3639
1d556704 3640 if (output_bytecode && modifier != EXPAND_INITIALIZER)
ca695ac9
JB
3641 {
3642 bc_expand_expr (exp);
3643 return NULL;
3644 }
3645
bbf6f052
RK
3646 /* Don't use hard regs as subtargets, because the combiner
3647 can only handle pseudo regs. */
3648 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3649 subtarget = 0;
3650 /* Avoid subtargets inside loops,
3651 since they hide some invariant expressions. */
3652 if (preserve_subexpressions_p ())
3653 subtarget = 0;
3654
dd27116b
RK
3655 /* If we are going to ignore this result, we need only do something
3656 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
3657 is, short-circuit the most common cases here. Note that we must
3658 not call expand_expr with anything but const0_rtx in case this
3659 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 3660
dd27116b
RK
3661 if (ignore)
3662 {
3663 if (! TREE_SIDE_EFFECTS (exp))
3664 return const0_rtx;
3665
3666 /* Ensure we reference a volatile object even if value is ignored. */
3667 if (TREE_THIS_VOLATILE (exp)
3668 && TREE_CODE (exp) != FUNCTION_DECL
3669 && mode != VOIDmode && mode != BLKmode)
3670 {
3671 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3672 if (GET_CODE (temp) == MEM)
3673 temp = copy_to_reg (temp);
3674 return const0_rtx;
3675 }
3676
3677 if (TREE_CODE_CLASS (code) == '1')
3678 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3679 VOIDmode, modifier);
3680 else if (TREE_CODE_CLASS (code) == '2'
3681 || TREE_CODE_CLASS (code) == '<')
3682 {
3683 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3684 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3685 return const0_rtx;
3686 }
3687 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3688 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3689 /* If the second operand has no side effects, just evaluate
3690 the first. */
3691 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3692 VOIDmode, modifier);
dd27116b 3693
90764a87 3694 target = 0;
dd27116b 3695 }
bbf6f052 3696
e44842fe
RK
 3697 /* If we will do cse, generate all results into pseudo registers
3698 since 1) that allows cse to find more things
3699 and 2) otherwise cse could produce an insn the machine
3700 cannot support. */
3701
bbf6f052
RK
3702 if (! cse_not_expected && mode != BLKmode && target
3703 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3704 target = subtarget;
3705
bbf6f052
RK
3706 switch (code)
3707 {
3708 case LABEL_DECL:
b552441b
RS
3709 {
3710 tree function = decl_function_context (exp);
3711 /* Handle using a label in a containing function. */
3712 if (function != current_function_decl && function != 0)
3713 {
3714 struct function *p = find_function_data (function);
3715 /* Allocate in the memory associated with the function
3716 that the label is in. */
3717 push_obstacks (p->function_obstack,
3718 p->function_maybepermanent_obstack);
3719
3720 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3721 label_rtx (exp), p->forced_labels);
3722 pop_obstacks ();
3723 }
3724 else if (modifier == EXPAND_INITIALIZER)
3725 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3726 label_rtx (exp), forced_labels);
26fcb35a 3727 temp = gen_rtx (MEM, FUNCTION_MODE,
b552441b 3728 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
26fcb35a
RS
3729 if (function != current_function_decl && function != 0)
3730 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3731 return temp;
b552441b 3732 }
bbf6f052
RK
3733
3734 case PARM_DECL:
3735 if (DECL_RTL (exp) == 0)
3736 {
3737 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 3738 return CONST0_RTX (mode);
bbf6f052
RK
3739 }
3740
d6a5ac33
RK
3741 /* ... fall through ... */
3742
bbf6f052 3743 case VAR_DECL:
2dca20cd
RS
3744 /* If a static var's type was incomplete when the decl was written,
3745 but the type is complete now, lay out the decl now. */
3746 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3747 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
3748 {
3749 push_obstacks_nochange ();
3750 end_temporary_allocation ();
3751 layout_decl (exp, 0);
3752 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
3753 pop_obstacks ();
3754 }
d6a5ac33
RK
3755
3756 /* ... fall through ... */
3757
2dca20cd 3758 case FUNCTION_DECL:
bbf6f052
RK
3759 case RESULT_DECL:
3760 if (DECL_RTL (exp) == 0)
3761 abort ();
d6a5ac33 3762
e44842fe
RK
3763 /* Ensure variable marked as used even if it doesn't go through
 3764 a parser. If it hasn't been used yet, write out an external
3765 definition. */
3766 if (! TREE_USED (exp))
3767 {
3768 assemble_external (exp);
3769 TREE_USED (exp) = 1;
3770 }
3771
bbf6f052
RK
3772 /* Handle variables inherited from containing functions. */
3773 context = decl_function_context (exp);
3774
3775 /* We treat inline_function_decl as an alias for the current function
3776 because that is the inline function whose vars, types, etc.
3777 are being merged into the current function.
3778 See expand_inline_function. */
d6a5ac33 3779
bbf6f052
RK
3780 if (context != 0 && context != current_function_decl
3781 && context != inline_function_decl
3782 /* If var is static, we don't need a static chain to access it. */
3783 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3784 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3785 {
3786 rtx addr;
3787
3788 /* Mark as non-local and addressable. */
81feeecb 3789 DECL_NONLOCAL (exp) = 1;
bbf6f052
RK
3790 mark_addressable (exp);
3791 if (GET_CODE (DECL_RTL (exp)) != MEM)
3792 abort ();
3793 addr = XEXP (DECL_RTL (exp), 0);
3794 if (GET_CODE (addr) == MEM)
d6a5ac33
RK
3795 addr = gen_rtx (MEM, Pmode,
3796 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
3797 else
3798 addr = fix_lexical_addr (addr, exp);
3799 return change_address (DECL_RTL (exp), mode, addr);
3800 }
4af3895e 3801
bbf6f052
RK
3802 /* This is the case of an array whose size is to be determined
3803 from its initializer, while the initializer is still being parsed.
3804 See expand_decl. */
d6a5ac33 3805
bbf6f052
RK
3806 if (GET_CODE (DECL_RTL (exp)) == MEM
3807 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3808 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3809 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
3810
3811 /* If DECL_RTL is memory, we are in the normal case and either
3812 the address is not valid or it is not a register and -fforce-addr
3813 is specified, get the address into a register. */
3814
bbf6f052
RK
3815 if (GET_CODE (DECL_RTL (exp)) == MEM
3816 && modifier != EXPAND_CONST_ADDRESS
3817 && modifier != EXPAND_SUM
d6a5ac33
RK
3818 && modifier != EXPAND_INITIALIZER
3819 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
bbf6f052 3820 || (flag_force_addr
d6a5ac33
RK
3821 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
3822 return change_address (DECL_RTL (exp), VOIDmode,
3823 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8
RK
3824
3825 /* If the mode of DECL_RTL does not match that of the decl, it
3826 must be a promoted value. We return a SUBREG of the wanted mode,
3827 but mark it so that we know that it was already extended. */
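
 /* Sketch (hypothetical modes): a QImode variable promoted to SImode
 by PROMOTE_MODE comes back as

 (subreg:QI (reg:SI 80) 0)

 with SUBREG_PROMOTED_VAR_P set, telling later code the upper bits
 are already extended with the signedness recorded in
 SUBREG_PROMOTED_UNSIGNED_P. */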
3828
3829 if (GET_CODE (DECL_RTL (exp)) == REG
3830 && GET_MODE (DECL_RTL (exp)) != mode)
3831 {
3832 enum machine_mode decl_mode = DECL_MODE (exp);
3833
3834 /* Get the signedness used for this variable. Ensure we get the
3835 same mode we got when the variable was declared. */
3836
3837 PROMOTE_MODE (decl_mode, unsignedp, type);
3838
3839 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3840 abort ();
3841
3842 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3843 SUBREG_PROMOTED_VAR_P (temp) = 1;
3844 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3845 return temp;
3846 }
3847
bbf6f052
RK
3848 return DECL_RTL (exp);
3849
3850 case INTEGER_CST:
3851 return immed_double_const (TREE_INT_CST_LOW (exp),
3852 TREE_INT_CST_HIGH (exp),
3853 mode);
3854
3855 case CONST_DECL:
3856 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3857
3858 case REAL_CST:
3859 /* If optimized, generate immediate CONST_DOUBLE
3860 which will be turned into memory by reload if necessary.
3861
3862 We used to force a register so that loop.c could see it. But
3863 this does not allow gen_* patterns to perform optimizations with
3864 the constants. It also produces two insns in cases like "x = 1.0;".
3865 On most machines, floating-point constants are not permitted in
3866 many insns, so we'd end up copying it to a register in any case.
3867
3868 Now, we do the copying in expand_binop, if appropriate. */
3869 return immed_real_const (exp);
3870
3871 case COMPLEX_CST:
3872 case STRING_CST:
3873 if (! TREE_CST_RTL (exp))
3874 output_constant_def (exp);
3875
3876 /* TREE_CST_RTL probably contains a constant address.
3877 On RISC machines where a constant address isn't valid,
3878 make some insns to get that address into a register. */
3879 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3880 && modifier != EXPAND_CONST_ADDRESS
3881 && modifier != EXPAND_INITIALIZER
3882 && modifier != EXPAND_SUM
d6a5ac33
RK
3883 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
3884 || (flag_force_addr
3885 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
3886 return change_address (TREE_CST_RTL (exp), VOIDmode,
3887 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3888 return TREE_CST_RTL (exp);
3889
3890 case SAVE_EXPR:
3891 context = decl_function_context (exp);
d6a5ac33 3892
bbf6f052
RK
3893 /* We treat inline_function_decl as an alias for the current function
3894 because that is the inline function whose vars, types, etc.
3895 are being merged into the current function.
3896 See expand_inline_function. */
3897 if (context == current_function_decl || context == inline_function_decl)
3898 context = 0;
3899
3900 /* If this is non-local, handle it. */
3901 if (context)
3902 {
3903 temp = SAVE_EXPR_RTL (exp);
3904 if (temp && GET_CODE (temp) == REG)
3905 {
3906 put_var_into_stack (exp);
3907 temp = SAVE_EXPR_RTL (exp);
3908 }
3909 if (temp == 0 || GET_CODE (temp) != MEM)
3910 abort ();
3911 return change_address (temp, mode,
3912 fix_lexical_addr (XEXP (temp, 0), exp));
3913 }
3914 if (SAVE_EXPR_RTL (exp) == 0)
3915 {
3916 if (mode == BLKmode)
34a25822
RK
3917 {
3918 temp
3919 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3920 MEM_IN_STRUCT_P (temp)
3921 = (TREE_CODE (type) == RECORD_TYPE
3922 || TREE_CODE (type) == UNION_TYPE
3923 || TREE_CODE (type) == QUAL_UNION_TYPE
3924 || TREE_CODE (type) == ARRAY_TYPE);
3925 }
bbf6f052 3926 else
1499e0a8
RK
3927 {
3928 enum machine_mode var_mode = mode;
3929
3930 if (TREE_CODE (type) == INTEGER_TYPE
3931 || TREE_CODE (type) == ENUMERAL_TYPE
3932 || TREE_CODE (type) == BOOLEAN_TYPE
3933 || TREE_CODE (type) == CHAR_TYPE
3934 || TREE_CODE (type) == REAL_TYPE
3935 || TREE_CODE (type) == POINTER_TYPE
3936 || TREE_CODE (type) == OFFSET_TYPE)
3937 {
3938 PROMOTE_MODE (var_mode, unsignedp, type);
3939 }
3940
3941 temp = gen_reg_rtx (var_mode);
3942 }
3943
bbf6f052 3944 SAVE_EXPR_RTL (exp) = temp;
bbf6f052
RK
3945 if (!optimize && GET_CODE (temp) == REG)
3946 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3947 save_expr_regs);
ff78f773
RK
3948
3949 /* If the mode of TEMP does not match that of the expression, it
3950 must be a promoted value. We pass store_expr a SUBREG of the
3951 wanted mode but mark it so that we know that it was already
3952 extended. Note that `unsignedp' was modified above in
3953 this case. */
3954
3955 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3956 {
3957 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3958 SUBREG_PROMOTED_VAR_P (temp) = 1;
3959 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3960 }
3961
3962 store_expr (TREE_OPERAND (exp, 0), temp, 0);
bbf6f052 3963 }
1499e0a8
RK
3964
3965 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3966 must be a promoted value. We return a SUBREG of the wanted mode,
adc22a04 3967 but mark it so that we know that it was already extended. */
1499e0a8
RK
3968
3969 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3970 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3971 {
adc22a04
RK
3972 enum machine_mode var_mode = mode;
3973
3974 if (TREE_CODE (type) == INTEGER_TYPE
3975 || TREE_CODE (type) == ENUMERAL_TYPE
3976 || TREE_CODE (type) == BOOLEAN_TYPE
3977 || TREE_CODE (type) == CHAR_TYPE
3978 || TREE_CODE (type) == REAL_TYPE
3979 || TREE_CODE (type) == POINTER_TYPE
3980 || TREE_CODE (type) == OFFSET_TYPE)
3981 {
3982 PROMOTE_MODE (var_mode, unsignedp, type);
3983 }
3984
1499e0a8
RK
3985 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3986 SUBREG_PROMOTED_VAR_P (temp) = 1;
3987 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3988 return temp;
3989 }
3990
bbf6f052
RK
3991 return SAVE_EXPR_RTL (exp);
3992
b50d17a1
RK
3993 case PLACEHOLDER_EXPR:
3994 /* If there is an object on the head of the placeholder list,
 3995 see if some object in its references is of type TYPE. For
3996 further information, see tree.def. */
3997 if (placeholder_list)
3998 {
3999 tree object;
f59d43a9 4000 tree old_list = placeholder_list;
b50d17a1
RK
4001
4002 for (object = TREE_PURPOSE (placeholder_list);
4003 TREE_TYPE (object) != type
4004 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4805bfa0
RK
4005 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4006 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4007 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
b50d17a1
RK
4008 object = TREE_OPERAND (object, 0))
4009 ;
4010
4805bfa0 4011 if (object && TREE_TYPE (object) == type)
f59d43a9
RK
4012 {
4013 /* Expand this object skipping the list entries before
4014 it was found in case it is also a PLACEHOLDER_EXPR.
4015 In that case, we want to translate it using subsequent
4016 entries. */
4017 placeholder_list = TREE_CHAIN (placeholder_list);
4018 temp = expand_expr (object, original_target, tmode, modifier);
4019 placeholder_list = old_list;
4020 return temp;
4021 }
b50d17a1
RK
4022 }
4023
4024 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4025 abort ();
4026
4027 case WITH_RECORD_EXPR:
4028 /* Put the object on the placeholder list, expand our first operand,
4029 and pop the list. */
4030 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4031 placeholder_list);
4032 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4033 tmode, modifier);
4034 placeholder_list = TREE_CHAIN (placeholder_list);
4035 return target;
4036
bbf6f052 4037 case EXIT_EXPR:
e44842fe
RK
4038 expand_exit_loop_if_false (NULL_PTR,
4039 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
4040 return const0_rtx;
4041
4042 case LOOP_EXPR:
0088fcb1 4043 push_temp_slots ();
bbf6f052
RK
4044 expand_start_loop (1);
4045 expand_expr_stmt (TREE_OPERAND (exp, 0));
4046 expand_end_loop ();
0088fcb1 4047 pop_temp_slots ();
bbf6f052
RK
4048
4049 return const0_rtx;
4050
4051 case BIND_EXPR:
4052 {
4053 tree vars = TREE_OPERAND (exp, 0);
4054 int vars_need_expansion = 0;
4055
4056 /* Need to open a binding contour here because
 4057 if there are any cleanups they must be contained here. */
4058 expand_start_bindings (0);
4059
2df53c0b
RS
4060 /* Mark the corresponding BLOCK for output in its proper place. */
4061 if (TREE_OPERAND (exp, 2) != 0
4062 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4063 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
4064
4065 /* If VARS have not yet been expanded, expand them now. */
4066 while (vars)
4067 {
4068 if (DECL_RTL (vars) == 0)
4069 {
4070 vars_need_expansion = 1;
4071 expand_decl (vars);
4072 }
4073 expand_decl_init (vars);
4074 vars = TREE_CHAIN (vars);
4075 }
4076
4077 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4078
4079 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4080
4081 return temp;
4082 }
4083
4084 case RTL_EXPR:
4085 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4086 abort ();
4087 emit_insns (RTL_EXPR_SEQUENCE (exp));
4088 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
99310285 4089 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
ca814259 4090 free_temps_for_rtl_expr (exp);
bbf6f052
RK
4091 return RTL_EXPR_RTL (exp);
4092
4093 case CONSTRUCTOR:
dd27116b
RK
4094 /* If we don't need the result, just ensure we evaluate any
4095 subexpressions. */
4096 if (ignore)
4097 {
4098 tree elt;
4099 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4100 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4101 return const0_rtx;
4102 }
3207b172 4103
4af3895e
JVA
4104 /* All elts simple constants => refer to a constant in memory. But
4105 if this is a non-BLKmode mode, let it store a field at a time
4106 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172
RK
4107 fold. Likewise, if we have a target we can use, it is best to
4108 store directly into the target. If we are making an initializer and
4109 all operands are constant, put it in memory as well. */
dd27116b 4110 else if ((TREE_STATIC (exp)
3207b172
RK
4111 && ((mode == BLKmode
4112 && ! (target != 0 && safe_from_p (target, exp)))
4113 || TREE_ADDRESSABLE (exp)))
dd27116b 4114 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
4115 {
4116 rtx constructor = output_constant_def (exp);
b552441b
RS
4117 if (modifier != EXPAND_CONST_ADDRESS
4118 && modifier != EXPAND_INITIALIZER
4119 && modifier != EXPAND_SUM
d6a5ac33
RK
4120 && (! memory_address_p (GET_MODE (constructor),
4121 XEXP (constructor, 0))
4122 || (flag_force_addr
4123 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
4124 constructor = change_address (constructor, VOIDmode,
4125 XEXP (constructor, 0));
4126 return constructor;
4127 }
4128
bbf6f052
RK
4129 else
4130 {
4131 if (target == 0 || ! safe_from_p (target, exp))
4132 {
4133 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
d6a5ac33 4134 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
bbf6f052
RK
4135 else
4136 {
3b94d087
RS
4137 enum tree_code c = TREE_CODE (type);
4138 target
4139 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
e7f3c83f
RK
4140 if (c == RECORD_TYPE || c == UNION_TYPE
4141 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3b94d087 4142 MEM_IN_STRUCT_P (target) = 1;
bbf6f052
RK
4143 }
4144 }
4145 store_constructor (exp, target);
4146 return target;
4147 }
4148
4149 case INDIRECT_REF:
4150 {
4151 tree exp1 = TREE_OPERAND (exp, 0);
4152 tree exp2;
4153
 4154 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
4155 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4156 This code has the same general effect as simply doing
4157 expand_expr on the save expr, except that the expression PTR
4158 is computed for use as a memory address. This means different
4159 code, suitable for indexing, may be generated. */
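
 /* Sketch of the source shape meant here (hypothetical): for
 *p += 1 the address P is wrapped in a SAVE_EXPR, roughly

 (modify_expr (indirect_ref (save_expr p))
 (plus_expr (indirect_ref (save_expr p)) 1))

 so both references share the one address computed below. */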
4160 if (TREE_CODE (exp1) == SAVE_EXPR
4161 && SAVE_EXPR_RTL (exp1) == 0
4162 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4163 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4164 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4165 {
906c4e36
RK
4166 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4167 VOIDmode, EXPAND_SUM);
bbf6f052
RK
4168 op0 = memory_address (mode, temp);
4169 op0 = copy_all_regs (op0);
4170 SAVE_EXPR_RTL (exp1) = op0;
4171 }
4172 else
4173 {
906c4e36 4174 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
4175 op0 = memory_address (mode, op0);
4176 }
8c8a8e34
JW
4177
4178 temp = gen_rtx (MEM, mode, op0);
4179 /* If address was computed by addition,
4180 mark this as an element of an aggregate. */
4181 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4182 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4183 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4184 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4185 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4186 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
e7f3c83f 4187 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
8c8a8e34
JW
4188 || (TREE_CODE (exp1) == ADDR_EXPR
4189 && (exp2 = TREE_OPERAND (exp1, 0))
4190 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4191 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
e7f3c83f
RK
4192 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
4193 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
8c8a8e34 4194 MEM_IN_STRUCT_P (temp) = 1;
2c4c436a 4195 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
89742723 4196#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
bbf6f052
RK
4197 a location is accessed through a pointer to const does not mean
4198 that the value there can never change. */
8c8a8e34 4199 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
bbf6f052 4200#endif
8c8a8e34
JW
4201 return temp;
4202 }
bbf6f052
RK
4203
4204 case ARRAY_REF:
742920c7
RK
4205 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4206 abort ();
bbf6f052 4207
bbf6f052 4208 {
742920c7
RK
4209 tree array = TREE_OPERAND (exp, 0);
4210 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4211 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4212 tree index = TREE_OPERAND (exp, 1);
4213 tree index_type = TREE_TYPE (index);
bbf6f052 4214 int i;
bbf6f052 4215
b50d17a1
RK
4216 if (TREE_CODE (low_bound) != INTEGER_CST
4217 && contains_placeholder_p (low_bound))
4218 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4219
d4c89139
PB
4220 /* Optimize the special-case of a zero lower bound.
4221
4222 We convert the low_bound to sizetype to avoid some problems
4223 with constant folding. (E.g. suppose the lower bound is 1,
4224 and its mode is QI. Without the conversion, (ARRAY
4225 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4226 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4227
4228 But sizetype isn't quite right either (especially if
4229 the lowbound is negative). FIXME */
4230
742920c7 4231 if (! integer_zerop (low_bound))
d4c89139
PB
4232 index = fold (build (MINUS_EXPR, index_type, index,
4233 convert (sizetype, low_bound)));
742920c7
RK
4234
4235 if (TREE_CODE (index) != INTEGER_CST
4236 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4237 {
4238 /* Nonconstant array index or nonconstant element size.
4239 Generate the tree for *(&array+index) and expand that,
4240 except do it in a language-independent way
4241 and don't complain about non-lvalue arrays.
4242 `mark_addressable' should already have been called
4243 for any array for which this case will be reached. */
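
 /* Sketch of the rewrite performed below (names hypothetical): a[i],
 with A an array of T and I nonconstant, becomes roughly

 *(&a + i * sizeof (T))

 built from INDIRECT_REF, PLUS_EXPR, ADDR_EXPR and MULT_EXPR, and
 is then expanded like any other pointer dereference. */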
4244
4245 /* Don't forget the const or volatile flag from the array
4246 element. */
4247 tree variant_type = build_type_variant (type,
4248 TREE_READONLY (exp),
4249 TREE_THIS_VOLATILE (exp));
4250 tree array_adr = build1 (ADDR_EXPR,
4251 build_pointer_type (variant_type), array);
4252 tree elt;
b50d17a1 4253 tree size = size_in_bytes (type);
742920c7
RK
4254
4255 /* Convert the integer argument to a type the same size as a
4256 pointer so the multiply won't overflow spuriously. */
4257 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4258 index = convert (type_for_size (POINTER_SIZE, 0), index);
4259
b50d17a1
RK
4260 if (TREE_CODE (size) != INTEGER_CST
4261 && contains_placeholder_p (size))
4262 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4263
742920c7
RK
4264 /* Don't think the address has side effects
4265 just because the array does.
4266 (In some cases the address might have side effects,
4267 and we fail to record that fact here. However, it should not
4268 matter, since expand_expr should not care.) */
4269 TREE_SIDE_EFFECTS (array_adr) = 0;
4270
4271 elt = build1 (INDIRECT_REF, type,
4272 fold (build (PLUS_EXPR,
4273 TYPE_POINTER_TO (variant_type),
4274 array_adr,
4275 fold (build (MULT_EXPR,
4276 TYPE_POINTER_TO (variant_type),
b50d17a1 4277 index, size)))));
742920c7
RK
4278
4279 /* Volatility, etc., of new expression is same as old
4280 expression. */
4281 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4282 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4283 TREE_READONLY (elt) = TREE_READONLY (exp);
4284
4285 return expand_expr (elt, target, tmode, modifier);
4286 }
4287
4288 /* Fold an expression like: "foo"[2].
4289 This is not done in fold so it won't happen inside &. */
4290
4291 if (TREE_CODE (array) == STRING_CST
4292 && TREE_CODE (index) == INTEGER_CST
4293 && !TREE_INT_CST_HIGH (index)
307b821c
RK
4294 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4295 && GET_MODE_CLASS (mode) == MODE_INT)
4296 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 4297
742920c7
RK
4298 /* If this is a constant index into a constant array,
 4299 just get the value from the array. Handle both cases: when
4300 we have an explicit constructor and when our operand is a variable
4301 that was declared const. */
4af3895e 4302
742920c7
RK
4303 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4304 {
4305 if (TREE_CODE (index) == INTEGER_CST
4306 && TREE_INT_CST_HIGH (index) == 0)
4307 {
4308 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4309
4310 i = TREE_INT_CST_LOW (index);
4311 while (elem && i--)
4312 elem = TREE_CHAIN (elem);
4313 if (elem)
4314 return expand_expr (fold (TREE_VALUE (elem)), target,
4315 tmode, modifier);
4316 }
4317 }
4af3895e 4318
742920c7
RK
4319 else if (optimize >= 1
4320 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4321 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4322 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4323 {
4324 if (TREE_CODE (index) == INTEGER_CST
4325 && TREE_INT_CST_HIGH (index) == 0)
4326 {
4327 tree init = DECL_INITIAL (array);
4328
4329 i = TREE_INT_CST_LOW (index);
4330 if (TREE_CODE (init) == CONSTRUCTOR)
4331 {
4332 tree elem = CONSTRUCTOR_ELTS (init);
4333
03dc44a6
RS
4334 while (elem
4335 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
4336 elem = TREE_CHAIN (elem);
4337 if (elem)
4338 return expand_expr (fold (TREE_VALUE (elem)), target,
4339 tmode, modifier);
4340 }
4341 else if (TREE_CODE (init) == STRING_CST
4342 && i < TREE_STRING_LENGTH (init))
307b821c 4343 return GEN_INT (TREE_STRING_POINTER (init)[i]);
742920c7
RK
4344 }
4345 }
4346 }
8c8a8e34 4347
bbf6f052
RK
4348 /* Treat array-ref with constant index as a component-ref. */
4349
4350 case COMPONENT_REF:
4351 case BIT_FIELD_REF:
4af3895e
JVA
4352 /* If the operand is a CONSTRUCTOR, we can just extract the
4353 appropriate field if it is present. */
4354 if (code != ARRAY_REF
4355 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4356 {
4357 tree elt;
4358
4359 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4360 elt = TREE_CHAIN (elt))
4361 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4362 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4363 }
4364
bbf6f052
RK
4365 {
4366 enum machine_mode mode1;
4367 int bitsize;
4368 int bitpos;
7bb0943f 4369 tree offset;
bbf6f052 4370 int volatilep = 0;
7bb0943f 4371 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
bbf6f052 4372 &mode1, &unsignedp, &volatilep);
034f9101 4373 int alignment;
bbf6f052 4374
e7f3c83f
RK
4375 /* If we got back the original object, something is wrong. Perhaps
4376 we are evaluating an expression too early. In any event, don't
4377 infinitely recurse. */
4378 if (tem == exp)
4379 abort ();
4380
bbf6f052
RK
4381 /* In some cases, we will be offsetting OP0's address by a constant.
4382 So get it as a sum, if possible. If we will be using it
4383 directly in an insn, we validate it. */
906c4e36 4384 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052 4385
8c8a8e34 4386 /* If this is a constant, put it into a register if it is a
8008b228 4387 legitimate constant and memory if it isn't. */
8c8a8e34
JW
4388 if (CONSTANT_P (op0))
4389 {
4390 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 4391 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
4392 op0 = force_reg (mode, op0);
4393 else
4394 op0 = validize_mem (force_const_mem (mode, op0));
4395 }
4396
034f9101 4397 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
7bb0943f
RS
4398 if (offset != 0)
4399 {
906c4e36 4400 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
4401
4402 if (GET_CODE (op0) != MEM)
4403 abort ();
4404 op0 = change_address (op0, VOIDmode,
4405 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4406 force_reg (Pmode, offset_rtx)));
034f9101
RS
4407 /* If we have a variable offset, the known alignment
4408 is only that of the innermost structure containing the field.
4409 (Actually, we could sometimes do better by using the
4410 size of an element of the innermost array, but no need.) */
4411 if (TREE_CODE (exp) == COMPONENT_REF
4412 || TREE_CODE (exp) == BIT_FIELD_REF)
4413 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4414 / BITS_PER_UNIT);
7bb0943f
RS
4415 }
4416
bbf6f052
RK
4417 /* Don't forget about volatility even if this is a bitfield. */
4418 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4419 {
4420 op0 = copy_rtx (op0);
4421 MEM_VOLATILE_P (op0) = 1;
4422 }
4423
ccc98036
RS
4424 /* In cases where an aligned union has an unaligned object
4425 as a field, we might be extracting a BLKmode value from
4426 an integer-mode (e.g., SImode) object. Handle this case
4427 by doing the extract into an object as wide as the field
4428 (which we know to be the width of a basic mode), then
4429 storing into memory, and changing the mode to BLKmode. */
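
 /* Sketch (hypothetical layout): a BLKmode field that happens to sit
 inside an SImode word is handled as

 extract_bit_field -> value in an SImode pseudo
 emit_move_insn -> copied to an SImode stack temp
 PUT_MODE (..., BLKmode)

 so the caller still sees an ordinary BLKmode MEM. */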
bbf6f052 4430 if (mode1 == VOIDmode
0bba3f6f
RK
4431 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4432 && modifier != EXPAND_CONST_ADDRESS
4433 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
ccc98036
RS
4434 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4435 /* If the field isn't aligned enough to fetch as a memref,
4436 fetch it as a bit field. */
4437 || (STRICT_ALIGNMENT
4438 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4439 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4440 {
bbf6f052
RK
4441 enum machine_mode ext_mode = mode;
4442
4443 if (ext_mode == BLKmode)
4444 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4445
4446 if (ext_mode == BLKmode)
4447 abort ();
4448
4449 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4450 unsignedp, target, ext_mode, ext_mode,
034f9101 4451 alignment,
bbf6f052
RK
4452 int_size_in_bytes (TREE_TYPE (tem)));
4453 if (mode == BLKmode)
4454 {
4455 rtx new = assign_stack_temp (ext_mode,
4456 bitsize / BITS_PER_UNIT, 0);
4457
4458 emit_move_insn (new, op0);
4459 op0 = copy_rtx (new);
4460 PUT_MODE (op0, BLKmode);
092dded9 4461 MEM_IN_STRUCT_P (op0) = 1;
bbf6f052
RK
4462 }
4463
4464 return op0;
4465 }
4466
4467 /* Get a reference to just this component. */
4468 if (modifier == EXPAND_CONST_ADDRESS
4469 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4470 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4471 (bitpos / BITS_PER_UNIT)));
4472 else
4473 op0 = change_address (op0, mode1,
4474 plus_constant (XEXP (op0, 0),
4475 (bitpos / BITS_PER_UNIT)));
4476 MEM_IN_STRUCT_P (op0) = 1;
4477 MEM_VOLATILE_P (op0) |= volatilep;
4478 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4479 return op0;
4480 if (target == 0)
4481 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4482 convert_move (target, op0, unsignedp);
4483 return target;
4484 }
4485
4486 case OFFSET_REF:
4487 {
da120c2f 4488 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
bbf6f052 4489 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
906c4e36 4490 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
4491 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4492 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 4493 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
89742723 4494#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
bbf6f052
RK
4495 a location is accessed through a pointer to const does not mean
4496 that the value there can never change. */
4497 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4498#endif
4499 return temp;
4500 }
4501
4502 /* Intended for a reference to a buffer of a file-object in Pascal.
4503 But it's not certain that a special tree code will really be
4504 necessary for these. INDIRECT_REF might work for them. */
4505 case BUFFER_REF:
4506 abort ();
4507
7308a047 4508 case IN_EXPR:
7308a047 4509 {
d6a5ac33
RK
4510 /* Pascal set IN expression.
4511
4512 Algorithm:
4513 rlo = set_low - (set_low%bits_per_word);
4514 the_word = set [ (index - rlo)/bits_per_word ];
4515 bit_index = index % bits_per_word;
4516 bitmask = 1 << bit_index;
4517 return !!(the_word & bitmask); */
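
 /* Worked instance of the algorithm above, assuming 8 bits per unit
 and hypothetical bounds set_low = 3, index = 13:

 rlo = 3 - (3 % 8) = 0
 the_word = set[(13 - 0) / 8] = set[1]
 bit_index = 13 % 8 = 5
 bitmask = 1 << 5 = 0x20

 so the result is !!(set[1] & 0x20). */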
4518
7308a047
RS
4519 tree set = TREE_OPERAND (exp, 0);
4520 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 4521 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 4522 tree set_type = TREE_TYPE (set);
7308a047
RS
4523 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4524 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
4525 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4526 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4527 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4528 rtx setval = expand_expr (set, 0, VOIDmode, 0);
4529 rtx setaddr = XEXP (setval, 0);
4530 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
4531 rtx rlow;
4532 rtx diff, quo, rem, addr, bit, result;
7308a047 4533
d6a5ac33
RK
4534 preexpand_calls (exp);
4535
4536 /* If domain is empty, answer is no. Likewise if index is constant
4537 and out of bounds. */
4538 if ((TREE_CODE (set_high_bound) == INTEGER_CST
4539 && TREE_CODE (set_low_bound) == INTEGER_CST
4540 && tree_int_cst_lt (set_high_bound, set_low_bound)
4541 || (TREE_CODE (index) == INTEGER_CST
4542 && TREE_CODE (set_low_bound) == INTEGER_CST
4543 && tree_int_cst_lt (index, set_low_bound))
4544 || (TREE_CODE (set_high_bound) == INTEGER_CST
4545 && TREE_CODE (index) == INTEGER_CST
4546 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
4547 return const0_rtx;
4548
d6a5ac33
RK
4549 if (target == 0)
4550 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
4551
4552 /* If we get here, we have to generate the code for both cases
4553 (in range and out of range). */
4554
4555 op0 = gen_label_rtx ();
4556 op1 = gen_label_rtx ();
4557
4558 if (! (GET_CODE (index_val) == CONST_INT
4559 && GET_CODE (lo_r) == CONST_INT))
4560 {
17938e57 4561 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
d6a5ac33 4562 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
4563 emit_jump_insn (gen_blt (op1));
4564 }
4565
4566 if (! (GET_CODE (index_val) == CONST_INT
4567 && GET_CODE (hi_r) == CONST_INT))
4568 {
17938e57 4569 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
d6a5ac33 4570 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
4571 emit_jump_insn (gen_bgt (op1));
4572 }
4573
4574 /* Calculate the element number of bit zero in the first word
4575 of the set. */
4576 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
4577 rlow = GEN_INT (INTVAL (lo_r)
4578 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 4579 else
17938e57
RK
4580 rlow = expand_binop (index_mode, and_optab, lo_r,
4581 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 4582 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 4583
d6a5ac33
RK
4584 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
4585 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
4586
4587 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 4588 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 4589 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
4590 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4591
7308a047 4592 addr = memory_address (byte_mode,
d6a5ac33
RK
4593 expand_binop (index_mode, add_optab, diff,
4594 setaddr, NULL_RTX, iunsignedp,
17938e57 4595 OPTAB_LIB_WIDEN));
d6a5ac33 4596
7308a047
RS
 4597 /* Extract the bit we want to examine. */
4598 bit = expand_shift (RSHIFT_EXPR, byte_mode,
17938e57
RK
4599 gen_rtx (MEM, byte_mode, addr),
4600 make_tree (TREE_TYPE (index), rem),
4601 NULL_RTX, 1);
4602 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4603 GET_MODE (target) == byte_mode ? target : 0,
7308a047 4604 1, OPTAB_LIB_WIDEN);
17938e57
RK
4605
4606 if (result != target)
4607 convert_move (target, result, 1);
7308a047
RS
4608
4609 /* Output the code to handle the out-of-range case. */
4610 emit_jump (op0);
4611 emit_label (op1);
4612 emit_move_insn (target, const0_rtx);
4613 emit_label (op0);
4614 return target;
4615 }
4616
bbf6f052
RK
4617 case WITH_CLEANUP_EXPR:
4618 if (RTL_EXPR_RTL (exp) == 0)
4619 {
4620 RTL_EXPR_RTL (exp)
e287fd6e
RK
4621 = expand_expr (TREE_OPERAND (exp, 0),
4622 target ? target : const0_rtx,
4623 tmode, modifier);
906c4e36
RK
4624 cleanups_this_call
4625 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
bbf6f052
RK
4626 /* That's it for this cleanup. */
4627 TREE_OPERAND (exp, 2) = 0;
4628 }
4629 return RTL_EXPR_RTL (exp);
4630
4631 case CALL_EXPR:
4632 /* Check for a built-in function. */
4633 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
4634 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4635 == FUNCTION_DECL)
bbf6f052
RK
4636 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4637 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 4638
bbf6f052
RK
4639 /* If this call was expanded already by preexpand_calls,
4640 just return the result we got. */
4641 if (CALL_EXPR_RTL (exp) != 0)
4642 return CALL_EXPR_RTL (exp);
d6a5ac33 4643
8129842c 4644 return expand_call (exp, target, ignore);
bbf6f052
RK
4645
4646 case NON_LVALUE_EXPR:
4647 case NOP_EXPR:
4648 case CONVERT_EXPR:
4649 case REFERENCE_EXPR:
bbf6f052
RK
4650 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4651 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
d6a5ac33 4652
bbf6f052
RK
4653 if (TREE_CODE (type) == UNION_TYPE)
4654 {
4655 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4656 if (target == 0)
4657 {
4658 if (mode == BLKmode)
4659 {
4660 if (TYPE_SIZE (type) == 0
4661 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4662 abort ();
4663 target = assign_stack_temp (BLKmode,
4664 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4665 + BITS_PER_UNIT - 1)
4666 / BITS_PER_UNIT, 0);
4667 }
4668 else
d6a5ac33 4669 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
bbf6f052 4670 }
d6a5ac33 4671
bbf6f052
RK
4672 if (GET_CODE (target) == MEM)
4673 /* Store data into beginning of memory target. */
4674 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
4675 change_address (target, TYPE_MODE (valtype), 0), 0);
4676
bbf6f052
RK
4677 else if (GET_CODE (target) == REG)
4678 /* Store this field into a union of the proper type. */
4679 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4680 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4681 VOIDmode, 0, 1,
4682 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4683 else
4684 abort ();
4685
4686 /* Return the entire union. */
4687 return target;
4688 }
d6a5ac33 4689
1499e0a8 4690 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
4691 if (GET_MODE (op0) == mode)
4692 return op0;
12342f90 4693
d6a5ac33
RK
4694 /* If OP0 is a constant, just convert it into the proper mode. */
4695 if (CONSTANT_P (op0))
4696 return
4697 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4698 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 4699
26fcb35a
RS
4700 if (modifier == EXPAND_INITIALIZER)
4701 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 4702
bbf6f052
RK
4703 if (flag_force_mem && GET_CODE (op0) == MEM)
4704 op0 = copy_to_reg (op0);
4705
4706 if (target == 0)
d6a5ac33
RK
4707 return
4708 convert_to_mode (mode, op0,
4709 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 4710 else
d6a5ac33
RK
4711 convert_move (target, op0,
4712 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
4713 return target;
4714
4715 case PLUS_EXPR:
4716 /* We come here from MINUS_EXPR when the second operand is a constant. */
4717 plus_expr:
4718 this_optab = add_optab;
4719
4720 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4721 something else, make sure we add the register to the constant and
4722 then to the other thing. This case can occur during strength
4723 reduction and doing it this way will produce better code if the
4724 frame pointer or argument pointer is eliminated.
4725
4726 fold-const.c will ensure that the constant is always in the inner
4727 PLUS_EXPR, so the only case we need to do anything about is if
4728 sp, ap, or fp is our second argument, in which case we must swap
4729 the innermost first argument and our second argument. */
4730
4731 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4732 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4733 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4734 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4735 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4736 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4737 {
4738 tree t = TREE_OPERAND (exp, 1);
4739
4740 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4741 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4742 }
4743
4744 /* If the result is to be Pmode and we are adding an integer to
4745 something, we might be forming a constant. So try to use
4746 plus_constant. If it produces a sum and we can't accept it,
4747 use force_operand. This allows P = &ARR[const] to generate
4748 efficient code on machines where a SYMBOL_REF is not a valid
4749 address.
4750
4751 If this is an EXPAND_SUM call, always return the sum. */
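
 /* Illustration (hypothetical symbol): for P = &ARR[2] with 4-byte
 elements, plus_constant can fold the whole address to

 (plus:SI (symbol_ref:SI "arr") (const_int 8))

 and only if that sum is unacceptable to the caller does
 force_operand copy it into a register. */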
c980ac49
RS
4752 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4753 || mode == Pmode)
bbf6f052 4754 {
c980ac49
RS
4755 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4756 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4757 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4758 {
4759 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4760 EXPAND_SUM);
4761 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4762 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4763 op1 = force_operand (op1, target);
4764 return op1;
4765 }
bbf6f052 4766
c980ac49
RS
4767 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
 4768 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4769 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4770 {
4771 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4772 EXPAND_SUM);
4773 if (! CONSTANT_P (op0))
4774 {
4775 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4776 VOIDmode, modifier);
709f5be1
RS
4777 /* Don't go to both_summands if modifier
4778 says it's not right to return a PLUS. */
4779 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4780 goto binop2;
c980ac49
RS
4781 goto both_summands;
4782 }
4783 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4784 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4785 op0 = force_operand (op0, target);
4786 return op0;
4787 }
bbf6f052
RK
4788 }
4789
4790 /* No sense saving up arithmetic to be done
4791 if it's all in the wrong mode to form part of an address.
4792 And force_operand won't know whether to sign-extend or
4793 zero-extend. */
4794 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
c980ac49
RS
4795 || mode != Pmode)
4796 goto binop;
bbf6f052
RK
4797
4798 preexpand_calls (exp);
4799 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4800 subtarget = 0;
4801
4802 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
906c4e36 4803 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
bbf6f052 4804
c980ac49 4805 both_summands:
bbf6f052
RK
4806 /* Make sure any term that's a sum with a constant comes last. */
4807 if (GET_CODE (op0) == PLUS
4808 && CONSTANT_P (XEXP (op0, 1)))
4809 {
4810 temp = op0;
4811 op0 = op1;
4812 op1 = temp;
4813 }
4814 /* If adding to a sum including a constant,
4815 associate it to put the constant outside. */
4816 if (GET_CODE (op1) == PLUS
4817 && CONSTANT_P (XEXP (op1, 1)))
4818 {
4819 rtx constant_term = const0_rtx;
4820
4821 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4822 if (temp != 0)
4823 op0 = temp;
6f90e075
JW
4824 /* Ensure that MULT comes first if there is one. */
4825 else if (GET_CODE (op0) == MULT)
4826 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
bbf6f052
RK
4827 else
4828 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4829
4830 /* Let's also eliminate constants from op0 if possible. */
4831 op0 = eliminate_constant_term (op0, &constant_term);
4832
4833 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4834 their sum should be a constant. Form it into OP1, since the
4835 result we want will then be OP0 + OP1. */
4836
4837 temp = simplify_binary_operation (PLUS, mode, constant_term,
4838 XEXP (op1, 1));
4839 if (temp != 0)
4840 op1 = temp;
4841 else
4842 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4843 }
4844
4845 /* Put a constant term last and put a multiplication first. */
4846 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4847 temp = op1, op1 = op0, op0 = temp;
4848
4849 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4850 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4851
4852 case MINUS_EXPR:
ea87523e
RK
4853 /* For initializers, we are allowed to return a MINUS of two
4854 symbolic constants. Here we handle all cases when both operands
4855 are constant. */
bbf6f052
RK
4856 /* Handle difference of two symbolic constants,
4857 for the sake of an initializer. */
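
 /* Illustration (hypothetical initializer): something like

 static int d = (char *) &y - (char *) &x;

 arrives here with two symbolic operands and must come back as a
 single constant MINUS rtx, since no insns can be emitted while
 expanding an initializer. */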
4858 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4859 && really_constant_p (TREE_OPERAND (exp, 0))
4860 && really_constant_p (TREE_OPERAND (exp, 1)))
4861 {
906c4e36
RK
4862 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4863 VOIDmode, modifier);
4864 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4865 VOIDmode, modifier);
ea87523e
RK
4866
4867 /* If one operand is a CONST_INT, put it last. */
4868 if (GET_CODE (op0) == CONST_INT)
4869 temp = op0, op0 = op1, op1 = temp;
4870
4871 /* If the last operand is a CONST_INT, use plus_constant of
4872 the negated constant. Else make the MINUS. */
4873 if (GET_CODE (op1) == CONST_INT)
4874 return plus_constant (op0, - INTVAL (op1));
4875 else
4876 return gen_rtx (MINUS, mode, op0, op1);
bbf6f052
RK
4877 }
4878 /* Convert A - const to A + (-const). */
4879 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4880 {
4881 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4882 fold (build1 (NEGATE_EXPR, type,
4883 TREE_OPERAND (exp, 1))));
4884 goto plus_expr;
4885 }
4886 this_optab = sub_optab;
4887 goto binop;
4888
4889 case MULT_EXPR:
4890 preexpand_calls (exp);
4891 /* If first operand is constant, swap them.
4892 Thus the following special case checks need only
4893 check the second operand. */
4894 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4895 {
4896 register tree t1 = TREE_OPERAND (exp, 0);
4897 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4898 TREE_OPERAND (exp, 1) = t1;
4899 }
4900
4901 /* Attempt to return something suitable for generating an
4902 indexed address, for machines that support that. */
4903
4904 if (modifier == EXPAND_SUM && mode == Pmode
4905 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 4906 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
4907 {
4908 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4909
4910 /* Apply distributive law if OP0 is x+c. */
4911 if (GET_CODE (op0) == PLUS
4912 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4913 return gen_rtx (PLUS, mode,
4914 gen_rtx (MULT, mode, XEXP (op0, 0),
906c4e36
RK
4915 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4916 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4917 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
4918
4919 if (GET_CODE (op0) != REG)
906c4e36 4920 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
4921 if (GET_CODE (op0) != REG)
4922 op0 = copy_to_mode_reg (mode, op0);
4923
4924 return gen_rtx (MULT, mode, op0,
906c4e36 4925 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
4926 }
4927
4928 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4929 subtarget = 0;
4930
4931 /* Check for multiplying things that have been extended
4932 from a narrower type. If this machine supports multiplying
4933 in that narrower type with a result in the desired type,
4934 do it that way, and avoid the explicit type-conversion. */
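	  /* Editorial example (assumes a target providing a pattern such
	     as mulhisi3): given  short a, b;  the tree for
	     (int) a * (int) b  carries NOP_EXPRs widening both operands
	     from HImode; the test below lets us multiply in HImode with
	     an SImode result instead of widening first.  */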
4935 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4936 && TREE_CODE (type) == INTEGER_TYPE
4937 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4938 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4939 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4940 && int_fits_type_p (TREE_OPERAND (exp, 1),
4941 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4942 /* Don't use a widening multiply if a shift will do. */
4943 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
 4944 > HOST_BITS_PER_WIDE_INT)
4945 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4946 ||
4947 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4948 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4949 ==
4950 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4951 /* If both operands are extended, they must either both
4952 be zero-extended or both be sign-extended. */
4953 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4954 ==
4955 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4956 {
4957 enum machine_mode innermode
4958 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4959 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4960 ? umul_widen_optab : smul_widen_optab);
4961 if (mode == GET_MODE_WIDER_MODE (innermode)
4962 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4963 {
4964 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
 4965 NULL_RTX, VOIDmode, 0);
 4966 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4967 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4968 VOIDmode, 0);
4969 else
4970 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
 4971 NULL_RTX, VOIDmode, 0);
4972 goto binop2;
4973 }
4974 }
4975 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
 4976 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4977 return expand_mult (mode, op0, op1, target, unsignedp);
4978
4979 case TRUNC_DIV_EXPR:
4980 case FLOOR_DIV_EXPR:
4981 case CEIL_DIV_EXPR:
4982 case ROUND_DIV_EXPR:
4983 case EXACT_DIV_EXPR:
4984 preexpand_calls (exp);
4985 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4986 subtarget = 0;
4987 /* Possible optimization: compute the dividend with EXPAND_SUM
4988 then if the divisor is constant can optimize the case
4989 where some terms of the dividend have coeffs divisible by it. */
4990 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
 4991 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4992 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4993
4994 case RDIV_EXPR:
4995 this_optab = flodiv_optab;
4996 goto binop;
4997
4998 case TRUNC_MOD_EXPR:
4999 case FLOOR_MOD_EXPR:
5000 case CEIL_MOD_EXPR:
5001 case ROUND_MOD_EXPR:
5002 preexpand_calls (exp);
5003 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5004 subtarget = 0;
5005 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
 5006 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5007 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5008
5009 case FIX_ROUND_EXPR:
5010 case FIX_FLOOR_EXPR:
5011 case FIX_CEIL_EXPR:
5012 abort (); /* Not used for C. */
5013
5014 case FIX_TRUNC_EXPR:
 5015 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5016 if (target == 0)
5017 target = gen_reg_rtx (mode);
5018 expand_fix (target, op0, unsignedp);
5019 return target;
5020
5021 case FLOAT_EXPR:
 5022 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5023 if (target == 0)
5024 target = gen_reg_rtx (mode);
5025 /* expand_float can't figure out what to do if FROM has VOIDmode.
5026 So give it the correct mode. With -O, cse will optimize this. */
5027 if (GET_MODE (op0) == VOIDmode)
5028 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5029 op0);
5030 expand_float (target, op0,
5031 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5032 return target;
5033
5034 case NEGATE_EXPR:
 5035 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5036 temp = expand_unop (mode, neg_optab, op0, target, 0);
5037 if (temp == 0)
5038 abort ();
5039 return temp;
5040
5041 case ABS_EXPR:
5042 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5043
 5044 /* Handle complex values specially. */
5045 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5046 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5047 return expand_complex_abs (mode, op0, target, unsignedp);
 5048
5049 /* Unsigned abs is simply the operand. Testing here means we don't
5050 risk generating incorrect code below. */
5051 if (TREE_UNSIGNED (type))
5052 return op0;
5053
5054 /* First try to do it with a special abs instruction. */
5055 temp = expand_unop (mode, abs_optab, op0, target, 0);
5056 if (temp != 0)
5057 return temp;
5058
5059 /* If this machine has expensive jumps, we can do integer absolute
5060 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5061 where W is the width of MODE. */
5062
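	/* Worked example (added commentary): for a 32-bit x, EXTENDED
	   below is x >> 31 -- all ones when x < 0, zero otherwise.
	   If x < 0:  (x ^ -1) - (-1) = ~x + 1 = -x;
	   if x >= 0: (x ^ 0) - 0 = x.
	   So the xor/subtract pair yields abs (x) with no branch.  */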
5063 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5064 {
5065 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5066 size_int (GET_MODE_BITSIZE (mode) - 1),
 5067 NULL_RTX, 0);
5068
5069 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5070 OPTAB_LIB_WIDEN);
5071 if (temp != 0)
5072 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5073 OPTAB_LIB_WIDEN);
5074
5075 if (temp != 0)
5076 return temp;
5077 }
5078
5079 /* If that does not win, use conditional jump and negate. */
5080 target = original_target;
 5081 op1 = gen_label_rtx ();
 5082 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
 5083 || GET_MODE (target) != mode
 5084 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5085 || (GET_CODE (target) == REG
5086 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5087 target = gen_reg_rtx (mode);
 5088
 5089 emit_move_insn (target, op0);
 5090 NO_DEFER_POP;
5091
5092 /* If this mode is an integer too wide to compare properly,
5093 compare word by word. Rely on CSE to optimize constant cases. */
5094 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode))
5095 do_jump_by_parts_greater_rtx (mode, 0, target, const0_rtx,
5096 NULL_RTX, op1);
5097 else
5098 {
5099 temp = compare_from_rtx (target, const0_rtx, GE, 0, mode,
5100 NULL_RTX, 0);
5101 if (temp == const1_rtx)
5102 return target;
5103 else if (temp != const0_rtx)
5104 {
5105 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
 5106 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op1));
5107 else
5108 abort ();
5109 }
5110 }
5111
5112 op0 = expand_unop (mode, neg_optab, target, target, 0);
5113 if (op0 != target)
5114 emit_move_insn (target, op0);
 5115 emit_label (op1);
5116 OK_DEFER_POP;
5117 return target;
5118
5119 case MAX_EXPR:
5120 case MIN_EXPR:
5121 target = original_target;
5122 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
 5123 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
 5124 || GET_MODE (target) != mode
5125 || (GET_CODE (target) == REG
5126 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5127 target = gen_reg_rtx (mode);
 5128 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5129 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5130
5131 /* First try to do it with a special MIN or MAX instruction.
5132 If that does not win, use a conditional jump to select the proper
5133 value. */
5134 this_optab = (TREE_UNSIGNED (type)
5135 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5136 : (code == MIN_EXPR ? smin_optab : smax_optab));
5137
5138 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5139 OPTAB_WIDEN);
5140 if (temp != 0)
5141 return temp;
5142
5143 if (target != op0)
5144 emit_move_insn (target, op0);
 5145
 5146 op0 = gen_label_rtx ();
 5147
5148 /* If this mode is an integer too wide to compare properly,
5149 compare word by word. Rely on cse to optimize constant cases. */
 5150 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
 5151 {
 5152 if (code == MAX_EXPR)
5153 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5154 target, op1, NULL_RTX, op0);
 5155 else
5156 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5157 op1, target, NULL_RTX, op0);
 5158 emit_move_insn (target, op1);
 5159 }
5160 else
5161 {
5162 if (code == MAX_EXPR)
5163 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5164 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5165 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5166 else
5167 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5168 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5169 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
 5170 if (temp == const0_rtx)
 5171 emit_move_insn (target, op1);
5172 else if (temp != const_true_rtx)
5173 {
5174 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5175 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5176 else
5177 abort ();
 5178 emit_move_insn (target, op1);
5179 }
5180 }
5181 emit_label (op0);
5182 return target;
5183
5184 case BIT_NOT_EXPR:
5185 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5186 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5187 if (temp == 0)
5188 abort ();
5189 return temp;
5190
5191 case FFS_EXPR:
5192 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5193 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5194 if (temp == 0)
5195 abort ();
5196 return temp;
5197
5198 /* ??? Can optimize bitwise operations with one arg constant.
5199 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5200 and (a bitwise1 b) bitwise2 b (etc)
5201 but that is probably not worth while. */
5202
5203 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
5204 boolean values when we want in all cases to compute both of them. In
5205 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5206 as actual zero-or-1 values and then bitwise anding. In cases where
5207 there cannot be any side effects, better code would be made by
5208 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5209 how to recognize those cases. */
5210
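    /* Editorial example (added commentary): for a TRUTH_AND_EXPR of A
       and B, both A and B are evaluated to 0-or-1 values and bitwise
       anded, whereas a TRUTH_ANDIF_EXPR would branch around B
       whenever A is zero.  */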
5211 case TRUTH_AND_EXPR:
5212 case BIT_AND_EXPR:
5213 this_optab = and_optab;
5214 goto binop;
5215
5216 case TRUTH_OR_EXPR:
5217 case BIT_IOR_EXPR:
5218 this_optab = ior_optab;
5219 goto binop;
5220
 5221 case TRUTH_XOR_EXPR:
5222 case BIT_XOR_EXPR:
5223 this_optab = xor_optab;
5224 goto binop;
5225
5226 case LSHIFT_EXPR:
5227 case RSHIFT_EXPR:
5228 case LROTATE_EXPR:
5229 case RROTATE_EXPR:
5230 preexpand_calls (exp);
5231 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5232 subtarget = 0;
5233 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5234 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5235 unsignedp);
5236
5237 /* Could determine the answer when only additive constants differ. Also,
5238 the addition of one can be handled by changing the condition. */
5239 case LT_EXPR:
5240 case LE_EXPR:
5241 case GT_EXPR:
5242 case GE_EXPR:
5243 case EQ_EXPR:
5244 case NE_EXPR:
5245 preexpand_calls (exp);
5246 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5247 if (temp != 0)
5248 return temp;
 5249
5250 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5251 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5252 && original_target
5253 && GET_CODE (original_target) == REG
5254 && (GET_MODE (original_target)
5255 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5256 {
5257 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
5258 VOIDmode, 0);
5259
5260 if (temp != original_target)
5261 temp = copy_to_reg (temp);
 5262
 5263 op1 = gen_label_rtx ();
 5264 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5265 GET_MODE (temp), unsignedp, 0);
5266 emit_jump_insn (gen_beq (op1));
5267 emit_move_insn (temp, const1_rtx);
5268 emit_label (op1);
5269 return temp;
5270 }
 5271
5272 /* If no set-flag instruction, must generate a conditional
5273 store into a temporary variable. Drop through
5274 and handle this like && and ||. */
5275
5276 case TRUTH_ANDIF_EXPR:
5277 case TRUTH_ORIF_EXPR:
5278 if (! ignore
5279 && (target == 0 || ! safe_from_p (target, exp)
5280 /* Make sure we don't have a hard reg (such as function's return
5281 value) live across basic blocks, if not optimizing. */
5282 || (!optimize && GET_CODE (target) == REG
5283 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
 5284 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5285
5286 if (target)
5287 emit_clr_insn (target);
5288
5289 op1 = gen_label_rtx ();
5290 jumpifnot (exp, op1);
5291
5292 if (target)
5293 emit_0_to_1_insn (target);
5294
 5295 emit_label (op1);
 5296 return ignore ? const0_rtx : target;
5297
5298 case TRUTH_NOT_EXPR:
5299 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5300 /* The parser is careful to generate TRUTH_NOT_EXPR
5301 only with operands that are always zero or one. */
 5302 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5303 target, 1, OPTAB_LIB_WIDEN);
5304 if (temp == 0)
5305 abort ();
5306 return temp;
5307
5308 case COMPOUND_EXPR:
5309 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5310 emit_queue ();
5311 return expand_expr (TREE_OPERAND (exp, 1),
5312 (ignore ? const0_rtx : target),
5313 VOIDmode, 0);
5314
5315 case COND_EXPR:
5316 {
5317 /* Note that COND_EXPRs whose type is a structure or union
5318 are required to be constructed to contain assignments of
5319 a temporary variable, so that we can evaluate them here
5320 for side effect only. If type is void, we must do likewise. */
5321
5322 /* If an arm of the branch requires a cleanup,
5323 only that cleanup is performed. */
5324
5325 tree singleton = 0;
5326 tree binary_op = 0, unary_op = 0;
5327 tree old_cleanups = cleanups_this_call;
5328 cleanups_this_call = 0;
5329
5330 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5331 convert it to our mode, if necessary. */
5332 if (integer_onep (TREE_OPERAND (exp, 1))
5333 && integer_zerop (TREE_OPERAND (exp, 2))
5334 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5335 {
5336 if (ignore)
5337 {
5338 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5339 modifier);
5340 return const0_rtx;
5341 }
5342
5343 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5344 if (GET_MODE (op0) == mode)
5345 return op0;
 5346
5347 if (target == 0)
5348 target = gen_reg_rtx (mode);
5349 convert_move (target, op0, unsignedp);
5350 return target;
5351 }
5352
5353 /* If we are not to produce a result, we have no target. Otherwise,
5354 if a target was specified use it; it will not be used as an
5355 intermediate target unless it is safe. If no target, use a
5356 temporary. */
5357
 5358 if (ignore)
5359 temp = 0;
5360 else if (original_target
5361 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
5362 && GET_MODE (original_target) == mode)
5363 temp = original_target;
5364 else if (mode == BLKmode)
5365 {
5366 if (TYPE_SIZE (type) == 0
5367 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5368 abort ();
 5369
5370 temp = assign_stack_temp (BLKmode,
5371 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5372 + BITS_PER_UNIT - 1)
5373 / BITS_PER_UNIT, 0);
5374 MEM_IN_STRUCT_P (temp)
5375 = (TREE_CODE (type) == RECORD_TYPE
5376 || TREE_CODE (type) == UNION_TYPE
5377 || TREE_CODE (type) == QUAL_UNION_TYPE
5378 || TREE_CODE (type) == ARRAY_TYPE);
5379 }
5380 else
5381 temp = gen_reg_rtx (mode);
5382
5383 /* Check for X ? A + B : A. If we have this, we can copy
5384 A to the output and conditionally add B. Similarly for unary
5385 operations. Don't do this if X has side-effects because
5386 those side effects might affect A or B and the "?" operation is
5387 a sequence point in ANSI. (We test for side effects later.) */
5388
5389 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5390 && operand_equal_p (TREE_OPERAND (exp, 2),
5391 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5392 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5393 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5394 && operand_equal_p (TREE_OPERAND (exp, 1),
5395 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5396 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5397 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5398 && operand_equal_p (TREE_OPERAND (exp, 2),
5399 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5400 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5401 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5402 && operand_equal_p (TREE_OPERAND (exp, 1),
5403 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5404 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5405
5406 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5407 operation, do this as A + (X != 0). Similarly for other simple
5408 binary operators. */
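	/* Editorial example: X ? A + 1 : A is rewritten as
	   A + (X != 0), so a store-flag instruction producing 0 or 1
	   from X replaces the conditional branch entirely.  */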
 5409 if (temp && singleton && binary_op
5410 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5411 && (TREE_CODE (binary_op) == PLUS_EXPR
5412 || TREE_CODE (binary_op) == MINUS_EXPR
5413 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5414 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5415 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5416 && integer_onep (TREE_OPERAND (binary_op, 1))
5417 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5418 {
5419 rtx result;
5420 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5421 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5422 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5423 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5424 : and_optab);
5425
5426 /* If we had X ? A : A + 1, do this as A + (X == 0).
5427
5428 We have to invert the truth value here and then put it
5429 back later if do_store_flag fails. We cannot simply copy
5430 TREE_OPERAND (exp, 0) to another variable and modify that
5431 because invert_truthvalue can modify the tree pointed to
5432 by its argument. */
5433 if (singleton == TREE_OPERAND (exp, 1))
5434 TREE_OPERAND (exp, 0)
5435 = invert_truthvalue (TREE_OPERAND (exp, 0));
5436
5437 result = do_store_flag (TREE_OPERAND (exp, 0),
5438 (safe_from_p (temp, singleton)
5439 ? temp : NULL_RTX),
5440 mode, BRANCH_COST <= 1);
5441
5442 if (result)
5443 {
 5444 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5445 return expand_binop (mode, boptab, op1, result, temp,
5446 unsignedp, OPTAB_LIB_WIDEN);
5447 }
5448 else if (singleton == TREE_OPERAND (exp, 1))
5449 TREE_OPERAND (exp, 0)
5450 = invert_truthvalue (TREE_OPERAND (exp, 0));
5451 }
5452
5453 NO_DEFER_POP;
5454 op0 = gen_label_rtx ();
5455
5456 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5457 {
5458 if (temp != 0)
5459 {
5460 /* If the target conflicts with the other operand of the
5461 binary op, we can't use it. Also, we can't use the target
5462 if it is a hard register, because evaluating the condition
5463 might clobber it. */
5464 if ((binary_op
5465 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5466 || (GET_CODE (temp) == REG
5467 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5468 temp = gen_reg_rtx (mode);
5469 store_expr (singleton, temp, 0);
5470 }
5471 else
 5472 expand_expr (singleton,
 5473 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5474 if (cleanups_this_call)
5475 {
5476 sorry ("aggregate value in COND_EXPR");
5477 cleanups_this_call = 0;
5478 }
5479 if (singleton == TREE_OPERAND (exp, 1))
5480 jumpif (TREE_OPERAND (exp, 0), op0);
5481 else
5482 jumpifnot (TREE_OPERAND (exp, 0), op0);
5483
5484 if (binary_op && temp == 0)
5485 /* Just touch the other operand. */
5486 expand_expr (TREE_OPERAND (binary_op, 1),
 5487 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5488 else if (binary_op)
5489 store_expr (build (TREE_CODE (binary_op), type,
5490 make_tree (type, temp),
5491 TREE_OPERAND (binary_op, 1)),
5492 temp, 0);
5493 else
5494 store_expr (build1 (TREE_CODE (unary_op), type,
5495 make_tree (type, temp)),
5496 temp, 0);
5497 op1 = op0;
5498 }
5499#if 0
5500 /* This is now done in jump.c and is better done there because it
5501 produces shorter register lifetimes. */
5502
5503 /* Check for both possibilities either constants or variables
5504 in registers (but not the same as the target!). If so, can
5505 save branches by assigning one, branching, and assigning the
5506 other. */
5507 else if (temp && GET_MODE (temp) != BLKmode
5508 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5509 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5510 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5511 && DECL_RTL (TREE_OPERAND (exp, 1))
5512 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5513 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5514 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5515 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5516 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5517 && DECL_RTL (TREE_OPERAND (exp, 2))
5518 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5519 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5520 {
5521 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5522 temp = gen_reg_rtx (mode);
5523 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5524 jumpifnot (TREE_OPERAND (exp, 0), op0);
5525 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5526 op1 = op0;
5527 }
5528#endif
5529 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5530 comparison operator. If we have one of these cases, set the
5531 output to A, branch on A (cse will merge these two references),
5532 then set the output to FOO. */
5533 else if (temp
5534 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5535 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5536 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5537 TREE_OPERAND (exp, 1), 0)
5538 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5539 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5540 {
5541 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5542 temp = gen_reg_rtx (mode);
5543 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5544 jumpif (TREE_OPERAND (exp, 0), op0);
5545 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5546 op1 = op0;
5547 }
5548 else if (temp
5549 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5550 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5551 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5552 TREE_OPERAND (exp, 2), 0)
5553 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5554 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5555 {
5556 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5557 temp = gen_reg_rtx (mode);
5558 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5559 jumpifnot (TREE_OPERAND (exp, 0), op0);
5560 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5561 op1 = op0;
5562 }
5563 else
5564 {
5565 op1 = gen_label_rtx ();
5566 jumpifnot (TREE_OPERAND (exp, 0), op0);
5567 if (temp != 0)
5568 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5569 else
5570 expand_expr (TREE_OPERAND (exp, 1),
5571 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5572 if (cleanups_this_call)
5573 {
5574 sorry ("aggregate value in COND_EXPR");
5575 cleanups_this_call = 0;
5576 }
5577
5578 emit_queue ();
5579 emit_jump_insn (gen_jump (op1));
5580 emit_barrier ();
5581 emit_label (op0);
5582 if (temp != 0)
5583 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5584 else
5585 expand_expr (TREE_OPERAND (exp, 2),
5586 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5587 }
5588
5589 if (cleanups_this_call)
5590 {
5591 sorry ("aggregate value in COND_EXPR");
5592 cleanups_this_call = 0;
5593 }
5594
5595 emit_queue ();
5596 emit_label (op1);
5597 OK_DEFER_POP;
5598 cleanups_this_call = old_cleanups;
5599 return temp;
5600 }
5601
5602 case TARGET_EXPR:
5603 {
5604 /* Something needs to be initialized, but we didn't know
5605 where that thing was when building the tree. For example,
5606 it could be the return value of a function, or a parameter
5607 to a function which lays down in the stack, or a temporary
5608 variable which must be passed by reference.
5609
5610 We guarantee that the expression will either be constructed
5611 or copied into our original target. */
5612
5613 tree slot = TREE_OPERAND (exp, 0);
 5614 tree exp1;
5615
5616 if (TREE_CODE (slot) != VAR_DECL)
5617 abort ();
5618
5619 if (target == 0)
5620 {
5621 if (DECL_RTL (slot) != 0)
5622 {
5623 target = DECL_RTL (slot);
 5624 /* If we have already expanded the slot, don't do
 5625 it again. (mrs) */
5626 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5627 return target;
 5628 }
5629 else
5630 {
5631 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5632 /* All temp slots at this level must not conflict. */
5633 preserve_temp_slots (target);
5634 DECL_RTL (slot) = target;
5635 }
5636
5637 /* We set IGNORE when we know that we're already
5638 doing this for a cleanup. */
5639 if (ignore == 0)
5640 {
5641 /* Since SLOT is not known to the called function
5642 to belong to its stack frame, we must build an explicit
5643 cleanup. This case occurs when we must build up a reference
5644 to pass the reference as an argument. In this case,
5645 it is very likely that such a reference need not be
5646 built here. */
5647
5648 if (TREE_OPERAND (exp, 2) == 0)
5649 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5650 if (TREE_OPERAND (exp, 2))
5651 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5652 cleanups_this_call);
5653 }
5654 }
5655 else
5656 {
5657 /* This case does occur, when expanding a parameter which
5658 needs to be constructed on the stack. The target
5659 is the actual stack address that we want to initialize.
5660 The function we call will perform the cleanup in this case. */
5661
 5662 /* If we have already assigned it space, use that space,
 5663 not the target that we were passed in, as our target
 5664 parameter is only a hint. */
5665 if (DECL_RTL (slot) != 0)
5666 {
5667 target = DECL_RTL (slot);
 5668 /* If we have already expanded the slot, don't do
 5669 it again. (mrs) */
5670 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5671 return target;
5672 }
5673
5674 DECL_RTL (slot) = target;
5675 }
5676
5677 exp1 = TREE_OPERAND (exp, 1);
5678 /* Mark it as expanded. */
5679 TREE_OPERAND (exp, 1) = NULL_TREE;
5680
5681 return expand_expr (exp1, target, tmode, modifier);
5682 }
5683
5684 case INIT_EXPR:
5685 {
5686 tree lhs = TREE_OPERAND (exp, 0);
5687 tree rhs = TREE_OPERAND (exp, 1);
5688 tree noncopied_parts = 0;
5689 tree lhs_type = TREE_TYPE (lhs);
5690
5691 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5692 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5693 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5694 TYPE_NONCOPIED_PARTS (lhs_type));
5695 while (noncopied_parts != 0)
5696 {
5697 expand_assignment (TREE_VALUE (noncopied_parts),
5698 TREE_PURPOSE (noncopied_parts), 0, 0);
5699 noncopied_parts = TREE_CHAIN (noncopied_parts);
5700 }
5701 return temp;
5702 }
5703
5704 case MODIFY_EXPR:
5705 {
5706 /* If lhs is complex, expand calls in rhs before computing it.
5707 That's so we don't compute a pointer and save it over a call.
5708 If lhs is simple, compute it first so we can give it as a
5709 target if the rhs is just a call. This avoids an extra temp and copy
5710 and that prevents a partial-subsumption which makes bad code.
5711 Actually we could treat component_ref's of vars like vars. */
5712
5713 tree lhs = TREE_OPERAND (exp, 0);
5714 tree rhs = TREE_OPERAND (exp, 1);
5715 tree noncopied_parts = 0;
5716 tree lhs_type = TREE_TYPE (lhs);
5717
5718 temp = 0;
5719
5720 if (TREE_CODE (lhs) != VAR_DECL
5721 && TREE_CODE (lhs) != RESULT_DECL
5722 && TREE_CODE (lhs) != PARM_DECL)
5723 preexpand_calls (exp);
5724
5725 /* Check for |= or &= of a bitfield of size one into another bitfield
5726 of size 1. In this case, (unless we need the result of the
5727 assignment) we can do this more efficiently with a
5728 test followed by an assignment, if necessary.
5729
5730 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5731 things change so we do, this code should be enhanced to
5732 support it. */
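	/* Editorial example: with one-bit fields,  s.f |= t.f;  can be
	   emitted as  if (t.f) s.f = 1;  -- a jump and a constant store
	   instead of a read-modify-write of the bitfield.  */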
5733 if (ignore
5734 && TREE_CODE (lhs) == COMPONENT_REF
5735 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5736 || TREE_CODE (rhs) == BIT_AND_EXPR)
5737 && TREE_OPERAND (rhs, 0) == lhs
5738 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5739 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5740 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5741 {
5742 rtx label = gen_label_rtx ();
5743
5744 do_jump (TREE_OPERAND (rhs, 1),
5745 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5746 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5747 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5748 (TREE_CODE (rhs) == BIT_IOR_EXPR
5749 ? integer_one_node
5750 : integer_zero_node)),
5751 0, 0);
 5752 do_pending_stack_adjust ();
5753 emit_label (label);
5754 return const0_rtx;
5755 }
5756
5757 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5758 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5759 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5760 TYPE_NONCOPIED_PARTS (lhs_type));
5761
5762 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5763 while (noncopied_parts != 0)
5764 {
5765 expand_assignment (TREE_PURPOSE (noncopied_parts),
5766 TREE_VALUE (noncopied_parts), 0, 0);
5767 noncopied_parts = TREE_CHAIN (noncopied_parts);
5768 }
5769 return temp;
5770 }
5771
5772 case PREINCREMENT_EXPR:
5773 case PREDECREMENT_EXPR:
5774 return expand_increment (exp, 0);
5775
5776 case POSTINCREMENT_EXPR:
5777 case POSTDECREMENT_EXPR:
5778 /* Faster to treat as pre-increment if result is not used. */
5779 return expand_increment (exp, ! ignore);
5780
5781 case ADDR_EXPR:
5782 /* If nonzero, TEMP will be set to the address of something that might
5783 be a MEM corresponding to a stack slot. */
5784 temp = 0;
5785
5786 /* Are we taking the address of a nested function? */
5787 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5788 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5789 {
5790 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5791 op0 = force_operand (op0, target);
5792 }
5793 else
5794 {
5795 /* We make sure to pass const0_rtx down if we came in with
5796 ignore set, to avoid doing the cleanups twice for something. */
5797 op0 = expand_expr (TREE_OPERAND (exp, 0),
5798 ignore ? const0_rtx : NULL_RTX, VOIDmode,
5799 (modifier == EXPAND_INITIALIZER
5800 ? modifier : EXPAND_CONST_ADDRESS));
5801
5802 /* We would like the object in memory. If it is a constant,
5803 we can have it be statically allocated into memory. For
5804 a non-constant (REG or SUBREG), we need to allocate some
5805 memory and store the value into it. */
5806
5807 if (CONSTANT_P (op0))
5808 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5809 op0);
5810 else if (GET_CODE (op0) == MEM)
5811 temp = XEXP (op0, 0);
 5812
5813 /* These cases happen in Fortran. Is that legitimate?
5814 Should Fortran work in another way?
5815 Do they happen in C? */
5816 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5817 || GET_CODE (op0) == CONCAT)
5818 {
 5819 /* If this object is in a register, it must not
 5820 be BLKmode. */
5821 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5822 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5823 rtx memloc
5824 = assign_stack_temp (inner_mode,
5825 int_size_in_bytes (inner_type), 1);
5826
5827 emit_move_insn (memloc, op0);
5828 op0 = memloc;
5829 }
5830
5831 if (GET_CODE (op0) != MEM)
5832 abort ();
5833
5834 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5835 return XEXP (op0, 0);
 5836
5837 op0 = force_operand (XEXP (op0, 0), target);
5838 }
 5839
 5840 if (flag_force_addr && GET_CODE (op0) != REG)
5841 op0 = force_reg (Pmode, op0);
5842
5843 if (GET_CODE (op0) == REG)
5844 mark_reg_pointer (op0);
5845
5846 /* If we might have had a temp slot, add an equivalent address
5847 for it. */
5848 if (temp != 0)
5849 update_temp_slot_address (temp, op0);
5850
bbf6f052
RK
5851 return op0;
5852
5853 case ENTRY_VALUE_EXPR:
5854 abort ();
5855
5856 /* COMPLEX type for Extended Pascal & Fortran */
5857 case COMPLEX_EXPR:
5858 {
5859 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
 5860 rtx insns;
5861
5862 /* Get the rtx code of the operands. */
5863 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5864 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5865
5866 if (! target)
5867 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5868
 5869 start_sequence ();
5870
5871 /* Move the real (op0) and imaginary (op1) parts to their location. */
5872 emit_move_insn (gen_realpart (mode, target), op0);
5873 emit_move_insn (gen_imagpart (mode, target), op1);
 5874
5875 insns = get_insns ();
5876 end_sequence ();
5877
 5878 /* Complex construction should appear as a single unit. */
5879 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
5880 each with a separate pseudo as destination.
5881 It's not correct for flow to treat them as a unit. */
 5882 if (GET_CODE (target) != CONCAT)
5883 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
5884 else
5885 emit_insns (insns);
5886
5887 return target;
5888 }
5889
5890 case REALPART_EXPR:
5891 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5892 return gen_realpart (mode, op0);
5893
5894 case IMAGPART_EXPR:
5895 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5896 return gen_imagpart (mode, op0);
5897
5898 case CONJ_EXPR:
5899 {
 5900 rtx imag_t;
 5901 rtx insns;
5902
5903 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5904
5905 if (! target)
 5906 target = gen_reg_rtx (mode);
 5907
 5908 start_sequence ();
5909
5910 /* Store the realpart and the negated imagpart to target. */
 5911 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
 5912
 5913 imag_t = gen_imagpart (mode, target);
5914 temp = expand_unop (mode, neg_optab,
5915 gen_imagpart (mode, op0), imag_t, 0);
5916 if (temp != imag_t)
5917 emit_move_insn (imag_t, temp);
5918
5919 insns = get_insns ();
5920 end_sequence ();
5921
 5922 /* Conjugate should appear as a single unit.
 5923 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
5924 each with a separate pseudo as destination.
5925 It's not correct for flow to treat them as a unit. */
 5926 if (GET_CODE (target) != CONCAT)
5927 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
5928 else
5929 emit_insns (insns);
5930
5931 return target;
5932 }
5933
 5934 case ERROR_MARK:
5935 op0 = CONST0_RTX (tmode);
5936 if (op0 != 0)
5937 return op0;
5938 return const0_rtx;
5939
5940 default:
 5941 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
5942 }
5943
5944 /* Here to do an ordinary binary operator, generating an instruction
5945 from the optab already placed in `this_optab'. */
5946 binop:
5947 preexpand_calls (exp);
5948 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5949 subtarget = 0;
5950 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
 5951 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5952 binop2:
5953 temp = expand_binop (mode, this_optab, op0, op1, target,
5954 unsignedp, OPTAB_LIB_WIDEN);
5955 if (temp == 0)
5956 abort ();
5957 return temp;
5958}
 5959
 5960
5961/* Emit bytecode to evaluate the given expression EXP to the stack. */
5962void
5963bc_expand_expr (exp)
5964 tree exp;
 5965 {
5966 enum tree_code code;
5967 tree type, arg0;
5968 rtx r;
5969 struct binary_operator *binoptab;
5970 struct unary_operator *unoptab;
5971 struct increment_operator *incroptab;
5972 struct bc_label *lab, *lab1;
5973 enum bytecode_opcode opcode;
5974
5975
5976 code = TREE_CODE (exp);
5977
5978 switch (code)
 5979 {
5980 case PARM_DECL:
5981
5982 if (DECL_RTL (exp) == 0)
 5983 {
5984 error_with_decl (exp, "prior parameter's size depends on `%s'");
5985 return;
 5986 }
5987
5988 bc_load_parmaddr (DECL_RTL (exp));
5989 bc_load_memory (TREE_TYPE (exp), exp);
5990
5991 return;
5992
5993 case VAR_DECL:
5994
5995 if (DECL_RTL (exp) == 0)
5996 abort ();
5997
5998#if 0
 5999 if (BYTECODE_LABEL (DECL_RTL (exp)))
6000 bc_load_externaddr (DECL_RTL (exp));
6001 else
6002 bc_load_localaddr (DECL_RTL (exp));
6003#endif
6004 if (TREE_PUBLIC (exp))
6005 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6006 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
6007 else
6008 bc_load_localaddr (DECL_RTL (exp));
6009
6010 bc_load_memory (TREE_TYPE (exp), exp);
6011 return;
6012
6013 case INTEGER_CST:
6014
6015#ifdef DEBUG_PRINT_CODE
6016 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6017#endif
 6018 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
 6019 ? SImode
 6020 : TYPE_MODE (TREE_TYPE (exp)))],
6021 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6022 return;
6023
6024 case REAL_CST:
6025
 6026#if 0
6027#ifdef DEBUG_PRINT_CODE
6028 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6029#endif
 6030 /* FIX THIS: find a better way to pass real_cst's. -bson */
6031 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6032 (double) TREE_REAL_CST (exp));
6033#else
6034 abort ();
6035#endif
6036
6037 return;
6038
6039 case CALL_EXPR:
6040
6041 /* We build a call description vector describing the type of
6042 the return value and of the arguments; this call vector,
6043 together with a pointer to a location for the return value
6044 and the base of the argument list, is passed to the low
6045 level machine dependent call subroutine, which is responsible
6046 for putting the arguments wherever real functions expect
6047 them, as well as getting the return value back. */
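      /* Editorial sketch of the resulting layout (inferred from the
	 code below, not normative):
	   { nargs, ret_typecode, ret_size,
	     arg1_typecode, arg1_size, arg2_typecode, arg2_size, ... }  */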
6048 {
6049 tree calldesc = 0, arg;
6050 int nargs = 0, i;
6051 rtx retval;
6052
6053 /* Push the evaluated args on the evaluation stack in reverse
6054 order. Also make an entry for each arg in the calldesc
6055 vector while we're at it. */
6056
6057 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6058
6059 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6060 {
6061 ++nargs;
6062 bc_expand_expr (TREE_VALUE (arg));
6063
6064 calldesc = tree_cons ((tree) 0,
6065 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6066 calldesc);
6067 calldesc = tree_cons ((tree) 0,
6068 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6069 calldesc);
6070 }
6071
6072 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6073
6074 /* Allocate a location for the return value and push its
6075 address on the evaluation stack. Also make an entry
6076 at the front of the calldesc for the return value type. */
6077
6078 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6079 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6080 bc_load_localaddr (retval);
6081
6082 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6083 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6084
6085 /* Prepend the argument count. */
6086 calldesc = tree_cons ((tree) 0,
6087 build_int_2 (nargs, 0),
6088 calldesc);
6089
6090 /* Push the address of the call description vector on the stack. */
6091 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6092 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6093 build_index_type (build_int_2 (nargs * 2, 0)));
6094 r = output_constant_def (calldesc);
6095 bc_load_externaddr (r);
6096
6097 /* Push the address of the function to be called. */
6098 bc_expand_expr (TREE_OPERAND (exp, 0));
6099
6100 /* Call the function, popping its address and the calldesc vector
6101 address off the evaluation stack in the process. */
6102 bc_emit_instruction (call);
6103
6104 /* Pop the arguments off the stack. */
6105 bc_adjust_stack (nargs);
6106
6107 /* Load the return value onto the stack. */
6108 bc_load_localaddr (retval);
6109 bc_load_memory (type, TREE_OPERAND (exp, 0));
6110 }
6111 return;
6112
6113 case SAVE_EXPR:
6114
6115 if (!SAVE_EXPR_RTL (exp))
 6116 {
6117 /* First time around: copy to local variable */
6118 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6119 TYPE_ALIGN (TREE_TYPE(exp)));
6120 bc_expand_expr (TREE_OPERAND (exp, 0));
 6121 bc_emit_instruction (duplicate);
6122
6123 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6124 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
 6125 }
 6126 else
 6127 {
6128 /* Consecutive reference: use saved copy */
6129 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6130 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
 6131 }
6132 return;
6133
6134#if 0
6135 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6136 how are they handled instead? */
6137 case LET_STMT:
6138
6139 TREE_USED (exp) = 1;
6140 bc_expand_expr (STMT_BODY (exp));
6141 return;
6142#endif
6143
6144 case NOP_EXPR:
6145 case CONVERT_EXPR:
6146
6147 bc_expand_expr (TREE_OPERAND (exp, 0));
6148 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6149 return;
6150
6151 case MODIFY_EXPR:
6152
 6153 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6154 return;
6155
6156 case ADDR_EXPR:
6157
6158 bc_expand_address (TREE_OPERAND (exp, 0));
6159 return;
6160
6161 case INDIRECT_REF:
6162
6163 bc_expand_expr (TREE_OPERAND (exp, 0));
6164 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6165 return;
6166
6167 case ARRAY_REF:
6168
6169 bc_expand_expr (bc_canonicalize_array_ref (exp));
6170 return;
6171
6172 case COMPONENT_REF:
6173
6174 bc_expand_component_address (exp);
6175
6176 /* If we have a bitfield, generate a proper load */
6177 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6178 return;
6179
6180 case COMPOUND_EXPR:
6181
6182 bc_expand_expr (TREE_OPERAND (exp, 0));
6183 bc_emit_instruction (drop);
6184 bc_expand_expr (TREE_OPERAND (exp, 1));
6185 return;
6186
6187 case COND_EXPR:
6188
6189 bc_expand_expr (TREE_OPERAND (exp, 0));
6190 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6191 lab = bc_get_bytecode_label ();
 6192 bc_emit_bytecode (xjumpifnot);
6193 bc_emit_bytecode_labelref (lab);
6194
6195#ifdef DEBUG_PRINT_CODE
6196 fputc ('\n', stderr);
6197#endif
6198 bc_expand_expr (TREE_OPERAND (exp, 1));
6199 lab1 = bc_get_bytecode_label ();
6200 bc_emit_bytecode (jump);
6201 bc_emit_bytecode_labelref (lab1);
6202
6203#ifdef DEBUG_PRINT_CODE
6204 fputc ('\n', stderr);
6205#endif
6206
6207 bc_emit_bytecode_labeldef (lab);
6208 bc_expand_expr (TREE_OPERAND (exp, 2));
6209 bc_emit_bytecode_labeldef (lab1);
6210 return;
6211
6212 case TRUTH_ANDIF_EXPR:
6213
 6214 opcode = xjumpifnot;
6215 goto andorif;
6216
6217 case TRUTH_ORIF_EXPR:
6218
 6219 opcode = xjumpif;
6220 goto andorif;
6221
6222 case PLUS_EXPR:
6223
6224 binoptab = optab_plus_expr;
6225 goto binop;
6226
6227 case MINUS_EXPR:
6228
6229 binoptab = optab_minus_expr;
6230 goto binop;
6231
6232 case MULT_EXPR:
6233
6234 binoptab = optab_mult_expr;
6235 goto binop;
6236
6237 case TRUNC_DIV_EXPR:
6238 case FLOOR_DIV_EXPR:
6239 case CEIL_DIV_EXPR:
6240 case ROUND_DIV_EXPR:
6241 case EXACT_DIV_EXPR:
6242
6243 binoptab = optab_trunc_div_expr;
6244 goto binop;
6245
6246 case TRUNC_MOD_EXPR:
6247 case FLOOR_MOD_EXPR:
6248 case CEIL_MOD_EXPR:
6249 case ROUND_MOD_EXPR:
6250
6251 binoptab = optab_trunc_mod_expr;
6252 goto binop;
6253
6254 case FIX_ROUND_EXPR:
6255 case FIX_FLOOR_EXPR:
6256 case FIX_CEIL_EXPR:
6257 abort (); /* Not used for C. */
6258
6259 case FIX_TRUNC_EXPR:
6260 case FLOAT_EXPR:
6261 case MAX_EXPR:
6262 case MIN_EXPR:
6263 case FFS_EXPR:
6264 case LROTATE_EXPR:
6265 case RROTATE_EXPR:
6266 abort (); /* FIXME */
6267
6268 case RDIV_EXPR:
6269
6270 binoptab = optab_rdiv_expr;
6271 goto binop;
6272
6273 case BIT_AND_EXPR:
6274
6275 binoptab = optab_bit_and_expr;
6276 goto binop;
6277
6278 case BIT_IOR_EXPR:
6279
6280 binoptab = optab_bit_ior_expr;
6281 goto binop;
6282
6283 case BIT_XOR_EXPR:
6284
6285 binoptab = optab_bit_xor_expr;
6286 goto binop;
6287
6288 case LSHIFT_EXPR:
6289
6290 binoptab = optab_lshift_expr;
6291 goto binop;
6292
6293 case RSHIFT_EXPR:
6294
6295 binoptab = optab_rshift_expr;
6296 goto binop;
6297
6298 case TRUTH_AND_EXPR:
6299
6300 binoptab = optab_truth_and_expr;
6301 goto binop;
6302
6303 case TRUTH_OR_EXPR:
6304
6305 binoptab = optab_truth_or_expr;
6306 goto binop;
6307
6308 case LT_EXPR:
6309
6310 binoptab = optab_lt_expr;
6311 goto binop;
6312
6313 case LE_EXPR:
6314
6315 binoptab = optab_le_expr;
6316 goto binop;
6317
6318 case GE_EXPR:
6319
6320 binoptab = optab_ge_expr;
6321 goto binop;
6322
6323 case GT_EXPR:
6324
6325 binoptab = optab_gt_expr;
6326 goto binop;
6327
6328 case EQ_EXPR:
6329
6330 binoptab = optab_eq_expr;
6331 goto binop;
6332
6333 case NE_EXPR:
6334
6335 binoptab = optab_ne_expr;
6336 goto binop;
6337
6338 case NEGATE_EXPR:
6339
6340 unoptab = optab_negate_expr;
6341 goto unop;
6342
6343 case BIT_NOT_EXPR:
6344
6345 unoptab = optab_bit_not_expr;
6346 goto unop;
6347
6348 case TRUTH_NOT_EXPR:
6349
6350 unoptab = optab_truth_not_expr;
6351 goto unop;
6352
6353 case PREDECREMENT_EXPR:
6354
6355 incroptab = optab_predecrement_expr;
6356 goto increment;
6357
6358 case PREINCREMENT_EXPR:
6359
6360 incroptab = optab_preincrement_expr;
6361 goto increment;
6362
6363 case POSTDECREMENT_EXPR:
6364
6365 incroptab = optab_postdecrement_expr;
6366 goto increment;
6367
6368 case POSTINCREMENT_EXPR:
6369
6370 incroptab = optab_postincrement_expr;
6371 goto increment;
6372
6373 case CONSTRUCTOR:
6374
6375 bc_expand_constructor (exp);
6376 return;
6377
6378 case ERROR_MARK:
6379 case RTL_EXPR:
6380
6381 return;
6382
6383 case BIND_EXPR:
6384 {
6385 tree vars = TREE_OPERAND (exp, 0);
6386 int vars_need_expansion = 0;
6387
6388 /* Need to open a binding contour here because
 6389 if there are any cleanups they must be contained here. */
6390 expand_start_bindings (0);
6391
6392 /* Mark the corresponding BLOCK for output. */
6393 if (TREE_OPERAND (exp, 2) != 0)
6394 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6395
6396 /* If VARS have not yet been expanded, expand them now. */
6397 while (vars)
6398 {
6399 if (DECL_RTL (vars) == 0)
6400 {
6401 vars_need_expansion = 1;
6402 bc_expand_decl (vars, 0);
6403 }
6404 bc_expand_decl_init (vars);
6405 vars = TREE_CHAIN (vars);
6406 }
6407
6408 bc_expand_expr (TREE_OPERAND (exp, 1));
6409
6410 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6411
6412 return;
6413 }
6414 }
6415
6416 abort ();
6417
6418 binop:
6419
6420 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6421 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6422 return;
6423
6424
6425 unop:
6426
6427 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6428 return;
6429
6430
6431 andorif:
6432
6433 bc_expand_expr (TREE_OPERAND (exp, 0));
6434 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6435 lab = bc_get_bytecode_label ();
6436
 6437 bc_emit_instruction (duplicate);
6438 bc_emit_bytecode (opcode);
6439 bc_emit_bytecode_labelref (lab);
6440
6441#ifdef DEBUG_PRINT_CODE
6442 fputc ('\n', stderr);
6443#endif
6444
6445 bc_emit_instruction (drop);
6446
6447 bc_expand_expr (TREE_OPERAND (exp, 1));
6448 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6449 bc_emit_bytecode_labeldef (lab);
6450 return;
6451
6452
6453 increment:
6454
6455 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6456
6457 /* Push the quantum. */
6458 bc_expand_expr (TREE_OPERAND (exp, 1));
6459
6460 /* Convert it to the lvalue's type. */
6461 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6462
6463 /* Push the address of the lvalue */
 6464 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6465
6466 /* Perform actual increment */
 6467 bc_expand_increment (incroptab, type);
6468 return;
6469}
6470\f
6471/* Return the alignment in bits of EXP, a pointer valued expression.
6472 But don't return more than MAX_ALIGN no matter what.
6473 The alignment returned is, by default, the alignment of the thing that
6474 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6475
6476 Otherwise, look at the expression to see if we can do better, i.e., if the
6477 expression is actually pointing at an object whose alignment is tighter. */
6478
6479static int
6480get_pointer_alignment (exp, max_align)
6481 tree exp;
6482 unsigned max_align;
6483{
6484 unsigned align, inner;
6485
6486 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6487 return 0;
6488
6489 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6490 align = MIN (align, max_align);
6491
6492 while (1)
6493 {
6494 switch (TREE_CODE (exp))
6495 {
6496 case NOP_EXPR:
6497 case CONVERT_EXPR:
6498 case NON_LVALUE_EXPR:
6499 exp = TREE_OPERAND (exp, 0);
6500 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6501 return align;
6502 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
 6503 align = MIN (inner, max_align);
6504 break;
6505
6506 case PLUS_EXPR:
6507 /* If sum of pointer + int, restrict our maximum alignment to that
6508 imposed by the integer. If not, we can't do any better than
6509 ALIGN. */
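	  /* Worked example (added commentary): a pointer known to be
	     32-bit aligned plus a constant offset of 2 bytes gives
	     2 * BITS_PER_UNIT = 16, so the loop below halves max_align
	     from 32 to 16, i.e. only 16-bit alignment survives.  */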
6510 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6511 return align;
6512
6513 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6514 & (max_align - 1))
6515 != 0)
6516 max_align >>= 1;
6517
6518 exp = TREE_OPERAND (exp, 0);
6519 break;
6520
6521 case ADDR_EXPR:
6522 /* See what we are pointing at and look at its alignment. */
6523 exp = TREE_OPERAND (exp, 0);
6524 if (TREE_CODE (exp) == FUNCTION_DECL)
 6525 align = FUNCTION_BOUNDARY;
 6526 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
 6527 align = DECL_ALIGN (exp);
6528#ifdef CONSTANT_ALIGNMENT
6529 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6530 align = CONSTANT_ALIGNMENT (exp, align);
6531#endif
6532 return MIN (align, max_align);
6533
6534 default:
6535 return align;
6536 }
6537 }
6538}
6539\f
6540/* Return the tree node and offset if a given argument corresponds to
6541 a string constant. */
6542
6543static tree
6544string_constant (arg, ptr_offset)
6545 tree arg;
6546 tree *ptr_offset;
6547{
6548 STRIP_NOPS (arg);
6549
6550 if (TREE_CODE (arg) == ADDR_EXPR
6551 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6552 {
6553 *ptr_offset = integer_zero_node;
6554 return TREE_OPERAND (arg, 0);
6555 }
6556 else if (TREE_CODE (arg) == PLUS_EXPR)
6557 {
6558 tree arg0 = TREE_OPERAND (arg, 0);
6559 tree arg1 = TREE_OPERAND (arg, 1);
6560
6561 STRIP_NOPS (arg0);
6562 STRIP_NOPS (arg1);
6563
6564 if (TREE_CODE (arg0) == ADDR_EXPR
6565 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6566 {
6567 *ptr_offset = arg1;
6568 return TREE_OPERAND (arg0, 0);
6569 }
6570 else if (TREE_CODE (arg1) == ADDR_EXPR
6571 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6572 {
6573 *ptr_offset = arg0;
6574 return TREE_OPERAND (arg1, 0);
6575 }
6576 }
6577
6578 return 0;
6579}
6580
6581/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6582 way, because it could contain a zero byte in the middle.
6583 TREE_STRING_LENGTH is the size of the character array, not the string.
6584
6585 Unfortunately, string_constant can't access the values of const char
6586 arrays with initializers, so neither can we do so here. */
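/* Editorial example (added commentary): for (char[8]) "foo\0bar",
   TREE_STRING_LENGTH is 8 while the C string length is 3; with an
   unknown starting offset, the embedded zero byte makes the length
   incomputable, which the code below detects and gives up on.  */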
6587
6588static tree
6589c_strlen (src)
6590 tree src;
6591{
6592 tree offset_node;
6593 int offset, max;
6594 char *ptr;
6595
6596 src = string_constant (src, &offset_node);
6597 if (src == 0)
6598 return 0;
6599 max = TREE_STRING_LENGTH (src);
6600 ptr = TREE_STRING_POINTER (src);
6601 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6602 {
6603 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6604 compute the offset to the following null if we don't know where to
6605 start searching for it. */
6606 int i;
6607 for (i = 0; i < max; i++)
6608 if (ptr[i] == 0)
6609 return 0;
6610 /* We don't know the starting offset, but we do know that the string
6611 has no internal zero bytes. We can assume that the offset falls
6612 within the bounds of the string; otherwise, the programmer deserves
6613 what he gets. Subtract the offset from the length of the string,
6614 and return that. */
6615 /* This would perhaps not be valid if we were dealing with named
6616 arrays in addition to literal string constants. */
6617 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6618 }
6619
6620 /* We have a known offset into the string. Start searching there for
6621 a null character. */
6622 if (offset_node == 0)
6623 offset = 0;
6624 else
6625 {
6626 /* Did we get a long long offset? If so, punt. */
6627 if (TREE_INT_CST_HIGH (offset_node) != 0)
6628 return 0;
6629 offset = TREE_INT_CST_LOW (offset_node);
6630 }
6631 /* If the offset is known to be out of bounds, warn, and call strlen at
6632 runtime. */
6633 if (offset < 0 || offset > max)
6634 {
6635 warning ("offset outside bounds of constant string");
6636 return 0;
6637 }
6638 /* Use strlen to search for the first zero byte. Since any strings
6639 constructed with build_string will have nulls appended, we win even
6640 if we get handed something like (char[4])"abcd".
6641
6642 Since OFFSET is our starting index into the string, no further
6643 calculation is needed. */
6644 return size_int (strlen (ptr + offset));
6645}
6646\f
6647/* Expand an expression EXP that calls a built-in function,
6648 with result going to TARGET if that's convenient
6649 (and in mode MODE if that's convenient).
6650 SUBTARGET may be used as the target for computing one of EXP's operands.
6651 IGNORE is nonzero if the value is to be ignored. */
6652
6653#define CALLED_AS_BUILT_IN(NODE) \
6654 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
6655
6656static rtx
6657expand_builtin (exp, target, subtarget, mode, ignore)
6658 tree exp;
6659 rtx target;
6660 rtx subtarget;
6661 enum machine_mode mode;
6662 int ignore;
6663{
6664 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6665 tree arglist = TREE_OPERAND (exp, 1);
6666 rtx op0;
6667 rtx lab1, insns;
6668 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6669 optab builtin_optab;
6670
6671 switch (DECL_FUNCTION_CODE (fndecl))
6672 {
6673 case BUILT_IN_ABS:
6674 case BUILT_IN_LABS:
6675 case BUILT_IN_FABS:
6676 /* build_function_call changes these into ABS_EXPR. */
6677 abort ();
6678
6679 case BUILT_IN_SIN:
6680 case BUILT_IN_COS:
6681 case BUILT_IN_FSQRT:
6682 /* If not optimizing, call the library function. */
6683 if (! optimize)
6684 break;
6685
6686 if (arglist == 0
6687 /* Arg could be wrong type if user redeclared this fcn wrong. */
6688 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6689	break;
6690
6691 /* Stabilize and compute the argument. */
6692 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6693 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6694 {
6695 exp = copy_node (exp);
6696 arglist = copy_node (arglist);
6697 TREE_OPERAND (exp, 1) = arglist;
6698 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6699 }
6700 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6701
6702 /* Make a suitable register to place result in. */
6703 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6704
6705 emit_queue ();
6706 start_sequence ();
6707
6708 switch (DECL_FUNCTION_CODE (fndecl))
6709 {
6710 case BUILT_IN_SIN:
6711 builtin_optab = sin_optab; break;
6712 case BUILT_IN_COS:
6713 builtin_optab = cos_optab; break;
6714 case BUILT_IN_FSQRT:
6715 builtin_optab = sqrt_optab; break;
6716 default:
6717 abort ();
6718 }
6719
6720 /* Compute into TARGET.
6721 Set TARGET to wherever the result comes back. */
6722 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6723 builtin_optab, op0, target, 0);
6724
6725 /* If we were unable to expand via the builtin, stop the
6726 sequence (without outputting the insns) and break, causing
6727	 a call to the library function.  */
6728 if (target == 0)
6729 {
6730 end_sequence ();
6731 break;
6732 }
6733
6734 /* Check the results by default. But if flag_fast_math is turned on,
6735 then assume sqrt will always be called with valid arguments. */
6736
6737 if (! flag_fast_math)
6738 {
6739 /* Don't define the builtin FP instructions
6740 if your machine is not IEEE. */
6741 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6742 abort ();
6743
6744 lab1 = gen_label_rtx ();
6745
6746 /* Test the result; if it is NaN, set errno=EDOM because
6747 the argument was not in the domain. */
6748 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6749 emit_jump_insn (gen_beq (lab1));
6750
6751#if TARGET_EDOM
6752 {
6753#ifdef GEN_ERRNO_RTX
6754 rtx errno_rtx = GEN_ERRNO_RTX;
6755#else
6756 rtx errno_rtx
6757 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6758#endif
6759
6760 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6761 }
6762#else
6763 /* We can't set errno=EDOM directly; let the library call do it.
6764 Pop the arguments right away in case the call gets deleted. */
6765 NO_DEFER_POP;
6766 expand_call (exp, target, 0);
6767 OK_DEFER_POP;
6768#endif
6769
6770 emit_label (lab1);
6771 }
6772
6773 /* Output the entire sequence. */
6774 insns = get_insns ();
6775 end_sequence ();
6776 emit_insns (insns);
6777
6778 return target;
6779
6780 /* __builtin_apply_args returns block of memory allocated on
6781 the stack into which is stored the arg pointer, structure
6782 value address, static chain, and all the registers that might
6783 possibly be used in performing a function call. The code is
6784 moved to the start of the function so the incoming values are
6785 saved. */
6786 case BUILT_IN_APPLY_ARGS:
6787 /* Don't do __builtin_apply_args more than once in a function.
6788 Save the result of the first call and reuse it. */
6789 if (apply_args_value != 0)
6790 return apply_args_value;
6791 {
6792 /* When this function is called, it means that registers must be
6793 saved on entry to this function. So we migrate the
6794 call to the first insn of this function. */
6795 rtx temp;
6796 rtx seq;
6797
6798 start_sequence ();
6799 temp = expand_builtin_apply_args ();
6800 seq = get_insns ();
6801 end_sequence ();
6802
6803 apply_args_value = temp;
6804
6805 /* Put the sequence after the NOTE that starts the function.
6806 If this is inside a SEQUENCE, make the outer-level insn
6807 chain current, so the code is placed at the start of the
6808 function. */
6809 push_topmost_sequence ();
6810 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6811 pop_topmost_sequence ();
6812 return temp;
6813 }
6814
6815 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6816 FUNCTION with a copy of the parameters described by
6817 ARGUMENTS, and ARGSIZE. It returns a block of memory
6818 allocated on the stack into which is stored all the registers
6819 that might possibly be used for returning the result of a
6820 function. ARGUMENTS is the value returned by
6821 __builtin_apply_args. ARGSIZE is the number of bytes of
6822 arguments that must be copied. ??? How should this value be
6823 computed? We'll also need a safe worst case value for varargs
6824 functions. */
6825 case BUILT_IN_APPLY:
6826 if (arglist == 0
6827 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6828 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6829 || TREE_CHAIN (arglist) == 0
6830 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6831 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6832 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6833 return const0_rtx;
6834 else
6835 {
6836 int i;
6837 tree t;
6838 rtx ops[3];
6839
6840 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6841 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6842
6843 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6844 }
6845
6846 /* __builtin_return (RESULT) causes the function to return the
6847 value described by RESULT. RESULT is address of the block of
6848 memory returned by __builtin_apply. */
6849 case BUILT_IN_RETURN:
6850 if (arglist
6851 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6852 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
6853 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
6854 NULL_RTX, VOIDmode, 0));
6855 return const0_rtx;
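
      /* Taken together, __builtin_apply_args, __builtin_apply and
	 __builtin_return let a function forward its own arguments to
	 another function and return whatever it returned.  A hypothetical
	 sketch (TARGET_FUNCTION is made up, and the 64-byte argument-size
	 guess is arbitrary; see the ??? comment above):  */
#if 0
      extern void target_function ();
      void wrapper ()
      {
	void *args = __builtin_apply_args ();
	void *result = __builtin_apply ((void (*)()) target_function,
					args, 64);
	__builtin_return (result);
      }
#endif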
6856
6857 case BUILT_IN_SAVEREGS:
6858 /* Don't do __builtin_saveregs more than once in a function.
6859 Save the result of the first call and reuse it. */
6860 if (saveregs_value != 0)
6861 return saveregs_value;
6862 {
6863 /* When this function is called, it means that registers must be
6864 saved on entry to this function. So we migrate the
6865 call to the first insn of this function. */
6866 rtx temp;
6867 rtx seq;
6868
6869 /* Now really call the function. `expand_call' does not call
6870 expand_builtin, so there is no danger of infinite recursion here. */
6871 start_sequence ();
6872
6873#ifdef EXPAND_BUILTIN_SAVEREGS
6874 /* Do whatever the machine needs done in this case. */
6875 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6876#else
6877 /* The register where the function returns its value
6878 is likely to have something else in it, such as an argument.
6879 So preserve that register around the call. */
6880
6881 if (value_mode != VOIDmode)
6882 {
6883 rtx valreg = hard_libcall_value (value_mode);
6884 rtx saved_valreg = gen_reg_rtx (value_mode);
6885
6886	      emit_move_insn (saved_valreg, valreg);
6887 temp = expand_call (exp, target, ignore);
6888 emit_move_insn (valreg, saved_valreg);
6889	    }
6890 else
6891 /* Generate the call, putting the value in a pseudo. */
6892 temp = expand_call (exp, target, ignore);
6893#endif
6894
6895 seq = get_insns ();
6896 end_sequence ();
6897
6898 saveregs_value = temp;
6899
6900 /* Put the sequence after the NOTE that starts the function.
6901 If this is inside a SEQUENCE, make the outer-level insn
6902 chain current, so the code is placed at the start of the
6903 function. */
6904 push_topmost_sequence ();
6905 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6906 pop_topmost_sequence ();
6907 return temp;
6908 }
6909
6910 /* __builtin_args_info (N) returns word N of the arg space info
6911 for the current function. The number and meanings of words
6912	 are controlled by the definition of CUMULATIVE_ARGS.  */
6913 case BUILT_IN_ARGS_INFO:
6914 {
6915 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6916 int i;
6917 int *word_ptr = (int *) &current_function_args_info;
6918 tree type, elts, result;
6919
6920 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6921 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6922 __FILE__, __LINE__);
6923
6924 if (arglist != 0)
6925 {
6926 tree arg = TREE_VALUE (arglist);
6927 if (TREE_CODE (arg) != INTEGER_CST)
6928 error ("argument of `__builtin_args_info' must be constant");
6929 else
6930 {
6931 int wordnum = TREE_INT_CST_LOW (arg);
6932
6933 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6934 error ("argument of `__builtin_args_info' out of range");
6935 else
6936 return GEN_INT (word_ptr[wordnum]);
6937 }
6938 }
6939 else
6940 error ("missing argument in `__builtin_args_info'");
6941
6942 return const0_rtx;
6943
6944#if 0
6945 for (i = 0; i < nwords; i++)
6946 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6947
6948 type = build_array_type (integer_type_node,
6949 build_index_type (build_int_2 (nwords, 0)));
6950 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6951 TREE_CONSTANT (result) = 1;
6952 TREE_STATIC (result) = 1;
6953 result = build (INDIRECT_REF, build_pointer_type (type), result);
6954 TREE_CONSTANT (result) = 1;
6955 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6956#endif
6957 }
6958
6959 /* Return the address of the first anonymous stack arg.
6960 This should only be used for stdarg functions. */
6961 case BUILT_IN_NEXT_ARG:
6962 {
6963 tree fntype = TREE_TYPE (current_function_decl);
6964 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
6965 tree arg;
6966
6967 if (TYPE_ARG_TYPES (fntype) == 0
6968 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6969 == void_type_node))
6970 {
6971 error ("`va_start' used in function with fixed args");
6972 return const0_rtx;
6973 }
6974
6975 arg = TREE_VALUE (arglist);
6976 /* Strip off all nops for the sake of the comparison. This is not
6977 quite the same as STRIP_NOPS. It does more. */
6978 while (TREE_CODE (arg) == NOP_EXPR
6979 || TREE_CODE (arg) == CONVERT_EXPR
6980 || TREE_CODE (arg) == NON_LVALUE_EXPR)
6981 arg = TREE_OPERAND (arg, 0);
6982 if (arg != last_parm)
6983 warning ("second parameter of `va_start' not last named argument");
6984 }
6985
6986 return expand_binop (Pmode, add_optab,
6987 current_function_internal_arg_pointer,
6988 current_function_arg_offset_rtx,
6989 NULL_RTX, 0, OPTAB_LIB_WIDEN);
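
      /* __builtin_next_arg is what va_start expands to.  Hypothetical user
	 code illustrating the check above (va_list etc. from <stdarg.h>):  */
#if 0
      int sum (int count, ...)
      {
	va_list ap;
	int total = 0;
	va_start (ap, count);	/* `count' is the last named arg: no warning */
	while (count-- > 0)
	  total += va_arg (ap, int);
	va_end (ap);
	return total;
      }
#endif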
6990
6991 case BUILT_IN_CLASSIFY_TYPE:
6992 if (arglist != 0)
6993 {
6994 tree type = TREE_TYPE (TREE_VALUE (arglist));
6995 enum tree_code code = TREE_CODE (type);
6996 if (code == VOID_TYPE)
6997 return GEN_INT (void_type_class);
6998 if (code == INTEGER_TYPE)
6999 return GEN_INT (integer_type_class);
7000 if (code == CHAR_TYPE)
7001 return GEN_INT (char_type_class);
7002 if (code == ENUMERAL_TYPE)
7003 return GEN_INT (enumeral_type_class);
7004 if (code == BOOLEAN_TYPE)
7005 return GEN_INT (boolean_type_class);
7006 if (code == POINTER_TYPE)
7007 return GEN_INT (pointer_type_class);
7008 if (code == REFERENCE_TYPE)
7009 return GEN_INT (reference_type_class);
7010 if (code == OFFSET_TYPE)
7011 return GEN_INT (offset_type_class);
7012 if (code == REAL_TYPE)
7013 return GEN_INT (real_type_class);
7014 if (code == COMPLEX_TYPE)
7015 return GEN_INT (complex_type_class);
7016 if (code == FUNCTION_TYPE)
7017 return GEN_INT (function_type_class);
7018 if (code == METHOD_TYPE)
7019 return GEN_INT (method_type_class);
7020 if (code == RECORD_TYPE)
7021 return GEN_INT (record_type_class);
7022 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7023 return GEN_INT (union_type_class);
7024 if (code == ARRAY_TYPE)
7025 {
7026 if (TYPE_STRING_FLAG (type))
7027 return GEN_INT (string_type_class);
7028 else
7029 return GEN_INT (array_type_class);
7030 }
7031 if (code == SET_TYPE)
7032 return GEN_INT (set_type_class);
7033 if (code == FILE_TYPE)
7034 return GEN_INT (file_type_class);
7035 if (code == LANG_TYPE)
7036 return GEN_INT (lang_type_class);
7037 }
7038 return GEN_INT (no_type_class);
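      /* For example (hypothetical calls, each folded to a constant):
	   __builtin_classify_type (0)    => integer_type_class
	   __builtin_classify_type (0.0)  => real_type_class  */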
7039
7040 case BUILT_IN_CONSTANT_P:
7041 if (arglist == 0)
7042 return const0_rtx;
7043 else
7044 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7045 ? const1_rtx : const0_rtx);
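      /* For example, a hypothetical __builtin_constant_p (3 + 4) yields 1,
	 since the sum folds to the constant 7 before we get here, while
	 __builtin_constant_p (x) yields 0 for any `x' that does not fold
	 to a constant.  */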
7046
7047 case BUILT_IN_FRAME_ADDRESS:
7048 /* The argument must be a nonnegative integer constant.
7049 It counts the number of frames to scan up the stack.
7050 The value is the address of that frame. */
7051 case BUILT_IN_RETURN_ADDRESS:
7052 /* The argument must be a nonnegative integer constant.
7053 It counts the number of frames to scan up the stack.
7054 The value is the return address saved in that frame. */
7055 if (arglist == 0)
7056 /* Warning about missing arg was already issued. */
7057 return const0_rtx;
7058 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7059 {
7060 error ("invalid arg to `__builtin_return_address'");
7061 return const0_rtx;
7062 }
7063  else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7064 {
7065 error ("invalid arg to `__builtin_return_address'");
7066 return const0_rtx;
7067 }
7068 else
7069 {
7070 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7071 rtx tem = frame_pointer_rtx;
7072 int i;
7073
7074 /* Some machines need special handling before we can access arbitrary
7075 frames. For example, on the sparc, we must first flush all
7076 register windows to the stack. */
7077#ifdef SETUP_FRAME_ADDRESSES
7078 SETUP_FRAME_ADDRESSES ();
7079#endif
7080
7081 /* On the sparc, the return address is not in the frame, it is
7082 in a register. There is no way to access it off of the current
7083 frame pointer, but it can be accessed off the previous frame
7084 pointer by reading the value from the register window save
7085 area. */
7086#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7087 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7088 count--;
7089#endif
7090
7091 /* Scan back COUNT frames to the specified frame. */
7092 for (i = 0; i < count; i++)
7093 {
7094 /* Assume the dynamic chain pointer is in the word that
7095 the frame address points to, unless otherwise specified. */
7096#ifdef DYNAMIC_CHAIN_ADDRESS
7097 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7098#endif
7099 tem = memory_address (Pmode, tem);
7100 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7101 }
7102
7103 /* For __builtin_frame_address, return what we've got. */
7104 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7105 return tem;
7106
7107 /* For __builtin_return_address,
7108 Get the return address from that frame. */
7109#ifdef RETURN_ADDR_RTX
7110 return RETURN_ADDR_RTX (count, tem);
7111#else
7112 tem = memory_address (Pmode,
7113 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7114 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7115#endif
7116 }
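      /* Hypothetical usage, for illustration:
	   __builtin_frame_address (0)   => address of the current frame
	   __builtin_return_address (0)  => where the current call returns to
	 The argument counts frames up the stack and must be a constant.  */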
7117
7118 case BUILT_IN_ALLOCA:
7119 if (arglist == 0
7120 /* Arg could be non-integer if user redeclared this fcn wrong. */
7121 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7122	break;
7123 current_function_calls_alloca = 1;
7124 /* Compute the argument. */
7125 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7126
7127 /* Allocate the desired space. */
7128 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7129
7130 /* Record the new stack level for nonlocal gotos. */
7131 if (nonlocal_goto_handler_slot != 0)
7132 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
7133 return target;
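      /* Hypothetical usage: `char *buf = __builtin_alloca (n);' yields N
	 bytes of stack storage, reclaimed automatically when the calling
	 function returns; hence the stack-level bookkeeping above.  */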
7134
7135 case BUILT_IN_FFS:
7136 /* If not optimizing, call the library function. */
7137      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7138 break;
7139
7140 if (arglist == 0
7141 /* Arg could be non-integer if user redeclared this fcn wrong. */
7142 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7143	break;
7144
7145 /* Compute the argument. */
7146 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7147 /* Compute ffs, into TARGET if possible.
7148 Set TARGET to wherever the result comes back. */
7149 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7150 ffs_optab, op0, target, 1);
7151 if (target == 0)
7152 abort ();
7153 return target;
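      /* ffs returns one plus the index of the least significant 1-bit, or
	 zero for a zero argument.  For example (hypothetical calls):
	   __builtin_ffs (0) => 0
	   __builtin_ffs (8) => 4, since bit 3 is the lowest bit set.  */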
7154
7155 case BUILT_IN_STRLEN:
7156 /* If not optimizing, call the library function. */
7157      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7158 break;
7159
7160 if (arglist == 0
7161 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7162 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7163	break;
7164 else
7165 {
7166 tree src = TREE_VALUE (arglist);
7167 tree len = c_strlen (src);
7168
7169 int align
7170 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7171
7172 rtx result, src_rtx, char_rtx;
7173 enum machine_mode insn_mode = value_mode, char_mode;
7174 enum insn_code icode;
7175
7176 /* If the length is known, just return it. */
7177 if (len != 0)
7178 return expand_expr (len, target, mode, 0);
7179
7180 /* If SRC is not a pointer type, don't do this operation inline. */
7181 if (align == 0)
7182 break;
7183
7184 /* Call a function if we can't compute strlen in the right mode. */
7185
7186 while (insn_mode != VOIDmode)
7187 {
7188 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7189 if (icode != CODE_FOR_nothing)
7190 break;
7191
7192 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7193 }
7194 if (insn_mode == VOIDmode)
7195 break;
7196
7197 /* Make a place to write the result of the instruction. */
7198 result = target;
7199 if (! (result != 0
7200 && GET_CODE (result) == REG
7201 && GET_MODE (result) == insn_mode
7202 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7203 result = gen_reg_rtx (insn_mode);
7204
7205 /* Make sure the operands are acceptable to the predicates. */
7206
7207 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7208 result = gen_reg_rtx (insn_mode);
7209
7210 src_rtx = memory_address (BLKmode,
7211 expand_expr (src, NULL_RTX, Pmode,
7212 EXPAND_NORMAL));
7213 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7214 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7215
7216 char_rtx = const0_rtx;
7217 char_mode = insn_operand_mode[(int)icode][2];
7218 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7219 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7220
7221 emit_insn (GEN_FCN (icode) (result,
7222 gen_rtx (MEM, BLKmode, src_rtx),
7223 char_rtx, GEN_INT (align)));
7224
7225 /* Return the value in the proper mode for this function. */
7226 if (GET_MODE (result) == value_mode)
7227 return result;
7228 else if (target != 0)
7229 {
7230 convert_move (target, result, 0);
7231 return target;
7232 }
7233 else
7234 return convert_to_mode (value_mode, result, 0);
7235 }
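      /* So for a hypothetical __builtin_strlen ("hello"), c_strlen folds
	 the result to the constant 5 and no search is emitted at all; only
	 a non-constant argument reaches the strlen insn or the library.  */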
7236
7237 case BUILT_IN_STRCPY:
7238      /* If not optimizing, call the library function.  */
7239      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7240 break;
7241
7242 if (arglist == 0
7243 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7244 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7245 || TREE_CHAIN (arglist) == 0
7246 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7247	break;
7248      else
7249	{
7250	  tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7251
7252 if (len == 0)
7253 break;
7254
7255	  len = size_binop (PLUS_EXPR, len, integer_one_node);
7256
7257	  chainon (arglist, build_tree_list (NULL_TREE, len));
7258 }
7259
7260 /* Drops in. */
7261 case BUILT_IN_MEMCPY:
7262 /* If not optimizing, call the library function. */
7263      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7264	break;
7265
7266 if (arglist == 0
7267 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7268 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7269 || TREE_CHAIN (arglist) == 0
7270 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7271 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7272 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7273	break;
7274      else
7275	{
7276 tree dest = TREE_VALUE (arglist);
7277 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7278 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7279
7280 int src_align
7281 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7282 int dest_align
7283 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7284 rtx dest_rtx, dest_mem, src_mem;
7285
7286 /* If either SRC or DEST is not a pointer type, don't do
7287 this operation in-line. */
7288 if (src_align == 0 || dest_align == 0)
7289 {
7290 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7291 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7292 break;
7293 }
7294
7295 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7296 dest_mem = gen_rtx (MEM, BLKmode,
7297 memory_address (BLKmode, dest_rtx));
7298 src_mem = gen_rtx (MEM, BLKmode,
7299 memory_address (BLKmode,
7300 expand_expr (src, NULL_RTX,
7301 Pmode,
7302 EXPAND_NORMAL)));
7303
7304 /* Copy word part most expediently. */
7305 emit_block_move (dest_mem, src_mem,
7306 expand_expr (len, NULL_RTX, VOIDmode, 0),
7307 MIN (src_align, dest_align));
7308 return dest_rtx;
7309 }
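      /* The BUILT_IN_STRCPY case above drops into this code: for a
	 hypothetical __builtin_strcpy (buf, "abc"), c_strlen gives 3, one
	 is added for the terminating null, and the copy becomes a 4-byte
	 block move.  */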
7310
7311/* These comparison functions need an instruction that returns an actual
7312 index. An ordinary compare that just sets the condition codes
7313 is not enough. */
7314#ifdef HAVE_cmpstrsi
7315 case BUILT_IN_STRCMP:
7316 /* If not optimizing, call the library function. */
7317      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7318 break;
7319
7320 if (arglist == 0
7321 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7322 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7323 || TREE_CHAIN (arglist) == 0
7324 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7325	break;
7326 else if (!HAVE_cmpstrsi)
7327 break;
7328 {
7329 tree arg1 = TREE_VALUE (arglist);
7330 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7331 tree offset;
7332 tree len, len2;
7333
7334 len = c_strlen (arg1);
7335 if (len)
7336 len = size_binop (PLUS_EXPR, integer_one_node, len);
7337 len2 = c_strlen (arg2);
7338 if (len2)
7339 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7340
7341 /* If we don't have a constant length for the first, use the length
7342 of the second, if we know it. We don't require a constant for
7343 this case; some cost analysis could be done if both are available
7344 but neither is constant. For now, assume they're equally cheap.
7345
7346 If both strings have constant lengths, use the smaller. This
7347 could arise if optimization results in strcpy being called with
7348 two fixed strings, or if the code was machine-generated. We should
7349 add some code to the `memcmp' handler below to deal with such
7350 situations, someday. */
7351 if (!len || TREE_CODE (len) != INTEGER_CST)
7352 {
7353 if (len2)
7354 len = len2;
7355 else if (len == 0)
7356 break;
7357 }
7358 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7359 {
7360 if (tree_int_cst_lt (len2, len))
7361 len = len2;
7362 }
7363
7364 chainon (arglist, build_tree_list (NULL_TREE, len));
7365 }
7366
7367 /* Drops in. */
7368 case BUILT_IN_MEMCMP:
7369 /* If not optimizing, call the library function. */
7370      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7371 break;
7372
7373 if (arglist == 0
7374 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7375 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7376 || TREE_CHAIN (arglist) == 0
7377 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7378 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7379 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7380	break;
7381 else if (!HAVE_cmpstrsi)
7382 break;
7383 {
7384 tree arg1 = TREE_VALUE (arglist);
7385 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7386 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7387 rtx result;
7388
7389 int arg1_align
7390 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7391 int arg2_align
7392 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7393 enum machine_mode insn_mode
7394 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7395
7396	  /* If either argument is not a pointer type, don't do this
	     operation inline; just call the library function.  */
7397 if (arg1_align == 0 || arg2_align == 0)
7398 {
7399 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7400 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7401 break;
7402 }
7403
7404 /* Make a place to write the result of the instruction. */
7405 result = target;
7406 if (! (result != 0
7407 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7408 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7409 result = gen_reg_rtx (insn_mode);
7410
7411 emit_insn (gen_cmpstrsi (result,
7412 gen_rtx (MEM, BLKmode,
7413 expand_expr (arg1, NULL_RTX, Pmode,
7414 EXPAND_NORMAL)),
7415 gen_rtx (MEM, BLKmode,
7416 expand_expr (arg2, NULL_RTX, Pmode,
7417 EXPAND_NORMAL)),
7418 expand_expr (len, NULL_RTX, VOIDmode, 0),
7419 GEN_INT (MIN (arg1_align, arg2_align))));
7420
7421 /* Return the value in the proper mode for this function. */
7422 mode = TYPE_MODE (TREE_TYPE (exp));
7423 if (GET_MODE (result) == mode)
7424 return result;
7425 else if (target != 0)
7426 {
7427 convert_move (target, result, 0);
7428 return target;
7429	    }
7430 else
7431 return convert_to_mode (mode, result, 0);
7432 }
7433#else
7434 case BUILT_IN_STRCMP:
7435 case BUILT_IN_MEMCMP:
7436 break;
7437#endif
7438
7439 default: /* just do library call, if unknown builtin */
7440 error ("built-in function `%s' not currently supported",
7441 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7442 }
7443
7444 /* The switch statement above can drop through to cause the function
7445 to be called normally. */
7446
7447 return expand_call (exp, target, ignore);
7448}
7449\f
7450/* Built-in functions to perform an untyped call and return. */
7451
7452/* For each register that may be used for calling a function, this
7453 gives a mode used to copy the register's value. VOIDmode indicates
7454 the register is not used for calling a function. If the machine
7455 has register windows, this gives only the outbound registers.
7456 INCOMING_REGNO gives the corresponding inbound register. */
7457static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7458
7459/* For each register that may be used for returning values, this gives
7460 a mode used to copy the register's value. VOIDmode indicates the
7461 register is not used for returning values. If the machine has
7462 register windows, this gives only the outbound registers.
7463 INCOMING_REGNO gives the corresponding inbound register. */
7464static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7465
7466/* For each register that may be used for calling a function, this
7467 gives the offset of that register into the block returned by
7468   __builtin_apply_args.  0 indicates that the register is not
7469 used for calling a function. */
7470static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7471
7472/* Return the offset of register REGNO into the block returned by
7473 __builtin_apply_args. This is not declared static, since it is
7474 needed in objc-act.c. */
7475
7476int
7477apply_args_register_offset (regno)
7478 int regno;
7479{
7480 apply_args_size ();
7481
7482 /* Arguments are always put in outgoing registers (in the argument
7483     block) when that makes sense.  */
7484#ifdef OUTGOING_REGNO
7485 regno = OUTGOING_REGNO(regno);
7486#endif
7487 return apply_args_reg_offset[regno];
7488}
7489
7490/* Return the size required for the block returned by __builtin_apply_args,
7491 and initialize apply_args_mode. */
7492
7493static int
7494apply_args_size ()
7495{
7496 static int size = -1;
7497 int align, regno;
7498 enum machine_mode mode;
7499
7500 /* The values computed by this function never change. */
7501 if (size < 0)
7502 {
7503 /* The first value is the incoming arg-pointer. */
7504 size = GET_MODE_SIZE (Pmode);
7505
7506 /* The second value is the structure value address unless this is
7507 passed as an "invisible" first argument. */
7508 if (struct_value_rtx)
7509 size += GET_MODE_SIZE (Pmode);
7510
7511 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7512 if (FUNCTION_ARG_REGNO_P (regno))
7513	  {
7514 /* Search for the proper mode for copying this register's
7515 value. I'm not sure this is right, but it works so far. */
7516 enum machine_mode best_mode = VOIDmode;
7517
7518 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7519 mode != VOIDmode;
7520 mode = GET_MODE_WIDER_MODE (mode))
7521 if (HARD_REGNO_MODE_OK (regno, mode)
7522 && HARD_REGNO_NREGS (regno, mode) == 1)
7523 best_mode = mode;
7524
7525 if (best_mode == VOIDmode)
7526 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7527 mode != VOIDmode;
7528 mode = GET_MODE_WIDER_MODE (mode))
7529 if (HARD_REGNO_MODE_OK (regno, mode)
7530 && (mov_optab->handlers[(int) mode].insn_code
7531 != CODE_FOR_nothing))
7532 best_mode = mode;
7533
7534 mode = best_mode;
7535 if (mode == VOIDmode)
7536 abort ();
7537
7538 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7539 if (size % align != 0)
7540 size = CEIL (size, align) * align;
7541 apply_args_reg_offset[regno] = size;
7542 size += GET_MODE_SIZE (mode);
7543 apply_args_mode[regno] = mode;
7544 }
7545 else
7546 {
7547 apply_args_mode[regno] = VOIDmode;
7548 apply_args_reg_offset[regno] = 0;
7549	  }
7550 }
7551 return size;
7552}
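
/* A worked instance of the layout arithmetic above: if SIZE is 6 so far
   and the next register's mode requires ALIGN == 4, then
   CEIL (6, 4) * 4 == 8, so that register is stored at offset 8 and SIZE
   then advances by the mode's size from there.  */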
7553
7554/* Return the size required for the block returned by __builtin_apply,
7555 and initialize apply_result_mode. */
7556
7557static int
7558apply_result_size ()
7559{
7560 static int size = -1;
7561 int align, regno;
7562 enum machine_mode mode;
7563
7564 /* The values computed by this function never change. */
7565 if (size < 0)
7566 {
7567 size = 0;
7568
7569 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7570 if (FUNCTION_VALUE_REGNO_P (regno))
7571 {
7572 /* Search for the proper mode for copying this register's
7573 value. I'm not sure this is right, but it works so far. */
7574 enum machine_mode best_mode = VOIDmode;
7575
7576 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7577 mode != TImode;
7578 mode = GET_MODE_WIDER_MODE (mode))
7579 if (HARD_REGNO_MODE_OK (regno, mode))
7580 best_mode = mode;
7581
7582 if (best_mode == VOIDmode)
7583 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7584 mode != VOIDmode;
7585 mode = GET_MODE_WIDER_MODE (mode))
7586 if (HARD_REGNO_MODE_OK (regno, mode)
7587 && (mov_optab->handlers[(int) mode].insn_code
7588 != CODE_FOR_nothing))
7589 best_mode = mode;
7590
7591 mode = best_mode;
7592 if (mode == VOIDmode)
7593 abort ();
7594
7595 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7596 if (size % align != 0)
7597 size = CEIL (size, align) * align;
7598 size += GET_MODE_SIZE (mode);
7599 apply_result_mode[regno] = mode;
7600 }
7601 else
7602	apply_result_mode[regno] = VOIDmode;
7603
7604 /* Allow targets that use untyped_call and untyped_return to override
7605 the size so that machine-specific information can be stored here. */
7606#ifdef APPLY_RESULT_SIZE
7607 size = APPLY_RESULT_SIZE;
7608#endif
7609 }
7610 return size;
7611}
7612
7613#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7614/* Create a vector describing the result block RESULT. If SAVEP is true,
7615 the result block is used to save the values; otherwise it is used to
7616 restore the values. */
7617
7618static rtx
7619result_vector (savep, result)
7620 int savep;
7621 rtx result;
7622{
7623 int regno, size, align, nelts;
7624 enum machine_mode mode;
7625 rtx reg, mem;
7626 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7627
7628 size = nelts = 0;
7629 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7630 if ((mode = apply_result_mode[regno]) != VOIDmode)
7631 {
7632 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7633 if (size % align != 0)
7634 size = CEIL (size, align) * align;
7635 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7636 mem = change_address (result, mode,
7637 plus_constant (XEXP (result, 0), size));
7638 savevec[nelts++] = (savep
7639 ? gen_rtx (SET, VOIDmode, mem, reg)
7640 : gen_rtx (SET, VOIDmode, reg, mem));
7641 size += GET_MODE_SIZE (mode);
7642      }
7643 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7644}
7645#endif /* HAVE_untyped_call or HAVE_untyped_return */
7646
7647/* Save the state required to perform an untyped call with the same
7648 arguments as were passed to the current function. */
7649
7650static rtx
7651expand_builtin_apply_args ()
7652{
7653 rtx registers;
7654 int size, align, regno;
7655 enum machine_mode mode;
7656
7657 /* Create a block where the arg-pointer, structure value address,
7658 and argument registers can be saved. */
7659 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7660
7661 /* Walk past the arg-pointer and structure value address. */
7662 size = GET_MODE_SIZE (Pmode);
7663 if (struct_value_rtx)
7664 size += GET_MODE_SIZE (Pmode);
7665
7666 /* Save each register used in calling a function to the block. */
7667 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7668 if ((mode = apply_args_mode[regno]) != VOIDmode)
7669      {
7670 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7671 if (size % align != 0)
7672 size = CEIL (size, align) * align;
7673 emit_move_insn (change_address (registers, mode,
7674 plus_constant (XEXP (registers, 0),
7675 size)),
7676 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7677 size += GET_MODE_SIZE (mode);
7678 }
7679
7680 /* Save the arg pointer to the block. */
7681 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7682 copy_to_reg (virtual_incoming_args_rtx));
7683 size = GET_MODE_SIZE (Pmode);
7684
7685 /* Save the structure value address unless this is passed as an
7686 "invisible" first argument. */
7687 if (struct_value_incoming_rtx)
7688 {
7689 emit_move_insn (change_address (registers, Pmode,
7690 plus_constant (XEXP (registers, 0),
7691 size)),
7692 copy_to_reg (struct_value_incoming_rtx));
7693 size += GET_MODE_SIZE (Pmode);
7694 }
7695
7696 /* Return the address of the block. */
7697 return copy_addr_to_reg (XEXP (registers, 0));
7698}
7699
7700/* Perform an untyped call and save the state required to perform an
7701 untyped return of whatever value was returned by the given function. */
7702
7703static rtx
7704expand_builtin_apply (function, arguments, argsize)
7705 rtx function, arguments, argsize;
7706{
7707 int size, align, regno;
7708 enum machine_mode mode;
7709 rtx incoming_args, result, reg, dest, call_insn;
7710 rtx old_stack_level = 0;
7711 rtx use_insns = 0;
7712
7713 /* Create a block where the return registers can be saved. */
7714 result = assign_stack_local (BLKmode, apply_result_size (), -1);
7715
7716  /* ??? The argsize value should be adjusted here.  */
7717
7718 /* Fetch the arg pointer from the ARGUMENTS block. */
7719 incoming_args = gen_reg_rtx (Pmode);
7720 emit_move_insn (incoming_args,
7721 gen_rtx (MEM, Pmode, arguments));
7722#ifndef STACK_GROWS_DOWNWARD
7723 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7724 incoming_args, 0, OPTAB_LIB_WIDEN);
7725#endif
7726
7727 /* Perform postincrements before actually calling the function. */
7728 emit_queue ();
7729
7730 /* Push a new argument block and copy the arguments. */
7731 do_pending_stack_adjust ();
7732 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
7733
7734 /* Push a block of memory onto the stack to store the memory arguments.
7735 Save the address in a register, and copy the memory arguments. ??? I
7736     haven't figured out how the calling convention macros affect this,
7737 but it's likely that the source and/or destination addresses in
7738 the block copy will need updating in machine specific ways. */
7739 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7740 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7741 gen_rtx (MEM, BLKmode, incoming_args),
7742 argsize,
7743 PARM_BOUNDARY / BITS_PER_UNIT);
7744
7745 /* Refer to the argument block. */
7746 apply_args_size ();
7747 arguments = gen_rtx (MEM, BLKmode, arguments);
7748
7749 /* Walk past the arg-pointer and structure value address. */
7750 size = GET_MODE_SIZE (Pmode);
7751 if (struct_value_rtx)
7752 size += GET_MODE_SIZE (Pmode);
7753
7754 /* Restore each of the registers previously saved. Make USE insns
7755 for each of these registers for use in making the call. */
7756 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7757 if ((mode = apply_args_mode[regno]) != VOIDmode)
7758 {
7759 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7760 if (size % align != 0)
7761 size = CEIL (size, align) * align;
7762 reg = gen_rtx (REG, mode, regno);
7763 emit_move_insn (reg,
7764 change_address (arguments, mode,
7765 plus_constant (XEXP (arguments, 0),
7766 size)));
7767
7768 push_to_sequence (use_insns);
7769 emit_insn (gen_rtx (USE, VOIDmode, reg));
7770 use_insns = get_insns ();
7771 end_sequence ();
7772 size += GET_MODE_SIZE (mode);
7773 }
7774
7775 /* Restore the structure value address unless this is passed as an
7776 "invisible" first argument. */
7777 size = GET_MODE_SIZE (Pmode);
7778 if (struct_value_rtx)
7779 {
7780 rtx value = gen_reg_rtx (Pmode);
7781 emit_move_insn (value,
7782 change_address (arguments, Pmode,
7783 plus_constant (XEXP (arguments, 0),
7784 size)));
7785 emit_move_insn (struct_value_rtx, value);
7786 if (GET_CODE (struct_value_rtx) == REG)
7787 {
7788 push_to_sequence (use_insns);
7789 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
7790 use_insns = get_insns ();
7791 end_sequence ();
7792	}
7793 size += GET_MODE_SIZE (Pmode);
7794 }
7795
7796 /* All arguments and registers used for the call are set up by now! */
7797 function = prepare_call_address (function, NULL_TREE, &use_insns);
7798
7799 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
7800 and we don't want to load it into a register as an optimization,
7801 because prepare_call_address already did it if it should be done. */
7802 if (GET_CODE (function) != SYMBOL_REF)
7803 function = memory_address (FUNCTION_MODE, function);
7804
7805 /* Generate the actual call instruction and save the return value. */
7806#ifdef HAVE_untyped_call
7807 if (HAVE_untyped_call)
7808 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7809 result, result_vector (1, result)));
7810 else
7811#endif
7812#ifdef HAVE_call_value
7813 if (HAVE_call_value)
7814 {
7815 rtx valreg = 0;
7816
7817 /* Locate the unique return register. It is not possible to
7818 express a call that sets more than one return register using
7819 call_value; use untyped_call for that. In fact, untyped_call
7820 only needs to save the return registers in the given block. */
7821 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7822 if ((mode = apply_result_mode[regno]) != VOIDmode)
7823 {
7824 if (valreg)
7825 abort (); /* HAVE_untyped_call required. */
7826 valreg = gen_rtx (REG, mode, regno);
7827 }
7828
7829 emit_call_insn (gen_call_value (valreg,
7830 gen_rtx (MEM, FUNCTION_MODE, function),
7831 const0_rtx, NULL_RTX, const0_rtx));
7832
7833 emit_move_insn (change_address (result, GET_MODE (valreg),
7834 XEXP (result, 0)),
7835 valreg);
7836 }
7837 else
7838#endif
7839 abort ();
7840
7841 /* Find the CALL insn we just emitted and write the USE insns before it. */
7842 for (call_insn = get_last_insn ();
7843 call_insn && GET_CODE (call_insn) != CALL_INSN;
7844 call_insn = PREV_INSN (call_insn))
7845 ;
7846
7847 if (! call_insn)
7848 abort ();
7849
7850 /* Put the USE insns before the CALL. */
7851 emit_insns_before (use_insns, call_insn);
7852
7853 /* Restore the stack. */
7854 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
7855
7856 /* Return the address of the result block. */
7857 return copy_addr_to_reg (XEXP (result, 0));
7858}
7859
7860/* Perform an untyped return.  */
7861
7862static void
7863expand_builtin_return (result)
7864 rtx result;
7865{
7866 int size, align, regno;
7867 enum machine_mode mode;
7868 rtx reg;
7869 rtx use_insns = 0;
7870
7871 apply_result_size ();
7872 result = gen_rtx (MEM, BLKmode, result);
7873
7874#ifdef HAVE_untyped_return
7875 if (HAVE_untyped_return)
7876 {
7877 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
7878 emit_barrier ();
7879 return;
7880 }
7881#endif
7882
7883 /* Restore the return value and note that each value is used. */
7884 size = 0;
7885 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7886 if ((mode = apply_result_mode[regno]) != VOIDmode)
7887 {
7888 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7889 if (size % align != 0)
7890 size = CEIL (size, align) * align;
7891 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
7892 emit_move_insn (reg,
7893 change_address (result, mode,
7894 plus_constant (XEXP (result, 0),
7895 size)));
7896
7897 push_to_sequence (use_insns);
7898 emit_insn (gen_rtx (USE, VOIDmode, reg));
7899 use_insns = get_insns ();
7900 end_sequence ();
7901 size += GET_MODE_SIZE (mode);
7902 }
7903
7904 /* Put the USE insns before the return. */
7905 emit_insns (use_insns);
7906
7907  /* Return whatever values were restored by jumping directly to the end
7908 of the function. */
7909 expand_null_return ();
7910}
7911\f
7912/* Expand code for a post- or pre- increment or decrement
7913 and return the RTX for the result.
7914 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
7915
7916static rtx
7917expand_increment (exp, post)
7918 register tree exp;
7919 int post;
7920{
7921 register rtx op0, op1;
7922 register rtx temp, value;
7923 register tree incremented = TREE_OPERAND (exp, 0);
7924 optab this_optab = add_optab;
7925 int icode;
7926 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7927 int op0_is_copy = 0;
7928 int single_insn = 0;
7929 /* 1 means we can't store into OP0 directly,
7930 because it is a subreg narrower than a word,
7931 and we don't dare clobber the rest of the word. */
7932 int bad_subreg = 0;
7933
7934  if (output_bytecode)
7935 {
7936 bc_expand_expr (exp);
7937 return NULL_RTX;
7938 }
7939
7940 /* Stabilize any component ref that might need to be
7941 evaluated more than once below. */
7942 if (!post
7943 || TREE_CODE (incremented) == BIT_FIELD_REF
7944 || (TREE_CODE (incremented) == COMPONENT_REF
7945 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
7946 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
7947 incremented = stabilize_reference (incremented);
7948 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
7949 ones into save exprs so that they don't accidentally get evaluated
7950 more than once by the code below. */
7951 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
7952 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
7953 incremented = save_expr (incremented);
7954
7955 /* Compute the operands as RTX.
7956 Note whether OP0 is the actual lvalue or a copy of it:
7957 I believe it is a copy iff it is a register or subreg
7958 and insns were generated in computing it. */
7959
7960 temp = get_last_insn ();
7961 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
7962
7963 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
7964     in place but instead must do sign- or zero-extension during assignment,
7965 so we copy it into a new register and let the code below use it as
7966 a copy.
7967
7968     Note that we can safely modify this SUBREG since it is known not to be
7969 shared (it was made by the expand_expr call above). */
7970
7971 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
7972 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
7973 else if (GET_CODE (op0) == SUBREG
7974 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
7975 {
7976 /* We cannot increment this SUBREG in place. If we are
7977 post-incrementing, get a copy of the old value. Otherwise,
7978 just mark that we cannot increment in place. */
7979 if (post)
7980 op0 = copy_to_reg (op0);
7981 else
7982 bad_subreg = 1;
7983 }
7984
7985 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
7986 && temp != get_last_insn ());
7987 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7988
7989 /* Decide whether incrementing or decrementing. */
7990 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
7991 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7992 this_optab = sub_optab;
7993
7994 /* Convert decrement by a constant into a negative increment. */
7995 if (this_optab == sub_optab
7996 && GET_CODE (op1) == CONST_INT)
7997 {
7998 op1 = GEN_INT (- INTVAL (op1));
7999 this_optab = add_optab;
8000 }
8001
8002 /* For a preincrement, see if we can do this with a single instruction. */
8003 if (!post)
8004 {
8005 icode = (int) this_optab->handlers[(int) mode].insn_code;
8006 if (icode != (int) CODE_FOR_nothing
8007 /* Make sure that OP0 is valid for operands 0 and 1
8008 of the insn we want to queue. */
8009 && (*insn_operand_predicate[icode][0]) (op0, mode)
8010 && (*insn_operand_predicate[icode][1]) (op0, mode)
8011 && (*insn_operand_predicate[icode][2]) (op1, mode))
8012 single_insn = 1;
8013 }
8014
8015 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8016 then we cannot just increment OP0. We must therefore contrive to
8017 increment the original value. Then, for postincrement, we can return
8018 OP0 since it is a copy of the old value. For preincrement, expand here
8019 unless we can do it with a single insn.
8020
8021 Likewise if storing directly into OP0 would clobber high bits
8022 we need to preserve (bad_subreg). */
8023 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8024 {
8025 /* This is the easiest way to increment the value wherever it is.
8026 Problems with multiple evaluation of INCREMENTED are prevented
8027 because either (1) it is a component_ref or preincrement,
8028 in which case it was stabilized above, or (2) it is an array_ref
8029 with constant index in an array in a register, which is
8030 safe to reevaluate. */
8031 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8032 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8033 ? MINUS_EXPR : PLUS_EXPR),
8034 TREE_TYPE (exp),
8035 incremented,
8036 TREE_OPERAND (exp, 1));
8037 temp = expand_assignment (incremented, newexp, ! post, 0);
8038 return post ? op0 : temp;
8039 }
8040
8041 if (post)
8042 {
8043 /* We have a true reference to the value in OP0.
8044 If there is an insn to add or subtract in this mode, queue it.
8045 Queueing the increment insn avoids the register shuffling
8046 that often results if we must increment now and first save
8047 the old value for subsequent use. */
8048
8049#if 0 /* Turned off to avoid making extra insn for indexed memref. */
8050 op0 = stabilize (op0);
8051#endif
8052
8053 icode = (int) this_optab->handlers[(int) mode].insn_code;
8054 if (icode != (int) CODE_FOR_nothing
8055 /* Make sure that OP0 is valid for operands 0 and 1
8056 of the insn we want to queue. */
8057 && (*insn_operand_predicate[icode][0]) (op0, mode)
8058 && (*insn_operand_predicate[icode][1]) (op0, mode))
8059 {
8060 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8061 op1 = force_reg (mode, op1);
8062
8063 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8064 }
8065 }
8066
8067 /* Preincrement, or we can't increment with one simple insn. */
8068 if (post)
8069 /* Save a copy of the value before inc or dec, to return it later. */
8070 temp = value = copy_to_reg (op0);
8071 else
8072 /* Arrange to return the incremented value. */
8073 /* Copy the rtx because expand_binop will protect from the queue,
8074 and the results of that would be invalid for us to return
8075 if our caller does emit_queue before using our result. */
8076 temp = copy_rtx (value = op0);
8077
8078 /* Increment however we can. */
8079 op1 = expand_binop (mode, this_optab, value, op1, op0,
8080 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8081 /* Make sure the value is stored into OP0. */
8082 if (op1 != op0)
8083 emit_move_insn (op0, op1);
8084
8085 return temp;
8086}
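
/* In user terms, the post/pre distinction implemented above.  A
   hypothetical sketch:  */
#if 0
  i = 5;  j = i++;	/* post: j == 5, i == 6; the saved copy is returned */
  i = 5;  j = ++i;	/* pre:  j == 6, i == 6; the new value is returned */
#endif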
8087\f
8088/* Expand all function calls contained within EXP, innermost ones first.
8089 But don't look within expressions that have sequence points.
8090 For each CALL_EXPR, record the rtx for its value
8091 in the CALL_EXPR_RTL field. */
8092
8093static void
8094preexpand_calls (exp)
8095 tree exp;
8096{
8097 register int nops, i;
8098 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8099
8100 if (! do_preexpand_calls)
8101 return;
8102
8103  /* Only expressions and references can contain calls.  */
8104
8105 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8106 return;
8107
8108 switch (TREE_CODE (exp))
8109 {
8110 case CALL_EXPR:
8111 /* Do nothing if already expanded. */
8112 if (CALL_EXPR_RTL (exp) != 0)
8113 return;
8114
8115 /* Do nothing to built-in functions. */
8116 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8117 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8118 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8119 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8120 return;
8121
8122 case COMPOUND_EXPR:
8123 case COND_EXPR:
8124 case TRUTH_ANDIF_EXPR:
8125 case TRUTH_ORIF_EXPR:
8126 /* If we find one of these, then we can be sure
8127 the adjust will be done for it (since it makes jumps).
8128 Do it now, so that if this is inside an argument
8129 of a function, we don't get the stack adjustment
8130 after some other args have already been pushed. */
8131 do_pending_stack_adjust ();
8132 return;
8133
8134 case BLOCK:
8135 case RTL_EXPR:
8136 case WITH_CLEANUP_EXPR:
8137 return;
8138
8139 case SAVE_EXPR:
8140 if (SAVE_EXPR_RTL (exp) != 0)
8141 return;
8142 }
8143
8144 nops = tree_code_length[(int) TREE_CODE (exp)];
8145 for (i = 0; i < nops; i++)
8146 if (TREE_OPERAND (exp, i) != 0)
8147 {
8148 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8149 if (type == 'e' || type == '<' || type == '1' || type == '2'
8150 || type == 'r')
8151 preexpand_calls (TREE_OPERAND (exp, i));
8152 }
8153}
8154\f
8155/* At the start of a function, record that we have no previously-pushed
8156 arguments waiting to be popped. */
8157
8158void
8159init_pending_stack_adjust ()
8160{
8161 pending_stack_adjust = 0;
8162}
8163
8164/* When exiting from function, if safe, clear out any pending stack adjust
8165 so the adjustment won't get done. */
8166
8167void
8168clear_pending_stack_adjust ()
8169{
8170#ifdef EXIT_IGNORE_STACK
8171 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8172 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8173 && ! flag_inline_functions)
8174 pending_stack_adjust = 0;
8175#endif
8176}
8177
8178/* Pop any previously-pushed arguments that have not been popped yet. */
8179
8180void
8181do_pending_stack_adjust ()
8182{
8183 if (inhibit_defer_pop == 0)
8184 {
8185 if (pending_stack_adjust != 0)
8186 adjust_stack (GEN_INT (pending_stack_adjust));
8187 pending_stack_adjust = 0;
8188 }
8189}
8190
8191/* Expand all cleanups up to OLD_CLEANUPS.
8192 Needed here, and also for language-dependent calls. */
8193
8194void
8195expand_cleanups_to (old_cleanups)
8196 tree old_cleanups;
8197{
8198  while (cleanups_this_call != old_cleanups)
8199    {
8200 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
8201 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8202 }
8203}
8204\f
8205/* Expand conditional expressions. */
8206
8207/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8208 LABEL is an rtx of code CODE_LABEL, in this function and all the
8209 functions here. */
8210
8211void
8212jumpifnot (exp, label)
8213 tree exp;
8214 rtx label;
8215{
8216 do_jump (exp, label, NULL_RTX);
8217}
8218
8219/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
8220
8221void
8222jumpif (exp, label)
8223 tree exp;
8224 rtx label;
8225{
8226 do_jump (exp, NULL_RTX, label);
8227}
8228
8229/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8230 the result is zero, or IF_TRUE_LABEL if the result is one.
8231 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8232 meaning fall through in that case.
8233
8234 do_jump always does any pending stack adjust except when it does not
8235 actually perform a jump. An example where there is no jump
8236 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8237
8238 This function is responsible for optimizing cases such as
8239 &&, || and comparison operators in EXP. */
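
/* For example, `if (a && b) body;' reaches the TRUTH_ANDIF_EXPR case below
   and expands to roughly
	do_jump (a, skip, 0);  do_jump (b, skip, 0);  body;  skip:
   so each operand jumps straight to the false label and no boolean value
   is ever materialized.  */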
8240
8241void
8242do_jump (exp, if_false_label, if_true_label)
8243 tree exp;
8244 rtx if_false_label, if_true_label;
8245{
8246 register enum tree_code code = TREE_CODE (exp);
8247 /* Some cases need to create a label to jump to
8248 in order to properly fall through.
8249 These cases set DROP_THROUGH_LABEL nonzero. */
8250 rtx drop_through_label = 0;
8251 rtx temp;
8252 rtx comparison = 0;
8253 int i;
8254 tree type;
8255
8256  emit_queue ();
8257
8258 switch (code)
8259 {
8260 case ERROR_MARK:
8261 break;
0006469d 8262
ca695ac9
JB
8263 case INTEGER_CST:
8264 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8265 if (temp)
8266 emit_jump (temp);
8267 break;
0006469d 8268
ca695ac9
JB
8269#if 0
8270 /* This is not true with #pragma weak */
8271 case ADDR_EXPR:
8272 /* The address of something can never be zero. */
8273 if (if_true_label)
8274 emit_jump (if_true_label);
8275 break;
8276#endif
0006469d 8277
ca695ac9
JB
8278 case NOP_EXPR:
8279 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8280 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8281 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8282 goto normal;
8283 case CONVERT_EXPR:
8284 /* If we are narrowing the operand, we have to do the compare in the
8285 narrower mode. */
8286 if ((TYPE_PRECISION (TREE_TYPE (exp))
8287 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8288 goto normal;
8289 case NON_LVALUE_EXPR:
8290 case REFERENCE_EXPR:
8291 case ABS_EXPR:
8292 case NEGATE_EXPR:
8293 case LROTATE_EXPR:
8294 case RROTATE_EXPR:
8295 /* These cannot change zero->non-zero or vice versa. */
8296 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8297 break;
0006469d 8298
ca695ac9
JB
8299#if 0
8300 /* This is never less insns than evaluating the PLUS_EXPR followed by
8301 a test and can be longer if the test is eliminated. */
8302 case PLUS_EXPR:
8303 /* Reduce to minus. */
8304 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8305 TREE_OPERAND (exp, 0),
8306 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8307 TREE_OPERAND (exp, 1))));
8308 /* Process as MINUS. */
0006469d 8309#endif
0006469d 8310
ca695ac9
JB
8311 case MINUS_EXPR:
8312 /* Non-zero iff operands of minus differ. */
8313 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8314 TREE_OPERAND (exp, 0),
8315 TREE_OPERAND (exp, 1)),
8316 NE, NE);
8317 break;
904762c8 8318
ca695ac9
JB
8319 case BIT_AND_EXPR:
8320 /* If we are AND'ing with a small constant, do this comparison in the
8321 smallest type that fits. If the machine doesn't have comparisons
8322 that small, it will be converted back to the wider comparison.
8323 This helps if we are testing the sign bit of a narrower object.
8324 combine can't do this for us because it can't know whether a
8325 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
0006469d 8326
ca695ac9
JB
8327 if (! SLOW_BYTE_ACCESS
8328 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8329 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8330 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8331 && (type = type_for_size (i + 1, 1)) != 0
8332 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8333 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8334 != CODE_FOR_nothing))
8335 {
8336 do_jump (convert (type, exp), if_false_label, if_true_label);
8337 break;
8338 }
8339 goto normal;
904762c8 8340
ca695ac9
JB
8341 case TRUTH_NOT_EXPR:
8342 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8343 break;
0006469d 8344
ca695ac9
JB
8345 case TRUTH_ANDIF_EXPR:
8346 if (if_false_label == 0)
8347 if_false_label = drop_through_label = gen_label_rtx ();
8348 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8349 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8350 break;
0006469d 8351
ca695ac9
JB
8352 case TRUTH_ORIF_EXPR:
8353 if (if_true_label == 0)
8354 if_true_label = drop_through_label = gen_label_rtx ();
8355 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8356 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8357 break;
0006469d 8358
ca695ac9 8359 case COMPOUND_EXPR:
0088fcb1 8360 push_temp_slots ();
ca695ac9
JB
8361 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8362 free_temp_slots ();
0088fcb1 8363 pop_temp_slots ();
ca695ac9
JB
8364 emit_queue ();
8365 do_pending_stack_adjust ();
8366 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8367 break;
0006469d 8368
ca695ac9
JB
8369 case COMPONENT_REF:
8370 case BIT_FIELD_REF:
8371 case ARRAY_REF:
8372 {
8373 int bitsize, bitpos, unsignedp;
8374 enum machine_mode mode;
8375 tree type;
8376 tree offset;
8377 int volatilep = 0;
0006469d 8378
ca695ac9
JB
8379 /* Get description of this reference. We don't actually care
8380 about the underlying object here. */
8381 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8382 &mode, &unsignedp, &volatilep);
0006469d 8383
ca695ac9
JB
8384 type = type_for_size (bitsize, unsignedp);
8385 if (! SLOW_BYTE_ACCESS
8386 && type != 0 && bitsize >= 0
8387 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8388 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8389 != CODE_FOR_nothing))
8390 {
8391 do_jump (convert (type, exp), if_false_label, if_true_label);
8392 break;
8393 }
8394 goto normal;
8395 }
0006469d 8396
ca695ac9
JB
8397 case COND_EXPR:
8398 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8399 if (integer_onep (TREE_OPERAND (exp, 1))
8400 && integer_zerop (TREE_OPERAND (exp, 2)))
8401 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
904762c8 8402
ca695ac9
JB
8403 else if (integer_zerop (TREE_OPERAND (exp, 1))
8404 && integer_onep (TREE_OPERAND (exp, 2)))
8405 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0006469d 8406
ca695ac9
JB
8407 else
8408 {
8409 register rtx label1 = gen_label_rtx ();
8410 drop_through_label = gen_label_rtx ();
8411 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8412 /* Now the THEN-expression. */
8413 do_jump (TREE_OPERAND (exp, 1),
8414 if_false_label ? if_false_label : drop_through_label,
8415 if_true_label ? if_true_label : drop_through_label);
8416 /* In case the do_jump just above never jumps. */
8417 do_pending_stack_adjust ();
8418 emit_label (label1);
8419 /* Now the ELSE-expression. */
8420 do_jump (TREE_OPERAND (exp, 2),
8421 if_false_label ? if_false_label : drop_through_label,
8422 if_true_label ? if_true_label : drop_through_label);
8423 }
8424 break;
0006469d 8425
ca695ac9
JB
8426 case EQ_EXPR:
8427 if (integer_zerop (TREE_OPERAND (exp, 1)))
8428 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0766f239
RS
8429 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8430 == MODE_INT)
8431 &&
8432 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8433 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8434 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
ca695ac9
JB
8435 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8436 else
8437 comparison = compare (exp, EQ, EQ);
8438 break;
0006469d 8439
ca695ac9
JB
8440 case NE_EXPR:
8441 if (integer_zerop (TREE_OPERAND (exp, 1)))
8442 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
0766f239
RS
8443 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8444 == MODE_INT)
8445 &&
8446 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8447 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8448 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
ca695ac9
JB
8449 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8450 else
8451 comparison = compare (exp, NE, NE);
8452 break;
0006469d 8453
ca695ac9
JB
8454 case LT_EXPR:
8455 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8456 == MODE_INT)
8457 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8458 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8459 else
8460 comparison = compare (exp, LT, LTU);
8461 break;
0006469d 8462
ca695ac9
JB
8463 case LE_EXPR:
8464 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8465 == MODE_INT)
8466 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8467 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8468 else
8469 comparison = compare (exp, LE, LEU);
8470 break;
0006469d 8471
ca695ac9
JB
8472 case GT_EXPR:
8473 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8474 == MODE_INT)
8475 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8476 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8477 else
8478 comparison = compare (exp, GT, GTU);
8479 break;
0006469d 8480
ca695ac9
JB
8481 case GE_EXPR:
8482 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8483 == MODE_INT)
8484 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8485 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8486 else
8487 comparison = compare (exp, GE, GEU);
8488 break;
0006469d 8489
ca695ac9
JB
8490 default:
8491 normal:
8492 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8493#if 0
8494 /* This is not needed any more and causes poor code since it causes
8495 comparisons and tests from non-SI objects to have different code
8496 sequences. */
8497 /* Copy to register to avoid generating bad insns by cse
8498 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8499 if (!cse_not_expected && GET_CODE (temp) == MEM)
8500 temp = copy_to_reg (temp);
8501#endif
8502 do_pending_stack_adjust ();
8503 if (GET_CODE (temp) == CONST_INT)
8504 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8505 else if (GET_CODE (temp) == LABEL_REF)
8506 comparison = const_true_rtx;
8507 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8508 && !can_compare_p (GET_MODE (temp)))
8509 /* Note swapping the labels gives us not-equal. */
8510 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8511 else if (GET_MODE (temp) != VOIDmode)
8512 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8513 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8514 GET_MODE (temp), NULL_RTX, 0);
8515 else
8516 abort ();
8517 }
0006469d 8518
ca695ac9
JB
8519 /* Do any postincrements in the expression that was tested. */
8520 emit_queue ();
0006469d 8521
ca695ac9
JB
8522 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8523 straight into a conditional jump instruction as the jump condition.
8524 Otherwise, all the work has been done already. */
0006469d 8525
ca695ac9 8526 if (comparison == const_true_rtx)
0006469d 8527 {
ca695ac9
JB
8528 if (if_true_label)
8529 emit_jump (if_true_label);
0006469d 8530 }
ca695ac9
JB
8531 else if (comparison == const0_rtx)
8532 {
8533 if (if_false_label)
8534 emit_jump (if_false_label);
8535 }
8536 else if (comparison)
8537 do_jump_for_compare (comparison, if_false_label, if_true_label);
0006469d 8538
ca695ac9 8539 if (drop_through_label)
0006469d 8540 {
ca695ac9
JB
8541 /* If do_jump produces code that might be jumped around,
8542 do any stack adjusts from that code, before the place
8543 where control merges in. */
8544 do_pending_stack_adjust ();
8545 emit_label (drop_through_label);
8546 }
8547}
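/* Worked example (schematic): for EXP `a && b' (TRUTH_ANDIF_EXPR) the
   case above reduces to

	do_jump (a, if_false_label, NULL_RTX);	    skip B if A is zero
	do_jump (b, if_false_label, if_true_label);

   so the second operand is tested only when the first was nonzero,
   and no boolean value is ever materialized in a register.  */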
8548\f
8549/* Given a comparison expression EXP for values too wide to be compared
8550 with one insn, test the comparison and jump to the appropriate label.
8551 The code of EXP is ignored; we always test GT if SWAP is 0,
8552 and LT if SWAP is 1. */
0006469d 8553
ca695ac9
JB
8554static void
8555do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8556 tree exp;
8557 int swap;
8558 rtx if_false_label, if_true_label;
8559{
8560 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8561 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8562 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8563 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8564 rtx drop_through_label = 0;
8565 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
8566 int i;
0006469d 8567
ca695ac9
JB
8568 if (! if_true_label || ! if_false_label)
8569 drop_through_label = gen_label_rtx ();
8570 if (! if_true_label)
8571 if_true_label = drop_through_label;
8572 if (! if_false_label)
8573 if_false_label = drop_through_label;
0006469d 8574
ca695ac9
JB
8575 /* Compare a word at a time, high order first. */
8576 for (i = 0; i < nwords; i++)
8577 {
8578 rtx comp;
8579 rtx op0_word, op1_word;
0006469d 8580
ca695ac9
JB
8581 if (WORDS_BIG_ENDIAN)
8582 {
8583 op0_word = operand_subword_force (op0, i, mode);
8584 op1_word = operand_subword_force (op1, i, mode);
8585 }
8586 else
8587 {
8588 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8589 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8590 }
0006469d 8591
ca695ac9
JB
8592 /* All but high-order word must be compared as unsigned. */
8593 comp = compare_from_rtx (op0_word, op1_word,
8594 (unsignedp || i > 0) ? GTU : GT,
8595 unsignedp, word_mode, NULL_RTX, 0);
8596 if (comp == const_true_rtx)
8597 emit_jump (if_true_label);
8598 else if (comp != const0_rtx)
8599 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 8600
ca695ac9
JB
8601 /* Consider lower words only if these are equal. */
8602 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8603 NULL_RTX, 0);
8604 if (comp == const_true_rtx)
8605 emit_jump (if_false_label);
8606 else if (comp != const0_rtx)
8607 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8608 }
0006469d 8609
ca695ac9
JB
8610 if (if_false_label)
8611 emit_jump (if_false_label);
8612 if (drop_through_label)
8613 emit_label (drop_through_label);
0006469d
TW
8614}
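/* Schematic example, assuming 32-bit words and DImode operands
   (nwords == 2), of what the loop above emits:

	cmp op0.high,op1.high ; jgt TRUE    high words decide,
	jne FALSE                           unless they are equal
	cmp op0.low,op1.low   ; jgtu TRUE   low words compare unsigned
	jne FALSE
	jmp FALSE

   Only the high-order comparison honors the signedness of the type.  */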
8615
ca695ac9
JB
8616/* Compare OP0 with OP1, word at a time, in mode MODE.
8617 UNSIGNEDP says to do unsigned comparison.
8618 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
904762c8 8619
0006469d 8620static void
ca695ac9
JB
8621do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8622 enum machine_mode mode;
8623 int unsignedp;
8624 rtx op0, op1;
8625 rtx if_false_label, if_true_label;
0006469d 8626{
ca695ac9
JB
8627 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8628 rtx drop_through_label = 0;
8629 int i;
0006469d 8630
ca695ac9
JB
8631 if (! if_true_label || ! if_false_label)
8632 drop_through_label = gen_label_rtx ();
8633 if (! if_true_label)
8634 if_true_label = drop_through_label;
8635 if (! if_false_label)
8636 if_false_label = drop_through_label;
0006469d 8637
ca695ac9
JB
8638 /* Compare a word at a time, high order first. */
8639 for (i = 0; i < nwords; i++)
0006469d 8640 {
ca695ac9
JB
8641 rtx comp;
8642 rtx op0_word, op1_word;
0006469d 8643
ca695ac9
JB
8644 if (WORDS_BIG_ENDIAN)
8645 {
8646 op0_word = operand_subword_force (op0, i, mode);
8647 op1_word = operand_subword_force (op1, i, mode);
8648 }
8649 else
8650 {
8651 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8652 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8653 }
0006469d 8654
ca695ac9
JB
8655 /* All but high-order word must be compared as unsigned. */
8656 comp = compare_from_rtx (op0_word, op1_word,
8657 (unsignedp || i > 0) ? GTU : GT,
8658 unsignedp, word_mode, NULL_RTX, 0);
8659 if (comp == const_true_rtx)
8660 emit_jump (if_true_label);
8661 else if (comp != const0_rtx)
8662 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 8663
ca695ac9
JB
8664 /* Consider lower words only if these are equal. */
8665 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8666 NULL_RTX, 0);
8667 if (comp == const_true_rtx)
8668 emit_jump (if_false_label);
8669 else if (comp != const0_rtx)
8670 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8671 }
0006469d 8672
ca695ac9
JB
8673 if (if_false_label)
8674 emit_jump (if_false_label);
8675 if (drop_through_label)
8676 emit_label (drop_through_label);
0006469d 8677}
bbf6f052 8678
ca695ac9
JB
8679/* Given an EQ_EXPR expression EXP for values too wide to be compared
8680 with one insn, test the comparison and jump to the appropriate label. */
8681
8682static void
8683do_jump_by_parts_equality (exp, if_false_label, if_true_label)
8684 tree exp;
8685 rtx if_false_label, if_true_label;
bbf6f052 8686{
ca695ac9
JB
8687 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8688 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8689 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8690 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8691 int i;
8692 rtx drop_through_label = 0;
bbf6f052 8693
ca695ac9
JB
8694 if (! if_false_label)
8695 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 8696
ca695ac9
JB
8697 for (i = 0; i < nwords; i++)
8698 {
8699 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
8700 operand_subword_force (op1, i, mode),
8701 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
8702 word_mode, NULL_RTX, 0);
8703 if (comp == const_true_rtx)
8704 emit_jump (if_false_label);
8705 else if (comp != const0_rtx)
8706 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8707 }
1499e0a8 8708
ca695ac9
JB
8709 if (if_true_label)
8710 emit_jump (if_true_label);
8711 if (drop_through_label)
8712 emit_label (drop_through_label);
8713}
8714\f
8715/* Jump according to whether OP0 is 0.
8716 We assume that OP0 has an integer mode that is too wide
8717 for the available compare insns. */
1499e0a8 8718
ca695ac9
JB
8719static void
8720do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
8721 rtx op0;
8722 rtx if_false_label, if_true_label;
8723{
8724 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
8725 int i;
8726 rtx drop_through_label = 0;
1499e0a8 8727
ca695ac9
JB
8728 if (! if_false_label)
8729 drop_through_label = if_false_label = gen_label_rtx ();
1499e0a8 8730
ca695ac9
JB
8731 for (i = 0; i < nwords; i++)
8732 {
8733 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
8734 GET_MODE (op0)),
8735 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
8736 if (comp == const_true_rtx)
8737 emit_jump (if_false_label);
8738 else if (comp != const0_rtx)
8739 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8740 }
1499e0a8 8741
ca695ac9
JB
8742 if (if_true_label)
8743 emit_jump (if_true_label);
8744 if (drop_through_label)
8745 emit_label (drop_through_label);
8746}
bbf6f052 8747
ca695ac9
JB
8748/* Given a comparison expression in rtl form, output conditional branches to
8749 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
bbf6f052 8750
ca695ac9
JB
8751static void
8752do_jump_for_compare (comparison, if_false_label, if_true_label)
8753 rtx comparison, if_false_label, if_true_label;
8754{
8755 if (if_true_label)
a358cee0 8756 {
ca695ac9
JB
8757 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8758 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
8759 else
8760 abort ();
a358cee0 8761
ca695ac9
JB
8762 if (if_false_label)
8763 emit_jump (if_false_label);
c980ac49 8764 }
ca695ac9 8765 else if (if_false_label)
bbf6f052 8766 {
ca695ac9 8767 rtx insn;
f12f485a 8768 rtx prev = get_last_insn ();
ca695ac9 8769 rtx branch = 0;
bbf6f052 8770
f12f485a
RK
8771 if (prev != 0)
8772 prev = PREV_INSN (prev);
8773
ca695ac9
JB
8774 /* Output the branch with the opposite condition. Then try to invert
8775 what is generated. If more than one insn is a branch, or if the
8776 branch is not the last insn written, abort. If we can't invert
8777 the branch, make a true label, redirect this jump to that,
8778 emit a jump to the false label and define the true label. */
bbf6f052 8779
ca695ac9
JB
8780 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8781 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
8782 else
8783 abort ();
bbf6f052 8784
ca695ac9
JB
8785 /* Here we get the insn before what was just emitted.
8786 On some machines, emitting the branch can discard
8787 the previous compare insn and emit a replacement. */
8788 if (prev == 0)
8789 /* If there's only one preceding insn... */
8790 insn = get_insns ();
8791 else
8792 insn = NEXT_INSN (prev);
bbf6f052 8793
ca695ac9
JB
8794 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
8795 if (GET_CODE (insn) == JUMP_INSN)
8796 {
8797 if (branch)
8798 abort ();
8799 branch = insn;
8800 }
8801
8802 if (branch != get_last_insn ())
8803 abort ();
8804
127e4d19 8805 JUMP_LABEL (branch) = if_false_label;
ca695ac9
JB
8806 if (! invert_jump (branch, if_false_label))
8807 {
8808 if_true_label = gen_label_rtx ();
8809 redirect_jump (branch, if_true_label);
8810 emit_jump (if_false_label);
8811 emit_label (if_true_label);
bbf6f052
RK
8812 }
8813 }
ca695ac9
JB
8814}
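/* Sketch of the fallback path above when invert_jump fails: given the
   branch insn BRANCH that should have been inverted, the code builds

	if_true_label = gen_label_rtx ();
	redirect_jump (branch, if_true_label);	 original branch skips ahead
	emit_jump (if_false_label);		 unconditional jump to false
	emit_label (if_true_label);		 fall-through resumes here

   which is semantically equivalent but one jump longer.  */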
8815\f
8816/* Generate code for a comparison expression EXP
8817 (including code to compute the values to be compared)
8818 and set (CC0) according to the result.
8819 SIGNED_CODE should be the rtx operation for this comparison for
8820 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
8821
8822 We force a stack adjustment unless there are currently
8823 things pushed on the stack that aren't yet used. */
8824
8825static rtx
8826compare (exp, signed_code, unsigned_code)
8827 register tree exp;
8828 enum rtx_code signed_code, unsigned_code;
8829{
8830 register rtx op0
8831 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8832 register rtx op1
8833 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8834 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
8835 register enum machine_mode mode = TYPE_MODE (type);
8836 int unsignedp = TREE_UNSIGNED (type);
8837 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
bbf6f052 8838
ca695ac9
JB
8839 return compare_from_rtx (op0, op1, code, unsignedp, mode,
8840 ((mode == BLKmode)
8841 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
8842 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
8843}
bbf6f052 8844
ca695ac9
JB
8845/* Like compare but expects the values to compare as two rtx's.
8846 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 8847
ca695ac9
JB
8848 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
8849 compared.
bbf6f052 8850
ca695ac9
JB
8851 If ALIGN is non-zero, it is the alignment of this type; if zero, the
8852 size of MODE should be used. */
bbf6f052 8853
ca695ac9
JB
8854rtx
8855compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
8856 register rtx op0, op1;
8857 enum rtx_code code;
8858 int unsignedp;
8859 enum machine_mode mode;
8860 rtx size;
8861 int align;
8862{
8863 rtx tem;
bbf6f052 8864
ca695ac9
JB
8865 /* If one operand is constant, make it the second one. Only do this
8866 if the other operand is not constant as well. */
bbf6f052 8867
ca695ac9
JB
8868 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
8869 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
8870 {
8871 tem = op0;
8872 op0 = op1;
8873 op1 = tem;
8874 code = swap_condition (code);
8875 }
bbf6f052 8876
ca695ac9 8877 if (flag_force_mem)
bbf6f052 8878 {
ca695ac9
JB
8879 op0 = force_not_mem (op0);
8880 op1 = force_not_mem (op1);
8881 }
bbf6f052 8882
ca695ac9 8883 do_pending_stack_adjust ();
bbf6f052 8884
ca695ac9
JB
8885 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
8886 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
8887 return tem;
bbf6f052 8888
ca695ac9
JB
8889#if 0
8890 /* There's no need to do this now that combine.c can eliminate lots of
8891 sign extensions. This can be less efficient in certain cases on other
8892 machines. */
bbf6f052 8893
ca695ac9
JB
8894 /* If this is a signed equality comparison, we can do it as an
8895 unsigned comparison since zero-extension is cheaper than sign
8896 extension and comparisons with zero are done as unsigned. This is
8897 the case even on machines that can do fast sign extension, since
8898 zero-extension is easier to combine with other operations than
8899 sign-extension is. If we are comparing against a constant, we must
8900 convert it to what it would look like unsigned. */
8901 if ((code == EQ || code == NE) && ! unsignedp
8902 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
8903 {
8904 if (GET_CODE (op1) == CONST_INT
8905 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
8906 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
8907 unsignedp = 1;
bbf6f052 8908 }
ca695ac9
JB
8909#endif
8910
8911 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
bbf6f052 8912
ca695ac9 8913 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
bbf6f052
RK
8914}
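/* Note that constant operands fold at compile time: e.g.

	compare_from_rtx (GEN_INT (2), GEN_INT (3), LT, 0, SImode,
			  NULL_RTX, 0)

   returns const_true_rtx via simplify_relational_operation and emits
   no compare insn at all.  */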
8915\f
ca695ac9
JB
8916/* Generate code to calculate EXP using a store-flag instruction
8917 and return an rtx for the result. EXP is either a comparison
8918 or a TRUTH_NOT_EXPR whose operand is a comparison.
bbf6f052 8919
ca695ac9 8920 If TARGET is nonzero, store the result there if convenient.
bbf6f052 8921
ca695ac9
JB
8922 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
8923 cheap.
bbf6f052 8924
ca695ac9
JB
8925 Return zero if there is no suitable set-flag instruction
8926 available on this machine.
bbf6f052 8927
ca695ac9
JB
8928 Once expand_expr has been called on the arguments of the comparison,
8929 we are committed to doing the store flag, since it is not safe to
8930 re-evaluate the expression. We emit the store-flag insn by calling
8931 emit_store_flag, but only expand the arguments if we have a reason
8932 to believe that emit_store_flag will be successful. If we think that
8933 it will, but it isn't, we have to simulate the store-flag with a
8934 set/jump/set sequence. */
bbf6f052 8935
ca695ac9
JB
8936static rtx
8937do_store_flag (exp, target, mode, only_cheap)
8938 tree exp;
8939 rtx target;
8940 enum machine_mode mode;
8941 int only_cheap;
bbf6f052 8942{
ca695ac9
JB
8943 enum rtx_code code;
8944 tree arg0, arg1, type;
8945 tree tem;
8946 enum machine_mode operand_mode;
8947 int invert = 0;
8948 int unsignedp;
8949 rtx op0, op1;
8950 enum insn_code icode;
8951 rtx subtarget = target;
8952 rtx result, label, pattern, jump_pat;
bbf6f052 8953
ca695ac9
JB
8954 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8955 result at the end. We can't simply invert the test since it would
8956 have already been inverted if it were valid. This case occurs for
8957 some floating-point comparisons. */
8958
8959 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8960 invert = 1, exp = TREE_OPERAND (exp, 0);
8961
8962 arg0 = TREE_OPERAND (exp, 0);
8963 arg1 = TREE_OPERAND (exp, 1);
8964 type = TREE_TYPE (arg0);
8965 operand_mode = TYPE_MODE (type);
8966 unsignedp = TREE_UNSIGNED (type);
8967
8968 /* We won't bother with BLKmode store-flag operations because it would mean
8969 passing a lot of information to emit_store_flag. */
8970 if (operand_mode == BLKmode)
8971 return 0;
8972
8973 STRIP_NOPS (arg0);
8974 STRIP_NOPS (arg1);
8975
8976 /* Get the rtx comparison code to use. We know that EXP is a comparison
8977 operation of some type. Some comparisons against 1 and -1 can be
8978 converted to comparisons with zero. Do so here so that the tests
8979 below will be aware that we have a comparison with zero. These
8980 tests will not catch constants in the first operand, but constants
8981 are rarely passed as the first operand. */
8982
8983 switch (TREE_CODE (exp))
8984 {
8985 case EQ_EXPR:
8986 code = EQ;
8987 break;
8988 case NE_EXPR:
8989 code = NE;
8990 break;
8991 case LT_EXPR:
8992 if (integer_onep (arg1))
8993 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8994 else
8995 code = unsignedp ? LTU : LT;
8996 break;
8997 case LE_EXPR:
8998 if (! unsignedp && integer_all_onesp (arg1))
8999 arg1 = integer_zero_node, code = LT;
9000 else
9001 code = unsignedp ? LEU : LE;
9002 break;
9003 case GT_EXPR:
9004 if (! unsignedp && integer_all_onesp (arg1))
9005 arg1 = integer_zero_node, code = GE;
9006 else
9007 code = unsignedp ? GTU : GT;
9008 break;
9009 case GE_EXPR:
9010 if (integer_onep (arg1))
9011 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9012 else
9013 code = unsignedp ? GEU : GE;
9014 break;
9015 default:
9016 abort ();
9017 }
bbf6f052 9018
ca695ac9
JB
9019 /* Put a constant second. */
9020 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
bbf6f052 9021 {
ca695ac9
JB
9022 tem = arg0; arg0 = arg1; arg1 = tem;
9023 code = swap_condition (code);
bbf6f052 9024 }
bbf6f052 9025
ca695ac9
JB
9026 /* If this is an equality or inequality test of a single bit, we can
9027 do this by shifting the bit being tested to the low-order bit and
9028 masking the result with the constant 1. If the condition was EQ,
9029 we xor it with 1. This does not require an scc insn and is faster
9030 than an scc insn even if we have it. */
bbf6f052 9031
ca695ac9
JB
9032 if ((code == NE || code == EQ)
9033 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9034 && integer_pow2p (TREE_OPERAND (arg0, 1))
9035 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9036 {
9037 tree inner = TREE_OPERAND (arg0, 0);
9038 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9039 NULL_RTX, VOIDmode, 0)));
9040 int ops_unsignedp;
bbf6f052 9041
ca695ac9
JB
9042 /* If INNER is a right shift of a constant and it plus BITNUM does
9043 not overflow, adjust BITNUM and INNER. */
bbf6f052 9044
ca695ac9
JB
9045 if (TREE_CODE (inner) == RSHIFT_EXPR
9046 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9047 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9048 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9049 < TYPE_PRECISION (type)))
9050 {
9051 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9052 inner = TREE_OPERAND (inner, 0);
9053 }
bbf6f052 9054
ca695ac9
JB
9055 /* If we are going to be able to omit the AND below, we must do our
9056 operations as unsigned. If we must use the AND, we have a choice.
9057 Normally unsigned is faster, but for some machines signed is. */
9058 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
ad92c826
RK
9059#ifdef LOAD_EXTEND_OP
9060 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
ca695ac9
JB
9061#else
9062 : 1
9063#endif
9064 );
bbf6f052 9065
ca695ac9
JB
9066 if (subtarget == 0 || GET_CODE (subtarget) != REG
9067 || GET_MODE (subtarget) != operand_mode
9068 || ! safe_from_p (subtarget, inner))
9069 subtarget = 0;
e7c33f54 9070
ca695ac9 9071 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 9072
ca695ac9
JB
9073 if (bitnum != 0)
9074 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
0c316b20 9075 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 9076
ca695ac9
JB
9077 if (GET_MODE (op0) != mode)
9078 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 9079
ca695ac9 9080 if ((code == EQ && ! invert) || (code == NE && invert))
0c316b20 9081 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
ca695ac9 9082 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 9083
ca695ac9
JB
9084 /* Put the AND last so it can combine with more things. */
9085 if (bitnum != TYPE_PRECISION (type) - 1)
0c316b20 9086 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 9087
ca695ac9
JB
9088 return op0;
9089 }
bbf6f052 9090
ca695ac9
JB
9091 /* Now see if we are likely to be able to do this. Return if not. */
9092 if (! can_compare_p (operand_mode))
9093 return 0;
9094 icode = setcc_gen_code[(int) code];
9095 if (icode == CODE_FOR_nothing
9096 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9097 {
9098 /* We can only do this if it is one of the special cases that
9099 can be handled without an scc insn. */
9100 if ((code == LT && integer_zerop (arg1))
9101 || (! only_cheap && code == GE && integer_zerop (arg1)))
9102 ;
9103 else if (BRANCH_COST >= 0
9104 && ! only_cheap && (code == NE || code == EQ)
9105 && TREE_CODE (type) != REAL_TYPE
9106 && ((abs_optab->handlers[(int) operand_mode].insn_code
9107 != CODE_FOR_nothing)
9108 || (ffs_optab->handlers[(int) operand_mode].insn_code
9109 != CODE_FOR_nothing)))
9110 ;
9111 else
9112 return 0;
9113 }
9114
9115 preexpand_calls (exp);
9116 if (subtarget == 0 || GET_CODE (subtarget) != REG
9117 || GET_MODE (subtarget) != operand_mode
9118 || ! safe_from_p (subtarget, arg1))
9119 subtarget = 0;
bbf6f052 9120
ca695ac9
JB
9121 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9122 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
bbf6f052 9123
ca695ac9
JB
9124 if (target == 0)
9125 target = gen_reg_rtx (mode);
bbf6f052 9126
ca695ac9
JB
9127 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9128 because, if the emit_store_flag does anything it will succeed and
9129 OP0 and OP1 will not be used subsequently. */
bbf6f052 9130
ca695ac9
JB
9131 result = emit_store_flag (target, code,
9132 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9133 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9134 operand_mode, unsignedp, 1);
bbf6f052 9135
ca695ac9
JB
9136 if (result)
9137 {
9138 if (invert)
9139 result = expand_binop (mode, xor_optab, result, const1_rtx,
9140 result, 0, OPTAB_LIB_WIDEN);
9141 return result;
9142 }
bbf6f052 9143
ca695ac9
JB
9144 /* If this failed, we have to do this with set/compare/jump/set code. */
9145 if (target == 0 || GET_CODE (target) != REG
9146 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9147 target = gen_reg_rtx (GET_MODE (target));
bbf6f052 9148
ca695ac9
JB
9149 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9150 result = compare_from_rtx (op0, op1, code, unsignedp,
9151 operand_mode, NULL_RTX, 0);
9152 if (GET_CODE (result) == CONST_INT)
9153 return (((result == const0_rtx && ! invert)
9154 || (result != const0_rtx && invert))
9155 ? const0_rtx : const1_rtx);
bbf6f052 9156
ca695ac9
JB
9157 label = gen_label_rtx ();
9158 if (bcc_gen_fctn[(int) code] == 0)
9159 abort ();
bbf6f052 9160
ca695ac9
JB
9161 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9162 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9163 emit_label (label);
bbf6f052 9164
ca695ac9
JB
9165 return target;
9166}
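/* Worked example of the single-bit case above: for `(x & 8) != 0' the
   emitted computation is, in effect,

	op0 = x >> 3;		expand_shift by bitnum == 3
	op0 = op0 & 1;		expand_and, since bit 3 is not the
				sign bit of a 32-bit int

   and for `(x & 8) == 0' the value is XORed with 1 (xor_optab) before
   the final AND; no scc insn or branch is needed either way.  */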
9167\f
9168/* Generate a tablejump instruction (used for switch statements). */
bbf6f052 9169
ca695ac9 9170#ifdef HAVE_tablejump
bbf6f052 9171
ca695ac9
JB
9172/* INDEX is the value being switched on, with the lowest value
9173 in the table already subtracted.
9174 MODE is its expected mode (needed if INDEX is constant).
9175 RANGE is the length of the jump table.
9176 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
bbf6f052 9177
ca695ac9
JB
9178 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9179 index value is out of range. */
bbf6f052 9180
ca695ac9
JB
9181void
9182do_tablejump (index, mode, range, table_label, default_label)
9183 rtx index, range, table_label, default_label;
9184 enum machine_mode mode;
9185{
9186 register rtx temp, vector;
bbf6f052 9187
ca695ac9
JB
9188 /* Do an unsigned comparison (in the proper mode) between the index
9189 expression and the value which represents the length of the range.
9190 Since we just finished subtracting the lower bound of the range
9191 from the index expression, this comparison allows us to simultaneously
9192 check that the original index expression value is both greater than
9193 or equal to the minimum value of the range and less than or equal to
9194 the maximum value of the range. */
bbf6f052 9195
bf500664
RK
9196 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9197 emit_jump_insn (gen_bgtu (default_label));
bbf6f052 9198
ca695ac9
JB
9199 /* If index is in range, it must fit in Pmode.
9200 Convert to Pmode so we can index with it. */
9201 if (mode != Pmode)
9202 index = convert_to_mode (Pmode, index, 1);
bbf6f052 9203
ca695ac9
JB
9204 /* Don't let a MEM slip through, because then INDEX that comes
9205 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9206 and break_out_memory_refs will go to work on it and mess it up. */
9207#ifdef PIC_CASE_VECTOR_ADDRESS
9208 if (flag_pic && GET_CODE (index) != REG)
9209 index = copy_to_mode_reg (Pmode, index);
9210#endif
bbf6f052 9211
ca695ac9
JB
9212 /* If flag_force_addr were to affect this address
9213 it could interfere with the tricky assumptions made
9214 about addresses that contain label-refs,
9215 which may be valid only very near the tablejump itself. */
9216 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9217 GET_MODE_SIZE, because this indicates how large insns are. The other
9218 uses should all be Pmode, because they are addresses. This code
9219 could fail if addresses and insns are not the same size. */
9220 index = gen_rtx (PLUS, Pmode,
9221 gen_rtx (MULT, Pmode, index,
9222 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9223 gen_rtx (LABEL_REF, Pmode, table_label));
9224#ifdef PIC_CASE_VECTOR_ADDRESS
9225 if (flag_pic)
9226 index = PIC_CASE_VECTOR_ADDRESS (index);
9227 else
9228#endif
9229 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9230 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9231 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9232 RTX_UNCHANGING_P (vector) = 1;
9233 convert_move (temp, vector, 0);
bbf6f052 9234
ca695ac9 9235 emit_jump_insn (gen_tablejump (temp, table_label));
bbf6f052 9236
ca695ac9
JB
9237#ifndef CASE_VECTOR_PC_RELATIVE
9238 /* If we are generating PIC code or if the table is PC-relative, the
9239 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9240 if (! flag_pic)
9241 emit_barrier ();
bbf6f052 9242#endif
ca695ac9 9243}
bbf6f052 9244
ca695ac9 9245#endif /* HAVE_tablejump */
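/* Address-arithmetic sketch for do_tablejump: with a 4-byte
   CASE_VECTOR_MODE and case index I (lower bound already subtracted),
   the dispatch target is fetched from

	(mem (plus (mult I 4) (label_ref table_label)))

   loaded into a register, and jumped through by gen_tablejump.  */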
bbf6f052 9246
bbf6f052 9247
ca695ac9
JB
9248/* Emit a suitable bytecode to load a value from memory, assuming a pointer
9249 to that value is on the top of the stack. The resulting type is TYPE, and
9250 the source declaration is DECL. */
bbf6f052 9251
ca695ac9
JB
9252void
9253bc_load_memory (type, decl)
9254 tree type, decl;
9255{
9256 enum bytecode_opcode opcode;
9257
9258
9259 /* Bit fields are special. We only know about signed and
9260 unsigned ints, and enums. The latter are treated as
9261 signed integers. */
9262
9263 if (DECL_BIT_FIELD (decl))
9264 if (TREE_CODE (type) == ENUMERAL_TYPE
9265 || TREE_CODE (type) == INTEGER_TYPE)
9266 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9267 else
9268 abort ();
9269 else
9270 /* See corresponding comment in bc_store_memory(). */
9271 if (TYPE_MODE (type) == BLKmode
9272 || TYPE_MODE (type) == VOIDmode)
9273 return;
9274 else
6bd6178d 9275 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
bbf6f052 9276
ca695ac9
JB
9277 if (opcode == neverneverland)
9278 abort ();
9279
9280 bc_emit_bytecode (opcode);
9281
9282#ifdef DEBUG_PRINT_CODE
9283 fputc ('\n', stderr);
9284#endif
bbf6f052 9285}
bbf6f052 9286
bbf6f052 9287
ca695ac9
JB
9288/* Store the contents of the second stack slot to the address in the
9289 top stack slot. DECL is the declaration of the destination and is used
9290 to determine whether we're dealing with a bitfield. */
bbf6f052 9291
ca695ac9
JB
9292void
9293bc_store_memory (type, decl)
9294 tree type, decl;
9295{
9296 enum bytecode_opcode opcode;
9297
9298
9299 if (DECL_BIT_FIELD (decl))
f81497d9 9300 {
ca695ac9
JB
9301 if (TREE_CODE (type) == ENUMERAL_TYPE
9302 || TREE_CODE (type) == INTEGER_TYPE)
9303 opcode = sstoreBI;
f81497d9 9304 else
ca695ac9 9305 abort ();
f81497d9 9306 }
ca695ac9
JB
9307 else
9308 if (TYPE_MODE (type) == BLKmode)
9309 {
9310 /* Copy structure. This expands to a block copy instruction, storeBLK.
9311 In addition to the arguments expected by the other store instructions,
9312 it also expects a type size (SImode) on top of the stack, which is the
9313 structure size in size units (usually bytes). The first two arguments
9314 are already on the stack, so we just put the size on level 1. For some
9315 languages the size may be variable, which is why we don't encode it
9316 as a storeBLK literal, but rather treat it as a full-fledged expression. */
9317
9318 bc_expand_expr (TYPE_SIZE (type));
9319 opcode = storeBLK;
9320 }
9321 else
6bd6178d 9322 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
f81497d9 9323
ca695ac9
JB
9324 if (opcode == neverneverland)
9325 abort ();
9326
9327 bc_emit_bytecode (opcode);
9328
9329#ifdef DEBUG_PRINT_CODE
9330 fputc ('\n', stderr);
9331#endif
f81497d9
RS
9332}
9333
f81497d9 9334
ca695ac9
JB
9335/* Allocate local stack space sufficient to hold a value of the given
9336 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9337 integral power of 2. A special case is locals of type VOID, which
9338 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9339 remapped into the corresponding attribute of SI. */
9340
9341rtx
9342bc_allocate_local (size, alignment)
9343 int size, alignment;
f81497d9 9344{
ca695ac9
JB
9345 rtx retval;
9346 int byte_alignment;
f81497d9 9347
ca695ac9
JB
9348 if (size < 0)
9349 abort ();
f81497d9 9350
ca695ac9
JB
9351 /* Normalize size and alignment */
9352 if (!size)
9353 size = UNITS_PER_WORD;
bbf6f052 9354
ca695ac9
JB
9355 if (alignment < BITS_PER_UNIT)
9356 byte_alignment = 1 << (INT_ALIGN - 1);
9357 else
9358 /* Convert the alignment from bits to bytes. */
9359 byte_alignment = alignment / BITS_PER_UNIT;
bbf6f052 9360
ca695ac9
JB
9361 if (local_vars_size & (byte_alignment - 1))
9362 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
bbf6f052 9363
ca695ac9
JB
9364 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9365 local_vars_size += size;
bbf6f052 9366
ca695ac9 9367 return retval;
bbf6f052
RK
9368}
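/* Worked example: with local_vars_size == 5, a request for SIZE == 4
   at ALIGNMENT == 32 bits gives byte_alignment == 4; the offset is
   rounded up from 5 to 8, the local is placed at offset 8, and
   local_vars_size becomes 12.  */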
9369
bbf6f052 9370
ca695ac9
JB
9371/* Allocate variable-sized local array. Variable-sized arrays are
9372 actually pointers to the address in memory where they are stored. */
9373
9374rtx
9375bc_allocate_variable_array (size)
9376 tree size;
bbf6f052 9377{
ca695ac9
JB
9378 rtx retval;
9379 const int ptralign = (1 << (PTR_ALIGN - 1));
bbf6f052 9380
ca695ac9
JB
9381 /* Align pointer */
9382 if (local_vars_size & ptralign)
9383 local_vars_size += ptralign - (local_vars_size & ptralign);
bbf6f052 9384
ca695ac9
JB
9385 /* Note down local space needed: pointer to block; also return
9386 dummy rtx */
bbf6f052 9387
ca695ac9
JB
9388 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9389 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
9390 return retval;
bbf6f052 9391}
bbf6f052 9392
bbf6f052 9393
ca695ac9
JB
9394/* Push the machine address for the given external variable offset. */
9395void
9396bc_load_externaddr (externaddr)
9397 rtx externaddr;
9398{
9399 bc_emit_bytecode (constP);
e7a42772
JB
9400 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
9401 BYTECODE_BC_LABEL (externaddr)->offset);
bbf6f052 9402
ca695ac9
JB
9403#ifdef DEBUG_PRINT_CODE
9404 fputc ('\n', stderr);
9405#endif
bbf6f052
RK
9406}
9407
bbf6f052 9408
ca695ac9
JB
9409static char *
9410bc_strdup (s)
9411 char *s;
bbf6f052 9412{
5e70898c
RS
9413 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
9414 strcpy (new, s);
9415 return new;
ca695ac9 9416}
bbf6f052 9417
bbf6f052 9418
ca695ac9
JB
9419/* Like above, but expects an IDENTIFIER. */
9420void
9421bc_load_externaddr_id (id, offset)
9422 tree id;
9423 int offset;
9424{
9425 if (!IDENTIFIER_POINTER (id))
9426 abort ();
bbf6f052 9427
ca695ac9
JB
9428 bc_emit_bytecode (constP);
9429 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
bbf6f052 9430
ca695ac9
JB
9431#ifdef DEBUG_PRINT_CODE
9432 fputc ('\n', stderr);
9433#endif
9434}
bbf6f052 9435
bbf6f052 9436
ca695ac9
JB
9437/* Push the machine address for the given local variable offset. */
9438void
9439bc_load_localaddr (localaddr)
9440 rtx localaddr;
9441{
e7a42772 9442 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
bbf6f052 9443}
bbf6f052 9444
bbf6f052 9445
ca695ac9
JB
9446/* Push the machine address for the given parameter offset.
9447 NOTE: offset is in bits. */
9448void
9449bc_load_parmaddr (parmaddr)
9450 rtx parmaddr;
bbf6f052 9451{
e7a42772
JB
9452 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
9453 / BITS_PER_UNIT));
ca695ac9 9454}
bbf6f052 9455
ca695ac9
JB
9456
9457/* Convert a[i] into *(a + i). */
9458tree
9459bc_canonicalize_array_ref (exp)
9460 tree exp;
9461{
9462 tree type = TREE_TYPE (exp);
9463 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
9464 TREE_OPERAND (exp, 0));
9465 tree index = TREE_OPERAND (exp, 1);
9466
9467
9468 /* Convert the integer argument to a type the same size as a pointer
9469 so the multiply won't overflow spuriously. */
9470
9471 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
9472 index = convert (type_for_size (POINTER_SIZE, 0), index);
9473
9474 /* The array address isn't volatile even if the array is.
9475 (Of course this isn't terribly relevant since the bytecode
9476 translator treats nearly everything as volatile anyway.) */
9477 TREE_THIS_VOLATILE (array_adr) = 0;
9478
9479 return build1 (INDIRECT_REF, type,
9480 fold (build (PLUS_EXPR,
9481 TYPE_POINTER_TO (type),
9482 array_adr,
9483 fold (build (MULT_EXPR,
9484 TYPE_POINTER_TO (type),
9485 index,
9486 size_in_bytes (type))))));
bbf6f052
RK
9487}
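/* Example of the rewrite above, assuming 4-byte ints: for `int a[10]'
   the reference a[i] becomes the tree equivalent of

	*(&a + i * 4)

   with the index first widened to a pointer-sized integer and the
   element size supplied by size_in_bytes (type).  */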
9488
bbf6f052 9489
ca695ac9
JB
9490/* Load the address of the component referenced by the given
9491 COMPONENT_REF expression.
bbf6f052 9492
ca695ac9 9493 Returns innermost lvalue. */
bbf6f052 9494
ca695ac9
JB
9495tree
9496bc_expand_component_address (exp)
9497 tree exp;
bbf6f052 9498{
ca695ac9
JB
9499 tree tem, chain;
9500 enum machine_mode mode;
9501 int bitpos = 0;
9502 HOST_WIDE_INT SIval;
a7c5971a 9503
bbf6f052 9504
ca695ac9
JB
9505 tem = TREE_OPERAND (exp, 1);
9506 mode = DECL_MODE (tem);
bbf6f052 9507
ca695ac9
JB
9508
9509 /* Compute cumulative bit offset for nested component refs
9510 and array refs, and find the ultimate containing object. */
9511
9512 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
bbf6f052 9513 {
ca695ac9
JB
9514 if (TREE_CODE (tem) == COMPONENT_REF)
9515 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
9516 else
9517 if (TREE_CODE (tem) == ARRAY_REF
9518 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9519 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
bbf6f052 9520
ca695ac9
JB
9521 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
9522 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
9523 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
9524 else
9525 break;
9526 }
bbf6f052 9527
c02bd5d9 9528 bc_expand_expr (tem);
bbf6f052 9529
cd1b4b44 9530
ca695ac9
JB
9531 /* For bitfields also push their offset and size */
9532 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
9533 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
9534 else
9535 if ((SIval = bitpos / BITS_PER_UNIT) != 0)
9536 bc_emit_instruction (addconstPSI, SIval);
bbf6f052 9537
ca695ac9 9538 return (TREE_OPERAND (exp, 1));
bbf6f052 9539}
e7c33f54 9540
bbf6f052 9541
ca695ac9
JB
9542/* Emit code to push two SI constants */
9543void
9544bc_push_offset_and_size (offset, size)
9545 HOST_WIDE_INT offset, size;
9546{
9547 bc_emit_instruction (constSI, offset);
9548 bc_emit_instruction (constSI, size);
9549}
bbf6f052 9550
bbf6f052 9551
ca695ac9
JB
9552/* Emit byte code to push the address of the given lvalue expression to
9553 the stack. If it's a bit field, we also push offset and size info.
bbf6f052 9554
ca695ac9
JB
9555 Returns innermost component, which allows us to determine not only
9556 its type, but also whether it's a bitfield. */
9557
9558tree
9559bc_expand_address (exp)
bbf6f052 9560 tree exp;
bbf6f052 9561{
ca695ac9
JB
9562 /* Safeguard */
9563 if (!exp || TREE_CODE (exp) == ERROR_MARK)
9564 return (exp);
bbf6f052 9565
e7c33f54 9566
ca695ac9
JB
9567 switch (TREE_CODE (exp))
9568 {
9569 case ARRAY_REF:
e7c33f54 9570
ca695ac9 9571 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
e7c33f54 9572
ca695ac9 9573 case COMPONENT_REF:
bbf6f052 9574
ca695ac9 9575 return (bc_expand_component_address (exp));
bbf6f052 9576
ca695ac9 9577 case INDIRECT_REF:
bbf6f052 9578
ca695ac9
JB
9579 bc_expand_expr (TREE_OPERAND (exp, 0));
9580
9581 /* For variable-sized types: retrieve pointer. Sometimes the
9582 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
9583 also make sure we have an operand, just in case... */
9584
9585 if (TREE_OPERAND (exp, 0)
9586 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
9587 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
9588 bc_emit_instruction (loadP);
9589
9590 /* If packed, also return offset and size */
9591 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
9592
9593 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
9594 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
9595
9596 return (TREE_OPERAND (exp, 0));
9597
9598 case FUNCTION_DECL:
9599
e7a42772
JB
9600 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9601 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
bbf6f052 9602 break;
ca695ac9
JB
9603
9604 case PARM_DECL:
9605
9606 bc_load_parmaddr (DECL_RTL (exp));
9607
9608 /* For variable-sized types: retrieve pointer */
9609 if (TYPE_SIZE (TREE_TYPE (exp))
9610 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9611 bc_emit_instruction (loadP);
9612
9613 /* If packed, also return offset and size */
9614 if (DECL_BIT_FIELD (exp))
9615 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9616 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9617
bbf6f052 9618 break;
ca695ac9
JB
9619
9620 case RESULT_DECL:
9621
9622 bc_emit_instruction (returnP);
bbf6f052 9623 break;
ca695ac9
JB
9624
9625 case VAR_DECL:
9626
9627#if 0
e7a42772 9628 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
9629 bc_load_externaddr (DECL_RTL (exp));
9630#endif
9631
9632 if (DECL_EXTERNAL (exp))
e7a42772 9633 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
eb862a37 9634 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
bbf6f052 9635 else
ca695ac9
JB
9636 bc_load_localaddr (DECL_RTL (exp));
9637
9638 /* For variable-sized types: retrieve pointer */
9639 if (TYPE_SIZE (TREE_TYPE (exp))
9640 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9641 bc_emit_instruction (loadP);
9642
9643 /* If packed, also return offset and size */
9644 if (DECL_BIT_FIELD (exp))
9645 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9646 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9647
bbf6f052 9648 break;
ca695ac9
JB
9649
9650 case STRING_CST:
9651 {
9652 rtx r;
9653
9654 bc_emit_bytecode (constP);
9655 r = output_constant_def (exp);
e7a42772 9656 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
ca695ac9
JB
9657
9658#ifdef DEBUG_PRINT_CODE
9659 fputc ('\n', stderr);
9660#endif
9661 }
bbf6f052 9662 break;
ca695ac9 9663
bbf6f052 9664 default:
bbf6f052 9665
ca695ac9
JB
9666 abort();
9667 break;
bbf6f052
RK
9668 }
9669
ca695ac9
JB
9670 /* Most lvalues don't have components. */
9671 return (exp);
9672}
bbf6f052 9673
ca695ac9
JB
9674
9675/* Emit a type code to be used by the runtime support in handling
9676 parameter passing. The type code consists of the machine mode
9677 plus the minimal alignment shifted left 8 bits. */
9678
9679tree
9680bc_runtime_type_code (type)
9681 tree type;
9682{
9683 int val;
9684
9685 switch (TREE_CODE (type))
bbf6f052 9686 {
ca695ac9
JB
9687 case VOID_TYPE:
9688 case INTEGER_TYPE:
9689 case REAL_TYPE:
9690 case COMPLEX_TYPE:
9691 case ENUMERAL_TYPE:
9692 case POINTER_TYPE:
9693 case RECORD_TYPE:
9694
6bd6178d 9695 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
ca695ac9
JB
9696 break;
9697
9698 case ERROR_MARK:
9699
9700 val = 0;
9701 break;
9702
9703 default:
af508edd 9704
ca695ac9
JB
9705 abort ();
9706 }
9707 return build_int_2 (val, 0);
9708}
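/* Example encoding: for a 32-bit-aligned SImode integer the returned
   code is

	(int) SImode | 32 << 8

   so the runtime recovers the machine mode from the low byte and the
   minimal alignment, in bits, from the upper bits.  */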
af508edd 9709
af508edd 9710
ca695ac9
JB
9711/* Generate constructor label */
9712char *
9713bc_gen_constr_label ()
9714{
9715 static int label_counter;
9716 static char label[20];
bbf6f052 9717
ca695ac9 9718 sprintf (label, "*LR%d", label_counter++);
bbf6f052 9719
ca695ac9
JB
9720 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
9721}
bbf6f052 9722
bbf6f052 9723
ca695ac9
JB
9724/* Evaluate constructor CONSTR and return pointer to it on level one. We
9725 expand the constructor data as static data, and push a pointer to it.
9726 The pointer is put in the pointer table and is retrieved by a constP
9727 bytecode instruction. We then loop and store each constructor member in
9728 the corresponding component. Finally, we return the original pointer on
9729 the stack. */
af508edd 9730
ca695ac9
JB
9731void
9732bc_expand_constructor (constr)
9733 tree constr;
9734{
9735 char *l;
9736 HOST_WIDE_INT ptroffs;
9737 rtx constr_rtx;
bbf6f052 9738
ca695ac9
JB
9739
9740 /* Literal constructors are handled as constants, whereas
9741 non-literals are evaluated and stored element by element
9742 into the data segment. */
9743
9744 /* Allocate space in the proper segment and push a pointer
9745 to that space on the stack. */
bbf6f052 9746
ca695ac9 9747 l = bc_gen_constr_label ();
bbf6f052 9748
ca695ac9 9749 if (TREE_CONSTANT (constr))
bbf6f052 9750 {
ca695ac9
JB
9751 text_section ();
9752
9753 bc_emit_const_labeldef (l);
9754 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
bbf6f052 9755 }
ca695ac9
JB
9756 else
9757 {
9758 data_section ();
bbf6f052 9759
ca695ac9
JB
9760 bc_emit_data_labeldef (l);
9761 bc_output_data_constructor (constr);
9762 }
bbf6f052 9763
ca695ac9
JB
9764
9765 /* Add reference to pointer table and recall pointer to stack;
9766 this code is common for both types of constructors: literals
9767 and non-literals. */
bbf6f052 9768
de7d9320
JB
9769 ptroffs = bc_define_pointer (l);
9770 bc_emit_instruction (constP, ptroffs);
d39985fa 9771
ca695ac9
JB
9772 /* This is all that has to be done if it's a literal. */
9773 if (TREE_CONSTANT (constr))
9774 return;
bbf6f052 9775
ca695ac9
JB
9776
9777 /* At this point, we have the pointer to the structure on top of the stack.
9778 Generate sequences of store_memory calls for the constructor. */
9779
9780 /* constructor type is structure */
9781 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
e7c33f54 9782 {
ca695ac9
JB
9783 register tree elt;
9784
9785 /* If the constructor has fewer fields than the structure,
9786 clear the whole structure first. */
9787
9788 if (list_length (CONSTRUCTOR_ELTS (constr))
9789 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
9790 {
6d6e61ce 9791 bc_emit_instruction (duplicate);
ca695ac9
JB
9792 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9793 bc_emit_instruction (clearBLK);
9794 }
9795
9796 /* Store each element of the constructor into the corresponding
9797 field of TARGET. */
9798
9799 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
9800 {
9801 register tree field = TREE_PURPOSE (elt);
9802 register enum machine_mode mode;
9803 int bitsize;
9804 int bitpos;
9805 int unsignedp;
9806
9807 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
9808 mode = DECL_MODE (field);
9809 unsignedp = TREE_UNSIGNED (field);
9810
9811 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
9812
9813 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9814 /* The alignment of TARGET is
9815 at least what its type requires. */
9816 VOIDmode, 0,
9817 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9818 int_size_in_bytes (TREE_TYPE (constr)));
9819 }
e7c33f54 9820 }
ca695ac9
JB
9821 else
9822
9823 /* Constructor type is array */
9824 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
9825 {
9826 register tree elt;
9827 register int i;
9828 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
9829 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
9830 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
9831 tree elttype = TREE_TYPE (TREE_TYPE (constr));
9832
9833 /* If the constructor has fewer fields than the structure,
9834 clear the whole structure first. */
9835
9836 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
9837 {
6d6e61ce 9838 bc_emit_instruction (duplicate);
ca695ac9
JB
9839 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9840 bc_emit_instruction (clearBLK);
9841 }
9842
9843
9844 /* Store each element of the constructor into the corresponding
9845 element of TARGET, determined by counting the elements. */
9846
9847 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
9848 elt;
9849 elt = TREE_CHAIN (elt), i++)
9850 {
9851 register enum machine_mode mode;
9852 int bitsize;
9853 int bitpos;
9854 int unsignedp;
9855
9856 mode = TYPE_MODE (elttype);
9857 bitsize = GET_MODE_BITSIZE (mode);
9858 unsignedp = TREE_UNSIGNED (elttype);
9859
9860 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
9861 /* * TYPE_SIZE_UNIT (elttype) */ );
9862
9863 bc_store_field (elt, bitsize, bitpos, mode,
9864 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9865 /* The alignment of TARGET is
9866 at least what its type requires. */
9867 VOIDmode, 0,
9868 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9869 int_size_in_bytes (TREE_TYPE (constr)));
9870 }
9871
9872 }
9873}
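/* Sketch of the overall effect, assuming 4-byte ints and
   `struct { int a, b, c; } x = { 1, 2 };': since the constructor has
   fewer elements than the type has fields, the code above duplicates
   the pointer and emits constSI/clearBLK to zero all 12 bytes, then
   stores 1 and 2 into the first two fields via bc_store_field, leaving
   the original pointer on the stack.  */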
bbf6f052 9874
bbf6f052 9875
ca695ac9
JB
9876/* Store the value of EXP (an expression tree) into member FIELD of
9877 structure at address on stack, which has type TYPE, mode MODE and
9878 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
9879 structure.
bbf6f052 9880
ca695ac9
JB
9881 ALIGN is the alignment that TARGET is known to have, measured in bytes.
9882 TOTAL_SIZE is its size in bytes, or -1 if variable. */
bbf6f052 9883
ca695ac9
JB
9884void
9885bc_store_field (field, bitsize, bitpos, mode, exp, type,
9886 value_mode, unsignedp, align, total_size)
9887 int bitsize, bitpos;
9888 enum machine_mode mode;
9889 tree field, exp, type;
9890 enum machine_mode value_mode;
9891 int unsignedp;
9892 int align;
9893 int total_size;
9894{
bbf6f052 9895
ca695ac9
JB
9896 /* Expand expression and copy pointer */
9897 bc_expand_expr (exp);
9898 bc_emit_instruction (over);
bbf6f052 9899
bbf6f052 9900
ca695ac9
JB
9901 /* If the component is a bit field, we cannot use addressing to access
9902 it. Use bit-field techniques to store in it. */
bbf6f052 9903
ca695ac9
JB
9904 if (DECL_BIT_FIELD (field))
9905 {
9906 bc_store_bit_field (bitpos, bitsize, unsignedp);
9907 return;
9908 }
9909 else
9910 /* Not bit field */
9911 {
9912 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
9913
9914 /* Advance pointer to the desired member */
9915 if (offset)
9916 bc_emit_instruction (addconstPSI, offset);
9917
9918 /* Store */
9919 bc_store_memory (type, field);
9920 }
9921}
bbf6f052 9922
ca695ac9
JB
9923
9924/* Store SI/SU in bitfield */
bbf6f052 9925void
ca695ac9
JB
9926bc_store_bit_field (offset, size, unsignedp)
9927 int offset, size, unsignedp;
bbf6f052 9928{
ca695ac9
JB
9929 /* Push bitfield offset and size */
9930 bc_push_offset_and_size (offset, size);
bbf6f052 9931
ca695ac9
JB
9932 /* Store */
9933 bc_emit_instruction (sstoreBI);
9934}
e87b4f3f 9935
88d3b7f0 9936
ca695ac9
JB
9937/* Load SI/SU from bitfield */
9938void
9939bc_load_bit_field (offset, size, unsignedp)
9940 int offset, size, unsignedp;
9941{
9942 /* Push bitfield offset and size */
9943 bc_push_offset_and_size (offset, size);
88d3b7f0 9944
ca695ac9
JB
9945 /* Load: sign-extend if signed, else zero-extend */
9946 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
9947}
709f5be1 9948
bbf6f052 9949
ca695ac9
JB
9950/* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
9951 (adjust stack pointer upwards), negative means add that number of
9952 levels (adjust the stack pointer downwards). Only positive values
9953 normally make sense. */
bbf6f052 9954
ca695ac9
JB
9955void
9956bc_adjust_stack (nlevels)
9957 int nlevels;
9958{
9959 switch (nlevels)
9960 {
9961 case 0:
9962 break;
9963
9964 case 2:
9965 bc_emit_instruction (drop);
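	  /* Falls through: dropping two levels takes two drop
	     instructions.  */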
9966
9967 case 1:
9968 bc_emit_instruction (drop);
9969 break;
9970
9971 default:
9972
9973 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
9974 stack_depth -= nlevels;
9975 }
9976
a68c7608
RS
9977#if defined (VALIDATE_STACK_FOR_BC)
9978 VALIDATE_STACK_FOR_BC ();
bbf6f052
RK
9979#endif
9980}