/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

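/* For instance, CEIL (10, 4) is 3: ten bytes occupy three four-byte
   units, where plain integer division would truncate to 2.  */
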
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED  /* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void group_insns PROTO((rtx));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */
void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
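
/* As an illustration (hypothetical entry; the real ones are in
   modemap.def), a definition of the form

     DEF_MODEMAP (SImode, ..., constSI, loadSI, storeSI)

   would expand to three assignments filling the SImode slot of each
   of the three maps above.  */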
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

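/* An illustrative sketch of the expected calling pattern (not from
   the original source): an expander that reads OP0 and writes TARGET
   would do

     op0 = protect_from_queue (op0, 0);
     target = protect_from_queue (target, 1);
     emit_insn (gen_move_insn (target, op0));

   using both results immediately, before anything can call
   emit_queue.  */
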
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
	 to facilitate use of autoincrement.
	 Make a copy of the contents of the memory location
	 rather than a copy of the address, but not
	 if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  XEXP (x, 0) = QUEUED_VAR (y);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, x),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return x;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

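/* As an illustration (a sketch, not from the original source):
   expanding a C postincrement such as "a[i++]" uses the old value of
   I for the address while queueing the increment itself, roughly

     enqueue_insn (i_rtx, gen_add2_insn (i_rtx, const1_rtx));

   where I_RTX stands for I's rtl.  The add is emitted only when
   emit_queue drains pending_chain.  */
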
static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi
	  if (HAVE_extendpsisi)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

1164
1165/* Return an rtx for a value that would result
1166 from converting X from mode OLDMODE to mode MODE.
1167 Both modes may be floating, or both integer.
1168 UNSIGNEDP is nonzero if X is an unsigned value.
1169
1170 This can be done by referring to a part of X in place
1171 or by copying to a new temporary with conversion.
1172
1173 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1174
1175 This function *must not* call protect_from_queue
1176 except when putting X into an insn (in which case convert_move does it). */
1177
1178rtx
1179convert_modes (mode, oldmode, x, unsignedp)
1180 enum machine_mode mode, oldmode;
1181 rtx x;
1182 int unsignedp;
bbf6f052
RK
1183{
1184 register rtx temp;
5ffe63ed 1185
1499e0a8
RK
1186 /* If FROM is a SUBREG that indicates that we have already done at least
1187 the required extension, strip it. */
1188
1189 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1190 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1191 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1192 x = gen_lowpart (mode, x);
bbf6f052 1193
64791b18
RK
1194 if (GET_MODE (x) != VOIDmode)
1195 oldmode = GET_MODE (x);
1196
5ffe63ed 1197 if (mode == oldmode)
bbf6f052
RK
1198 return x;
1199
1200 /* There is one case that we must handle specially: If we are converting
906c4e36 1201 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
1202 we are to interpret the constant as unsigned, gen_lowpart will do
1203 the wrong if the constant appears negative. What we want to do is
1204 make the high-order word of the constant zero, not all ones. */
1205
1206 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 1207 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 1208 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
906c4e36 1209 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
bbf6f052
RK
1210
1211 /* We can do this with a gen_lowpart if both desired and current modes
1212 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
1213 non-volatile MEM. Except for the constant case where MODE is no
1214 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 1215
ba2e110c
RK
1216 if ((GET_CODE (x) == CONST_INT
1217 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 1218 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 1219 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 1220 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 1221 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
d57c66da
JW
1222 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1223 && direct_load[(int) mode])
bbf6f052 1224 || GET_CODE (x) == REG)))))
ba2e110c
RK
1225 {
1226 /* ?? If we don't know OLDMODE, we have to assume here that
1227 X does not need sign- or zero-extension. This may not be
1228 the case, but it's the best we can do. */
1229 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1230 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1231 {
1232 HOST_WIDE_INT val = INTVAL (x);
1233 int width = GET_MODE_BITSIZE (oldmode);
1234
1235 /* We must sign or zero-extend in this case. Start by
1236 zero-extending, then sign extend if we need to. */
1237 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1238 if (! unsignedp
1239 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1240 val |= (HOST_WIDE_INT) (-1) << width;
1241
1242 return GEN_INT (val);
1243 }
1244
1245 return gen_lowpart (mode, x);
1246 }
bbf6f052
RK
1247
1248 temp = gen_reg_rtx (mode);
1249 convert_move (temp, x, unsignedp);
1250 return temp;
1251}
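
/* Worked example of the extension arithmetic above: a QImode constant
   with INTVAL -1 (byte 0xFF) widened to SImode, width == 8.  First
   val &= (1 << 8) - 1 leaves 0xFF; then, if signed, bit 7 is set, so
   val |= -1 << 8 produces -1 again, i.e. 0xFFFFFFFF rather than a
   spurious 0x000000FF.  */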
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

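/* Worked example (not from the original source): with MOVE_MAX == 4,
   len == 7 and a fully aligned block, and assuming mov patterns exist
   for each mode, the loop above emits one SImode move, then one
   HImode move, then one QImode move -- three insns in all, which is
   also what move_by_pieces_ninsns (7, 4) computes.  */
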
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
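
/* A typical call (illustrative sketch; TARGET_MEM and SOURCE_MEM are
   hypothetical BLKmode MEMs):

     emit_block_move (target_mem, source_mem, GEN_INT (16), 4);

   With the default MOVE_RATIO of 15 this goes through move_by_pieces,
   since four word moves are cheaper than a block-move libcall.  */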
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
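
/* Worked example of the alignment above: with UNITS_PER_WORD == 4,
   storing a 2-byte object on a BYTES_BIG_ENDIAN machine shifts the
   register left by (4 - 2) * 8 == 16 bits, so the two significant
   bytes end up at the low addresses of the word in memory.  */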

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Mark the instructions since PREV as a libcall block.
   Add a REG_LIBCALL note to the first insn after PREV and a REG_RETVAL
   note to the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
				   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
				    REG_NOTES (insn_first));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

1758
1759/* Low level part of emit_move_insn.
1760 Called just like emit_move_insn, but assumes X and Y
1761 are basically valid. */
1762
1763rtx
1764emit_move_insn_1 (x, y)
1765 rtx x, y;
1766{
1767 enum machine_mode mode = GET_MODE (x);
1768 enum machine_mode submode;
1769 enum mode_class class = GET_MODE_CLASS (mode);
1770 int i;
1771
7308a047
RS
1772 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1773 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1774 (class == MODE_COMPLEX_INT
1775 ? MODE_INT : MODE_FLOAT),
1776 0);
1777
bbf6f052
RK
1778 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1779 return
1780 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1781
89742723 1782 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047
RS
1783 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1784 && submode != BLKmode
1785 && (mov_optab->handlers[(int) submode].insn_code
1786 != CODE_FOR_nothing))
1787 {
1788 /* Don't split destination if it is a stack push. */
1789 int stack = push_operand (x, GET_MODE (x));
1790 rtx prev = get_last_insn ();
1791
1792 /* Tell flow that the whole of the destination is being set. */
1793 if (GET_CODE (x) == REG)
1794 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1795
1796 /* If this is a stack, push the highpart first, so it
1797 will be in the argument order.
1798
1799 In that case, change_address is used only to convert
1800 the mode, not to change the address. */
c937357e
RS
1801 if (stack)
1802 {
1803#ifdef STACK_GROWS_DOWNWARD
1804 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1805 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1806 gen_highpart (submode, y)));
1807 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1808 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1809 gen_lowpart (submode, y)));
1810#else
1811 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1812 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1813 gen_lowpart (submode, y)));
1814 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1815 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1816 gen_highpart (submode, y)));
1817#endif
1818 }
1819 else
1820 {
1821 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1822 (gen_highpart (submode, x), gen_highpart (submode, y)));
1823 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1824 (gen_lowpart (submode, x), gen_lowpart (submode, y)));
1825 }
7308a047
RS
1826
1827 group_insns (prev);
7a1ab50a
RS
1828
1829 return get_last_insn ();
7308a047
RS
1830 }
1831
bbf6f052
RK
1832 /* This will handle any multi-word mode that lacks a move_insn pattern.
1833 However, you will get better code if you define such patterns,
1834 even if they must turn into multiple assembler instructions. */
a4320483 1835 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
1836 {
1837 rtx last_insn = 0;
7308a047 1838 rtx prev_insn = get_last_insn ();
bbf6f052
RK
1839
1840 for (i = 0;
1841 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1842 i++)
1843 {
1844 rtx xpart = operand_subword (x, i, 1, mode);
1845 rtx ypart = operand_subword (y, i, 1, mode);
1846
1847 /* If we can't get a part of Y, put Y into memory if it is a
1848 constant. Otherwise, force it into a register. If we still
1849 can't get a part of Y, abort. */
1850 if (ypart == 0 && CONSTANT_P (y))
1851 {
1852 y = force_const_mem (mode, y);
1853 ypart = operand_subword (y, i, 1, mode);
1854 }
1855 else if (ypart == 0)
1856 ypart = operand_subword_force (y, i, mode);
1857
1858 if (xpart == 0 || ypart == 0)
1859 abort ();
1860
1861 last_insn = emit_move_insn (xpart, ypart);
1862 }
7308a047
RS
1863 /* Mark these insns as a libcall block. */
1864 group_insns (prev_insn);
1865
bbf6f052
RK
1866 return last_insn;
1867 }
1868 else
1869 abort ();
1870}
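/* Editorial sketch: for a mode such as SCmode that has no move
   pattern of its own but whose component SFmode does, the splitting
   above amounts conceptually to

	emit_insn (gen_movsf (gen_highpart (SFmode, x),
			      gen_highpart (SFmode, y)));
	emit_insn (gen_movsf (gen_lowpart (SFmode, x),
			      gen_lowpart (SFmode, y)));

   preceded by a CLOBBER of X when X is a REG, so that flow analysis
   sees the whole destination as set.  gen_movsf is assumed here for
   concreteness; the real code goes through mov_optab.  */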
1871\f
1872/* Pushing data onto the stack. */
1873
1874/* Push a block of length SIZE (perhaps variable)
1875 and return an rtx to address the beginning of the block.
1876 Note that it is not possible for the value returned to be a QUEUED.
1877 The value may be virtual_outgoing_args_rtx.
1878
1879 EXTRA is the number of bytes of padding to push in addition to SIZE.
1880 BELOW nonzero means this padding comes at low addresses;
1881 otherwise, the padding comes at high addresses. */
1882
1883rtx
1884push_block (size, extra, below)
1885 rtx size;
1886 int extra, below;
1887{
1888 register rtx temp;
1889 if (CONSTANT_P (size))
1890 anti_adjust_stack (plus_constant (size, extra));
1891 else if (GET_CODE (size) == REG && extra == 0)
1892 anti_adjust_stack (size);
1893 else
1894 {
1895 rtx temp = copy_to_mode_reg (Pmode, size);
1896 if (extra != 0)
906c4e36 1897 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
1898 temp, 0, OPTAB_LIB_WIDEN);
1899 anti_adjust_stack (temp);
1900 }
1901
1902#ifdef STACK_GROWS_DOWNWARD
1903 temp = virtual_outgoing_args_rtx;
1904 if (extra != 0 && below)
1905 temp = plus_constant (temp, extra);
1906#else
1907 if (GET_CODE (size) == CONST_INT)
1908 temp = plus_constant (virtual_outgoing_args_rtx,
1909 - INTVAL (size) - (below ? 0 : extra));
1910 else if (extra != 0 && !below)
1911 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1912 negate_rtx (Pmode, plus_constant (size, extra)));
1913 else
1914 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1915 negate_rtx (Pmode, size));
1916#endif
1917
1918 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1919}
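/* Editorial example: pushing a constant-sized block of 64 bytes with
   EXTRA == 0 reduces, on a STACK_GROWS_DOWNWARD machine, to

	anti_adjust_stack (GEN_INT (64));
	return memory_address (QImode, virtual_outgoing_args_rtx);

   i.e. the stack is grown and the returned address names the new
   bottom of the outgoing-argument area.  (QImode stands in for the
   narrowest MODE_INT mode.)  */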
1920
87e38d84 1921rtx
bbf6f052
RK
1922gen_push_operand ()
1923{
1924 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1925}
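/* Editorial note: with STACK_PUSH_CODE == PRE_DEC the result is the
   rtx (pre_dec:Pmode (reg sp)), so that

	gen_rtx (MEM, mode, gen_push_operand ())

   denotes a store whose side effect is the stack-pointer adjustment
   itself.  */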
1926
1927/* Generate code to push X onto the stack, assuming it has mode MODE and
1928 type TYPE.
1929 MODE is redundant except when X is a CONST_INT (since they don't
1930 carry mode info).
1931 SIZE is an rtx for the size of data to be copied (in bytes),
1932 needed only if X is BLKmode.
1933
1934 ALIGN (in bytes) is maximum alignment we can assume.
1935
cd048831
RK
1936 If PARTIAL and REG are both nonzero, then copy that many of the first
1937 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
1938 The amount of space pushed is decreased by PARTIAL words,
1939 rounded *down* to a multiple of PARM_BOUNDARY.
1940 REG must be a hard register in this case.
cd048831
RK
1941 If REG is zero but PARTIAL is not, take all the other actions for an
1942 argument partially in registers, but do not actually load any
1943 registers.
bbf6f052
RK
1944
1945 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 1946 This is ignored if an argument block has already been allocated.
bbf6f052
RK
1947
1948 On a machine that lacks real push insns, ARGS_ADDR is the address of
1949 the bottom of the argument block for this call. We use indexing off there
1950 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1951 argument block has not been preallocated.
1952
1953 ARGS_SO_FAR is the size of args previously pushed for this call. */
1954
1955void
1956emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1957 args_addr, args_so_far)
1958 register rtx x;
1959 enum machine_mode mode;
1960 tree type;
1961 rtx size;
1962 int align;
1963 int partial;
1964 rtx reg;
1965 int extra;
1966 rtx args_addr;
1967 rtx args_so_far;
1968{
1969 rtx xinner;
1970 enum direction stack_direction
1971#ifdef STACK_GROWS_DOWNWARD
1972 = downward;
1973#else
1974 = upward;
1975#endif
1976
1977 /* Decide where to pad the argument: `downward' for below,
1978 `upward' for above, or `none' for don't pad it.
1979 Default is below for small data on big-endian machines; else above. */
1980 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1981
1982 /* Invert direction if stack is post-update. */
1983 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1984 if (where_pad != none)
1985 where_pad = (where_pad == downward ? upward : downward);
1986
1987 xinner = x = protect_from_queue (x, 0);
1988
1989 if (mode == BLKmode)
1990 {
1991 /* Copy a block into the stack, entirely or partially. */
1992
1993 register rtx temp;
1994 int used = partial * UNITS_PER_WORD;
1995 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1996 int skip;
1997
1998 if (size == 0)
1999 abort ();
2000
2001 used -= offset;
2002
2003 /* USED is now the # of bytes we need not copy to the stack
2004 because registers will take care of them. */
2005
2006 if (partial != 0)
2007 xinner = change_address (xinner, BLKmode,
2008 plus_constant (XEXP (xinner, 0), used));
2009
2010 /* If the partial register-part of the arg counts in its stack size,
2011 skip the part of stack space corresponding to the registers.
2012 Otherwise, start copying to the beginning of the stack space,
2013 by setting SKIP to 0. */
2014#ifndef REG_PARM_STACK_SPACE
2015 skip = 0;
2016#else
2017 skip = used;
2018#endif
2019
2020#ifdef PUSH_ROUNDING
2021 /* Do it with several push insns if that doesn't take lots of insns
2022 and if there is no difficulty with push insns that skip bytes
2023 on the stack for alignment purposes. */
2024 if (args_addr == 0
2025 && GET_CODE (size) == CONST_INT
2026 && skip == 0
2027 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2028 < MOVE_RATIO)
bbf6f052
RK
2029 /* Here we avoid the case of a structure whose weak alignment
2030 forces many pushes of a small amount of data,
2031 and such small pushes do rounding that causes trouble. */
e87b4f3f
RS
2032 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
2033 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2034 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
2035 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2036 {
2037 /* Push padding now if padding above and stack grows down,
2038 or if padding below and stack grows up.
2039 But if space already allocated, this has already been done. */
2040 if (extra && args_addr == 0
2041 && where_pad != none && where_pad != stack_direction)
906c4e36 2042 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2043
2044 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2045 INTVAL (size) - used, align);
2046 }
2047 else
2048#endif /* PUSH_ROUNDING */
2049 {
2050 /* Otherwise make space on the stack and copy the data
2051 to the address of that space. */
2052
2053 /* Deduct words put into registers from the size we must copy. */
2054 if (partial != 0)
2055 {
2056 if (GET_CODE (size) == CONST_INT)
906c4e36 2057 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
2058 else
2059 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
2060 GEN_INT (used), NULL_RTX, 0,
2061 OPTAB_LIB_WIDEN);
bbf6f052
RK
2062 }
2063
2064 /* Get the address of the stack space.
2065 In this case, we do not deal with EXTRA separately.
2066 A single stack adjust will do. */
2067 if (! args_addr)
2068 {
2069 temp = push_block (size, extra, where_pad == downward);
2070 extra = 0;
2071 }
2072 else if (GET_CODE (args_so_far) == CONST_INT)
2073 temp = memory_address (BLKmode,
2074 plus_constant (args_addr,
2075 skip + INTVAL (args_so_far)));
2076 else
2077 temp = memory_address (BLKmode,
2078 plus_constant (gen_rtx (PLUS, Pmode,
2079 args_addr, args_so_far),
2080 skip));
2081
2082 /* TEMP is the address of the block. Copy the data there. */
2083 if (GET_CODE (size) == CONST_INT
2084 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2085 < MOVE_RATIO))
2086 {
2087 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2088 INTVAL (size), align);
2089 goto ret;
2090 }
2091 /* Try the most limited insn first, because there's no point
2092 including more than one in the machine description unless
2093 the more limited one has some advantage. */
2094#ifdef HAVE_movstrqi
2095 if (HAVE_movstrqi
2096 && GET_CODE (size) == CONST_INT
2097 && ((unsigned) INTVAL (size)
2098 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2099 {
c841050e
RS
2100 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2101 xinner, size, GEN_INT (align));
2102 if (pat != 0)
2103 {
2104 emit_insn (pat);
2105 goto ret;
2106 }
bbf6f052
RK
2107 }
2108#endif
2109#ifdef HAVE_movstrhi
2110 if (HAVE_movstrhi
2111 && GET_CODE (size) == CONST_INT
2112 && ((unsigned) INTVAL (size)
2113 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2114 {
c841050e
RS
2115 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2116 xinner, size, GEN_INT (align));
2117 if (pat != 0)
2118 {
2119 emit_insn (pat);
2120 goto ret;
2121 }
bbf6f052
RK
2122 }
2123#endif
2124#ifdef HAVE_movstrsi
2125 if (HAVE_movstrsi)
2126 {
c841050e
RS
2127 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2128 xinner, size, GEN_INT (align));
2129 if (pat != 0)
2130 {
2131 emit_insn (pat);
2132 goto ret;
2133 }
bbf6f052
RK
2134 }
2135#endif
2136#ifdef HAVE_movstrdi
2137 if (HAVE_movstrdi)
2138 {
c841050e
RS
2139 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2140 xinner, size, GEN_INT (align));
2141 if (pat != 0)
2142 {
2143 emit_insn (pat);
2144 goto ret;
2145 }
bbf6f052
RK
2146 }
2147#endif
2148
2149#ifndef ACCUMULATE_OUTGOING_ARGS
2150 /* If the source is referenced relative to the stack pointer,
2151 copy it to another register to stabilize it. We do not need
2152 to do this if we know that we won't be changing sp. */
2153
2154 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2155 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2156 temp = copy_to_reg (temp);
2157#endif
2158
2159 /* Make inhibit_defer_pop nonzero around the library call
2160 to force it to pop the bcopy-arguments right away. */
2161 NO_DEFER_POP;
2162#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2163 emit_library_call (memcpy_libfunc, 0,
bbf6f052 2164 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
2165 convert_to_mode (TYPE_MODE (sizetype),
2166 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2167 TYPE_MODE (sizetype));
bbf6f052 2168#else
d562e42e 2169 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2170 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
0fa83258
RK
2171 convert_to_mode (TYPE_MODE (sizetype),
2172 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2173 TYPE_MODE (sizetype));
bbf6f052
RK
2174#endif
2175 OK_DEFER_POP;
2176 }
2177 }
2178 else if (partial > 0)
2179 {
2180 /* Scalar partly in registers. */
2181
2182 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2183 int i;
2184 int not_stack;
2185 /* # words of start of argument
2186 that we must make space for but need not store. */
2187 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2188 int args_offset = INTVAL (args_so_far);
2189 int skip;
2190
2191 /* Push padding now if padding above and stack grows down,
2192 or if padding below and stack grows up.
2193 But if space already allocated, this has already been done. */
2194 if (extra && args_addr == 0
2195 && where_pad != none && where_pad != stack_direction)
906c4e36 2196 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2197
2198 /* If we make space by pushing it, we might as well push
2199 the real data. Otherwise, we can leave OFFSET nonzero
2200 and leave the space uninitialized. */
2201 if (args_addr == 0)
2202 offset = 0;
2203
2204 /* Now NOT_STACK gets the number of words that we don't need to
2205 allocate on the stack. */
2206 not_stack = partial - offset;
2207
2208 /* If the partial register-part of the arg counts in its stack size,
2209 skip the part of stack space corresponding to the registers.
2210 Otherwise, start copying to the beginning of the stack space,
2211 by setting SKIP to 0. */
2212#ifndef REG_PARM_STACK_SPACE
2213 skip = 0;
2214#else
2215 skip = not_stack;
2216#endif
2217
2218 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2219 x = validize_mem (force_const_mem (mode, x));
2220
2221 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2222 SUBREGs of such registers are not allowed. */
2223 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2224 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2225 x = copy_to_reg (x);
2226
2227 /* Loop over all the words allocated on the stack for this arg. */
2228 /* We can do it by words, because any scalar bigger than a word
2229 has a size that is a multiple of a word. */
2230#ifndef PUSH_ARGS_REVERSED
2231 for (i = not_stack; i < size; i++)
2232#else
2233 for (i = size - 1; i >= not_stack; i--)
2234#endif
2235 if (i >= not_stack + offset)
2236 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
2237 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2238 0, args_addr,
2239 GEN_INT (args_offset + ((i - not_stack + skip)
bbf6f052
RK
2240 * UNITS_PER_WORD)));
2241 }
2242 else
2243 {
2244 rtx addr;
2245
2246 /* Push padding now if padding above and stack grows down,
2247 or if padding below and stack grows up.
2248 But if space already allocated, this has already been done. */
2249 if (extra && args_addr == 0
2250 && where_pad != none && where_pad != stack_direction)
906c4e36 2251 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2252
2253#ifdef PUSH_ROUNDING
2254 if (args_addr == 0)
2255 addr = gen_push_operand ();
2256 else
2257#endif
2258 if (GET_CODE (args_so_far) == CONST_INT)
2259 addr
2260 = memory_address (mode,
2261 plus_constant (args_addr, INTVAL (args_so_far)));
2262 else
2263 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2264 args_so_far));
2265
2266 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2267 }
2268
2269 ret:
2270 /* If part should go in registers, copy that part
2271 into the appropriate registers. Do this now, at the end,
2272 since mem-to-mem copies above may do function calls. */
cd048831 2273 if (partial > 0 && reg != 0)
bbf6f052
RK
2274 move_block_to_reg (REGNO (reg), x, partial, mode);
2275
2276 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 2277 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2278}
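/* Editorial call sketch (the argument values are assumed, not taken
   from any caller): pushing a whole SImode scalar with no partial
   registers, no padding and no preallocated argument block is
   roughly

	emit_push_insn (x, SImode, type, NULL_RTX,
			PARM_BOUNDARY / BITS_PER_UNIT,
			0, NULL_RTX, 0, NULL_RTX, const0_rtx);

   which reaches the final `else' arm above and emits a single move
   into a push operand (or into an indexed stack slot when ARGS_ADDR
   is given).  */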
2279\f
bbf6f052
RK
2280/* Expand an assignment that stores the value of FROM into TO.
2281 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
2282 (This may contain a QUEUED rtx;
2283 if the value is constant, this rtx is a constant.)
2284 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
2285
2286 SUGGEST_REG is no longer actually used.
2287 It used to mean, copy the value through a register
2288 and return that register, if that is possible.
709f5be1 2289 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
2290
2291rtx
2292expand_assignment (to, from, want_value, suggest_reg)
2293 tree to, from;
2294 int want_value;
2295 int suggest_reg;
2296{
2297 register rtx to_rtx = 0;
2298 rtx result;
2299
2300 /* Don't crash if the lhs of the assignment was erroneous. */
2301
2302 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
2303 {
2304 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2305 return want_value ? result : NULL_RTX;
2306 }
bbf6f052 2307
ca695ac9
JB
2308 if (output_bytecode)
2309 {
2310 tree dest_innermost;
2311
2312 bc_expand_expr (from);
2313 bc_emit_instruction (dup);
2314
2315 dest_innermost = bc_expand_address (to);
2316
2317 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2318 take care of it here. */
2319
2320 bc_store_memory (TREE_TYPE (to), dest_innermost);
2321 return NULL;
2322 }
2323
bbf6f052
RK
2324 /* Assignment of a structure component needs special treatment
2325 if the structure component's rtx is not simply a MEM.
2326 Assignment of an array element at a constant index
2327 has the same problem. */
2328
2329 if (TREE_CODE (to) == COMPONENT_REF
2330 || TREE_CODE (to) == BIT_FIELD_REF
2331 || (TREE_CODE (to) == ARRAY_REF
2332 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2333 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2334 {
2335 enum machine_mode mode1;
2336 int bitsize;
2337 int bitpos;
7bb0943f 2338 tree offset;
bbf6f052
RK
2339 int unsignedp;
2340 int volatilep = 0;
0088fcb1
RK
2341 tree tem;
2342
2343 push_temp_slots ();
2344 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
bbf6f052
RK
2345 &mode1, &unsignedp, &volatilep);
2346
2347 /* If we are going to use store_bit_field and extract_bit_field,
2348 make sure to_rtx will be safe for multiple use. */
2349
2350 if (mode1 == VOIDmode && want_value)
2351 tem = stabilize_reference (tem);
2352
906c4e36 2353 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2354 if (offset != 0)
2355 {
906c4e36 2356 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2357
2358 if (GET_CODE (to_rtx) != MEM)
2359 abort ();
2360 to_rtx = change_address (to_rtx, VOIDmode,
2361 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2362 force_reg (Pmode, offset_rtx)));
2363 }
bbf6f052
RK
2364 if (volatilep)
2365 {
2366 if (GET_CODE (to_rtx) == MEM)
2367 MEM_VOLATILE_P (to_rtx) = 1;
2368#if 0 /* This was turned off because, when a field is volatile
2369 in an object which is not volatile, the object may be in a register,
2370 and then we would abort over here. */
2371 else
2372 abort ();
2373#endif
2374 }
2375
2376 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2377 (want_value
2378 /* Spurious cast makes HPUX compiler happy. */
2379 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2380 : VOIDmode),
2381 unsignedp,
2382 /* Required alignment of containing datum. */
2383 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2384 int_size_in_bytes (TREE_TYPE (tem)));
2385 preserve_temp_slots (result);
2386 free_temp_slots ();
0088fcb1 2387 pop_temp_slots ();
bbf6f052 2388
709f5be1
RS
2389 /* If the value is meaningful, convert RESULT to the proper mode.
2390 Otherwise, return nothing. */
5ffe63ed
RS
2391 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2392 TYPE_MODE (TREE_TYPE (from)),
2393 result,
2394 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 2395 : NULL_RTX);
bbf6f052
RK
2396 }
2397
cd1db108
RS
2398 /* If the rhs is a function call and its value is not an aggregate,
2399 call the function before we start to compute the lhs.
2400 This is needed for correct code for cases such as
2401 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
2402 requires loading up part of an address in a separate insn.
2403
2404 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2405 a promoted variable where the zero- or sign- extension needs to be done.
2406 Handling this in the normal way is safe because no computation is done
2407 before the call. */
2408 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2409 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 2410 {
0088fcb1
RK
2411 rtx value;
2412
2413 push_temp_slots ();
2414 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108
RS
2415 if (to_rtx == 0)
2416 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2417 emit_move_insn (to_rtx, value);
2418 preserve_temp_slots (to_rtx);
2419 free_temp_slots ();
0088fcb1 2420 pop_temp_slots ();
709f5be1 2421 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
2422 }
2423
bbf6f052
RK
2424 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2425 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2426
2427 if (to_rtx == 0)
906c4e36 2428 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
bbf6f052 2429
86d38d25
RS
2430 /* Don't move directly into a return register. */
2431 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2432 {
0088fcb1
RK
2433 rtx temp;
2434
2435 push_temp_slots ();
2436 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
2437 emit_move_insn (to_rtx, temp);
2438 preserve_temp_slots (to_rtx);
2439 free_temp_slots ();
0088fcb1 2440 pop_temp_slots ();
709f5be1 2441 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
2442 }
2443
bbf6f052
RK
2444 /* In case we are returning the contents of an object which overlaps
2445 the place the value is being stored, use a safe function when copying
2446 a value through a pointer into a structure value return block. */
2447 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2448 && current_function_returns_struct
2449 && !current_function_returns_pcc_struct)
2450 {
0088fcb1
RK
2451 rtx from_rtx, size;
2452
2453 push_temp_slots ();
33a20d10
RK
2454 size = expr_size (from);
2455 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
2456
2457#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2458 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
2459 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2460 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
2461 convert_to_mode (TYPE_MODE (sizetype),
2462 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2463 TYPE_MODE (sizetype));
bbf6f052 2464#else
d562e42e 2465 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
2466 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2467 XEXP (to_rtx, 0), Pmode,
0fa83258
RK
2468 convert_to_mode (TYPE_MODE (sizetype),
2469 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2470 TYPE_MODE (sizetype));
bbf6f052
RK
2471#endif
2472
2473 preserve_temp_slots (to_rtx);
2474 free_temp_slots ();
0088fcb1 2475 pop_temp_slots ();
709f5be1 2476 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
2477 }
2478
2479 /* Compute FROM and store the value in the rtx we got. */
2480
0088fcb1 2481 push_temp_slots ();
bbf6f052
RK
2482 result = store_expr (from, to_rtx, want_value);
2483 preserve_temp_slots (result);
2484 free_temp_slots ();
0088fcb1 2485 pop_temp_slots ();
709f5be1 2486 return want_value ? result : NULL_RTX;
bbf6f052
RK
2487}
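/* Editorial sketch of the common path: for a plain scalar assignment
   such as `i = j', none of the special cases above apply and the
   function reduces to

	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
	result = store_expr (from, to_rtx, want_value);

   bracketed by push_temp_slots / preserve_temp_slots /
   pop_temp_slots exactly as in the code.  */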
2488
2489/* Generate code for computing expression EXP,
2490 and storing the value into TARGET.
bbf6f052
RK
2491 TARGET may contain a QUEUED rtx.
2492
709f5be1
RS
2493 If WANT_VALUE is nonzero, return a copy of the value
2494 not in TARGET, so that we can be sure to use the proper
2495 value in a containing expression even if TARGET has something
2496 else stored in it. If possible, we copy the value through a pseudo
2497 and return that pseudo. Or, if the value is constant, we try to
2498 return the constant. In some cases, we return a pseudo
2499 copied *from* TARGET.
2500
2501 If the mode is BLKmode then we may return TARGET itself.
2502 It turns out that in BLKmode it doesn't cause a problem,
2503 because C has no operators that could combine two different
2504 assignments into the same BLKmode object with different values
2505 with no sequence point. Will other languages need this to
2506 be more thorough?
2507
2508 If WANT_VALUE is 0, we return NULL, to make sure
2509 to catch quickly any cases where the caller uses the value
2510 and fails to set WANT_VALUE. */
bbf6f052
RK
2511
2512rtx
709f5be1 2513store_expr (exp, target, want_value)
bbf6f052
RK
2514 register tree exp;
2515 register rtx target;
709f5be1 2516 int want_value;
bbf6f052
RK
2517{
2518 register rtx temp;
2519 int dont_return_target = 0;
2520
2521 if (TREE_CODE (exp) == COMPOUND_EXPR)
2522 {
2523 /* Perform first part of compound expression, then assign from second
2524 part. */
2525 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2526 emit_queue ();
709f5be1 2527 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
2528 }
2529 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2530 {
2531 /* For conditional expression, get safe form of the target. Then
2532 test the condition, doing the appropriate assignment on either
2533 side. This avoids the creation of unnecessary temporaries.
2534 For non-BLKmode, it is more efficient not to do this. */
2535
2536 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2537
2538 emit_queue ();
2539 target = protect_from_queue (target, 1);
2540
2541 NO_DEFER_POP;
2542 jumpifnot (TREE_OPERAND (exp, 0), lab1);
709f5be1 2543 store_expr (TREE_OPERAND (exp, 1), target, 0);
bbf6f052
RK
2544 emit_queue ();
2545 emit_jump_insn (gen_jump (lab2));
2546 emit_barrier ();
2547 emit_label (lab1);
709f5be1 2548 store_expr (TREE_OPERAND (exp, 2), target, 0);
bbf6f052
RK
2549 emit_queue ();
2550 emit_label (lab2);
2551 OK_DEFER_POP;
709f5be1 2552 return want_value ? target : NULL_RTX;
bbf6f052 2553 }
709f5be1 2554 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
bbf6f052
RK
2555 && GET_MODE (target) != BLKmode)
2556 /* If target is in memory and caller wants value in a register instead,
2557 arrange that. Pass TARGET as target for expand_expr so that,
709f5be1 2558 if EXP is another assignment, WANT_VALUE will be nonzero for it.
c2e6aff6
RS
2559 We know expand_expr will not use the target in that case.
2560 Don't do this if TARGET is volatile because we are supposed
2561 to write it and then read it. */
bbf6f052 2562 {
906c4e36 2563 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
2564 GET_MODE (target), 0);
2565 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2566 temp = copy_to_reg (temp);
2567 dont_return_target = 1;
2568 }
2569 else if (queued_subexp_p (target))
709f5be1
RS
2570 /* If target contains a postincrement, let's not risk
2571 using it as the place to generate the rhs. */
bbf6f052
RK
2572 {
2573 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2574 {
2575 /* Expand EXP into a new pseudo. */
2576 temp = gen_reg_rtx (GET_MODE (target));
2577 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2578 }
2579 else
906c4e36 2580 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
2581
2582 /* If target is volatile, ANSI requires accessing the value
2583 *from* the target, if it is accessed. So make that happen.
2584 In no case return the target itself. */
2585 if (! MEM_VOLATILE_P (target) && want_value)
2586 dont_return_target = 1;
bbf6f052 2587 }
1499e0a8
RK
2588 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2588 /* If this is a scalar in a register that is stored in a wider mode
2590 than the declared mode, compute the result into its declared mode
2591 and then convert to the wider mode. Our value is the computed
2592 expression. */
2593 {
2594 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2595 convert_move (SUBREG_REG (target), temp,
2596 SUBREG_PROMOTED_UNSIGNED_P (target));
709f5be1 2597 return want_value ? temp : NULL_RTX;
1499e0a8 2598 }
bbf6f052
RK
2599 else
2600 {
2601 temp = expand_expr (exp, target, GET_MODE (target), 0);
2602 /* DO return TARGET if it's a specified hardware register.
c2e6aff6 2603 expand_return relies on this.
709f5be1
RS
2604 If TARGET is a volatile mem ref, either return TARGET
2605 or return a reg copied *from* TARGET; ANSI requires this.
2606
2607 Otherwise, if TEMP is not TARGET, return TEMP
2608 if it is constant (for efficiency),
2609 or if we really want the correct value. */
bbf6f052
RK
2610 if (!(target && GET_CODE (target) == REG
2611 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1
RS
2612 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2613 && temp != target
2614 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
2615 dont_return_target = 1;
2616 }
2617
2618 /* If value was not generated in the target, store it there.
2619 Convert the value to TARGET's type first if necessary. */
2620
2621 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2622 {
2623 target = protect_from_queue (target, 1);
2624 if (GET_MODE (temp) != GET_MODE (target)
2625 && GET_MODE (temp) != VOIDmode)
2626 {
2627 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2628 if (dont_return_target)
2629 {
2630 /* In this case, we will return TEMP,
2631 so make sure it has the proper mode.
2632 But don't forget to store the value into TARGET. */
2633 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2634 emit_move_insn (target, temp);
2635 }
2636 else
2637 convert_move (target, temp, unsignedp);
2638 }
2639
2640 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2641 {
2642 /* Handle copying a string constant into an array.
2643 The string constant may be shorter than the array.
2644 So copy just the string's actual length, and clear the rest. */
2645 rtx size;
2646
e87b4f3f
RS
2647 /* Get the size of the data type of the string,
2648 which is actually the size of the target. */
2649 size = expr_size (exp);
2650 if (GET_CODE (size) == CONST_INT
2651 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2652 emit_block_move (target, temp, size,
2653 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2654 else
bbf6f052 2655 {
e87b4f3f
RS
2656 /* Compute the size of the data to copy from the string. */
2657 tree copy_size
c03b7665 2658 = size_binop (MIN_EXPR,
b50d17a1 2659 make_tree (sizetype, size),
c03b7665
RK
2660 convert (sizetype,
2661 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
2662 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2663 VOIDmode, 0);
e87b4f3f
RS
2664 rtx label = 0;
2665
2666 /* Copy that much. */
2667 emit_block_move (target, temp, copy_size_rtx,
2668 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2669
2670 /* Figure out how much is left in TARGET
2671 that we have to clear. */
2672 if (GET_CODE (copy_size_rtx) == CONST_INT)
2673 {
2674 temp = plus_constant (XEXP (target, 0),
2675 TREE_STRING_LENGTH (exp));
2676 size = plus_constant (size,
2677 - TREE_STRING_LENGTH (exp));
2678 }
2679 else
2680 {
2681 enum machine_mode size_mode = Pmode;
2682
2683 temp = force_reg (Pmode, XEXP (target, 0));
2684 temp = expand_binop (size_mode, add_optab, temp,
906c4e36
RK
2685 copy_size_rtx, NULL_RTX, 0,
2686 OPTAB_LIB_WIDEN);
e87b4f3f
RS
2687
2688 size = expand_binop (size_mode, sub_optab, size,
906c4e36
RK
2689 copy_size_rtx, NULL_RTX, 0,
2690 OPTAB_LIB_WIDEN);
e87b4f3f 2691
906c4e36 2692 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
2693 GET_MODE (size), 0, 0);
2694 label = gen_label_rtx ();
2695 emit_jump_insn (gen_blt (label));
2696 }
2697
2698 if (size != const0_rtx)
2699 {
bbf6f052 2700#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2701 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
e87b4f3f 2702 temp, Pmode, const0_rtx, Pmode, size, Pmode);
bbf6f052 2703#else
d562e42e 2704 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
e87b4f3f 2705 temp, Pmode, size, Pmode);
bbf6f052 2706#endif
e87b4f3f
RS
2707 }
2708 if (label)
2709 emit_label (label);
bbf6f052
RK
2710 }
2711 }
2712 else if (GET_MODE (temp) == BLKmode)
2713 emit_block_move (target, temp, expr_size (exp),
2714 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2715 else
2716 emit_move_insn (target, temp);
2717 }
709f5be1 2718
7d26fec6 2719 if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 2720 return temp;
709f5be1
RS
2721 if (want_value && GET_MODE (target) != BLKmode)
2722 return copy_to_reg (target);
2723 if (want_value)
2724 return target;
2725 return NULL_RTX;
bbf6f052
RK
2726}
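/* Editorial usage note: a caller that needs the stored value writes

	rtx val = store_expr (exp, target, 1);

   and may get back a pseudo copied from TARGET, or a constant; a
   caller passing WANT_VALUE == 0 must ignore the result, which is
   normally NULL_RTX (TEMP in the dont_return_target case).  */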
2727\f
2728/* Store the value of constructor EXP into the rtx TARGET.
2729 TARGET is either a REG or a MEM. */
2730
2731static void
2732store_constructor (exp, target)
2733 tree exp;
2734 rtx target;
2735{
4af3895e
JVA
2736 tree type = TREE_TYPE (exp);
2737
bbf6f052
RK
2738 /* We know our target cannot conflict, since safe_from_p has been called. */
2739#if 0
2740 /* Don't try copying piece by piece into a hard register
2741 since that is vulnerable to being clobbered by EXP.
2742 Instead, construct in a pseudo register and then copy it all. */
2743 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2744 {
2745 rtx temp = gen_reg_rtx (GET_MODE (target));
2746 store_constructor (exp, temp);
2747 emit_move_insn (target, temp);
2748 return;
2749 }
2750#endif
2751
e44842fe
RK
2752 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2753 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
2754 {
2755 register tree elt;
2756
4af3895e 2757 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
2758 if (TREE_CODE (type) == UNION_TYPE
2759 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 2760 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
4af3895e
JVA
2761
2762 /* If we are building a static constructor into a register,
2763 set the initial value as zero so we can fold the value into
2764 a constant. */
2765 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2766 emit_move_insn (target, const0_rtx);
2767
bbf6f052
RK
2768 /* If the constructor has fewer fields than the structure,
2769 clear the whole structure first. */
2770 else if (list_length (CONSTRUCTOR_ELTS (exp))
4af3895e
JVA
2771 != list_length (TYPE_FIELDS (type)))
2772 clear_storage (target, int_size_in_bytes (type));
bbf6f052
RK
2773 else
2774 /* Inform later passes that the old value is dead. */
2775 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2776
2777 /* Store each element of the constructor into
2778 the corresponding field of TARGET. */
2779
2780 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2781 {
2782 register tree field = TREE_PURPOSE (elt);
2783 register enum machine_mode mode;
2784 int bitsize;
b50d17a1 2785 int bitpos = 0;
bbf6f052 2786 int unsignedp;
b50d17a1
RK
2787 tree pos, constant = 0, offset = 0;
2788 rtx to_rtx = target;
bbf6f052 2789
f32fd778
RS
2790 /* Just ignore missing fields.
2791 We cleared the whole structure, above,
2792 if any fields are missing. */
2793 if (field == 0)
2794 continue;
2795
bbf6f052
RK
2796 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2797 unsignedp = TREE_UNSIGNED (field);
2798 mode = DECL_MODE (field);
2799 if (DECL_BIT_FIELD (field))
2800 mode = VOIDmode;
2801
b50d17a1
RK
2802 pos = DECL_FIELD_BITPOS (field);
2803 if (TREE_CODE (pos) == INTEGER_CST)
2804 constant = pos;
2805 else if (TREE_CODE (pos) == PLUS_EXPR
2806 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2807 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
2808 else
2809 offset = pos;
2810
2811 if (constant)
2812 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2813
2814 if (offset)
2815 {
2816 rtx offset_rtx;
2817
2818 if (contains_placeholder_p (offset))
2819 offset = build (WITH_RECORD_EXPR, sizetype,
2820 offset, exp);
bbf6f052 2821
b50d17a1
RK
2822 offset = size_binop (FLOOR_DIV_EXPR, offset,
2823 size_int (BITS_PER_UNIT));
bbf6f052 2824
b50d17a1
RK
2825 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2826 if (GET_CODE (to_rtx) != MEM)
2827 abort ();
2828
2829 to_rtx
2830 = change_address (to_rtx, VOIDmode,
2831 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2832 force_reg (Pmode, offset_rtx)));
2833 }
2834
2835 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
bbf6f052
RK
2836 /* The alignment of TARGET is
2837 at least what its type requires. */
2838 VOIDmode, 0,
4af3895e
JVA
2839 TYPE_ALIGN (type) / BITS_PER_UNIT,
2840 int_size_in_bytes (type));
bbf6f052
RK
2841 }
2842 }
4af3895e 2843 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
2844 {
2845 register tree elt;
2846 register int i;
4af3895e 2847 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
2848 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2849 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 2850 tree elttype = TREE_TYPE (type);
bbf6f052
RK
2851
2852 /* If the constructor has fewer fields than the structure,
4af3895e
JVA
2853 clear the whole structure first. Similarly if this is a
2854 static constructor of a non-BLKmode object. */
bbf6f052 2855
4af3895e
JVA
2856 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2857 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
597bb7f1 2858 clear_storage (target, int_size_in_bytes (type));
bbf6f052
RK
2859 else
2860 /* Inform later passes that the old value is dead. */
2861 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2862
2863 /* Store each element of the constructor into
2864 the corresponding element of TARGET, determined
2865 by counting the elements. */
2866 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2867 elt;
2868 elt = TREE_CHAIN (elt), i++)
2869 {
2870 register enum machine_mode mode;
2871 int bitsize;
2872 int bitpos;
2873 int unsignedp;
03dc44a6
RS
2874 tree index = TREE_PURPOSE (elt);
2875 rtx xtarget = target;
bbf6f052
RK
2876
2877 mode = TYPE_MODE (elttype);
2878 bitsize = GET_MODE_BITSIZE (mode);
2879 unsignedp = TREE_UNSIGNED (elttype);
2880
03dc44a6
RS
2881 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
2882 {
2883 /* We don't currently allow variable indices in a
2884 C initializer, but let's try here to support them. */
2885 rtx pos_rtx, addr, xtarget;
2886 tree position;
2887
2888 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
2889 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
2890 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
2891 xtarget = change_address (target, mode, addr);
2892 store_expr (TREE_VALUE (elt), xtarget, 0);
2893 }
2894 else
2895 {
2896 if (index != 0)
7c314719 2897 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
2898 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2899 else
2900 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2901
2902 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
2903 /* The alignment of TARGET is
2904 at least what its type requires. */
2905 VOIDmode, 0,
2906 TYPE_ALIGN (type) / BITS_PER_UNIT,
2907 int_size_in_bytes (type));
2908 }
bbf6f052
RK
2909 }
2910 }
2911
2912 else
2913 abort ();
2914}
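/* Editorial example: for

	struct { int a, b; } x = { 1 };

   the constructor list is shorter than TYPE_FIELDS, so the code
   above first clears all of X with clear_storage and then performs
   a single store_field for the initialized field A.  */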
2915
2916/* Store the value of EXP (an expression tree)
2917 into a subfield of TARGET which has mode MODE and occupies
2918 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2919 If MODE is VOIDmode, it means that we are storing into a bit-field.
2920
2921 If VALUE_MODE is VOIDmode, return nothing in particular.
2922 UNSIGNEDP is not used in this case.
2923
2924 Otherwise, return an rtx for the value stored. This rtx
2925 has mode VALUE_MODE if that is convenient to do.
2926 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2927
2928 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2929 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2930
2931static rtx
2932store_field (target, bitsize, bitpos, mode, exp, value_mode,
2933 unsignedp, align, total_size)
2934 rtx target;
2935 int bitsize, bitpos;
2936 enum machine_mode mode;
2937 tree exp;
2938 enum machine_mode value_mode;
2939 int unsignedp;
2940 int align;
2941 int total_size;
2942{
906c4e36 2943 HOST_WIDE_INT width_mask = 0;
bbf6f052 2944
906c4e36
RK
2945 if (bitsize < HOST_BITS_PER_WIDE_INT)
2946 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
2947
2948 /* If we are storing into an unaligned field of an aligned union that is
2949 in a register, we may have the mode of TARGET being an integer mode but
2950 MODE == BLKmode. In that case, get an aligned object whose size and
2951 alignment are the same as TARGET and store TARGET into it (we can avoid
2952 the store if the field being stored is the entire width of TARGET). Then
2953 call ourselves recursively to store the field into a BLKmode version of
2954 that object. Finally, load from the object into TARGET. This is not
2955 very efficient in general, but should only be slightly more expensive
2956 than the otherwise-required unaligned accesses. Perhaps this can be
2957 cleaned up later. */
2958
2959 if (mode == BLKmode
2960 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2961 {
2962 rtx object = assign_stack_temp (GET_MODE (target),
2963 GET_MODE_SIZE (GET_MODE (target)), 0);
2964 rtx blk_object = copy_rtx (object);
2965
2966 PUT_MODE (blk_object, BLKmode);
2967
2968 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2969 emit_move_insn (object, target);
2970
2971 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2972 align, total_size);
2973
46093b97
RS
2974 /* Even though we aren't returning target, we need to
2975 give it the updated value. */
bbf6f052
RK
2976 emit_move_insn (target, object);
2977
46093b97 2978 return blk_object;
bbf6f052
RK
2979 }
2980
2981 /* If the structure is in a register or if the component
2982 is a bit field, we cannot use addressing to access it.
2983 Use bit-field techniques or SUBREG to store in it. */
2984
4fa52007
RK
2985 if (mode == VOIDmode
2986 || (mode != BLKmode && ! direct_store[(int) mode])
2987 || GET_CODE (target) == REG
c980ac49 2988 || GET_CODE (target) == SUBREG
ccc98036
RS
2989 /* If the field isn't aligned enough to store as an ordinary memref,
2990 store it as a bit field. */
2991 || (STRICT_ALIGNMENT
2992 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
2993 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 2994 {
906c4e36 2995 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
2996 /* Store the value in the bitfield. */
2997 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2998 if (value_mode != VOIDmode)
2999 {
3000 /* The caller wants an rtx for the value. */
3001 /* If possible, avoid refetching from the bitfield itself. */
3002 if (width_mask != 0
3003 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 3004 {
9074de27 3005 tree count;
5c4d7cfb 3006 enum machine_mode tmode;
86a2c12a 3007
5c4d7cfb
RS
3008 if (unsignedp)
3009 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3010 tmode = GET_MODE (temp);
86a2c12a
RS
3011 if (tmode == VOIDmode)
3012 tmode = value_mode;
5c4d7cfb
RS
3013 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3014 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3015 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3016 }
bbf6f052 3017 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
3018 NULL_RTX, value_mode, 0, align,
3019 total_size);
bbf6f052
RK
3020 }
3021 return const0_rtx;
3022 }
3023 else
3024 {
3025 rtx addr = XEXP (target, 0);
3026 rtx to_rtx;
3027
3028 /* If a value is wanted, it must be the lhs;
3029 so make the address stable for multiple use. */
3030
3031 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3032 && ! CONSTANT_ADDRESS_P (addr)
3033 /* A frame-pointer reference is already stable. */
3034 && ! (GET_CODE (addr) == PLUS
3035 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3036 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3037 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3038 addr = copy_to_reg (addr);
3039
3040 /* Now build a reference to just the desired component. */
3041
3042 to_rtx = change_address (target, mode,
3043 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3044 MEM_IN_STRUCT_P (to_rtx) = 1;
3045
3046 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3047 }
3048}
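/* Editorial sketch: storing into a 5-bit field that starts 3 bits
   into a structure in memory takes the store_bit_field path above,
   conceptually

	temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
	store_bit_field (target, 5, 3, VOIDmode, temp, align,
			 total_size);

   and, when VALUE_MODE is not VOIDmode, hands back the value masked,
   shifted or re-extracted rather than refetched from the field.  */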
3049\f
3050/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3051 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 3052 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
3053
3054 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3055 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
3056 If the position of the field is variable, we store a tree
3057 giving the variable offset (in units) in *POFFSET.
3058 This offset is in addition to the bit position.
3059 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
3060
3061 If any of the extraction expressions is volatile,
3062 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3063
3064 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3065 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
3066 is redundant.
3067
3068 If the field describes a variable-sized object, *PMODE is set to
3069 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3070 this case, but the address of the object can be found. */
bbf6f052
RK
3071
3072tree
4969d05d
RK
3073get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3074 punsignedp, pvolatilep)
bbf6f052
RK
3075 tree exp;
3076 int *pbitsize;
3077 int *pbitpos;
7bb0943f 3078 tree *poffset;
bbf6f052
RK
3079 enum machine_mode *pmode;
3080 int *punsignedp;
3081 int *pvolatilep;
3082{
b50d17a1 3083 tree orig_exp = exp;
bbf6f052
RK
3084 tree size_tree = 0;
3085 enum machine_mode mode = VOIDmode;
742920c7 3086 tree offset = integer_zero_node;
bbf6f052
RK
3087
3088 if (TREE_CODE (exp) == COMPONENT_REF)
3089 {
3090 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3091 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3092 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3093 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3094 }
3095 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3096 {
3097 size_tree = TREE_OPERAND (exp, 1);
3098 *punsignedp = TREE_UNSIGNED (exp);
3099 }
3100 else
3101 {
3102 mode = TYPE_MODE (TREE_TYPE (exp));
3103 *pbitsize = GET_MODE_BITSIZE (mode);
3104 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3105 }
3106
3107 if (size_tree)
3108 {
3109 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
3110 mode = BLKmode, *pbitsize = -1;
3111 else
3112 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
3113 }
3114
3115 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3116 and find the ultimate containing object. */
3117
3118 *pbitpos = 0;
3119
3120 while (1)
3121 {
7bb0943f 3122 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 3123 {
7bb0943f
RS
3124 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3125 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3126 : TREE_OPERAND (exp, 2));
bbf6f052 3127
e7f3c83f
RK
3128 /* If this field hasn't been filled in yet, don't go
3129 past it. This should only happen when folding expressions
3130 made during type construction. */
3131 if (pos == 0)
3132 break;
3133
7bb0943f
RS
3134 if (TREE_CODE (pos) == PLUS_EXPR)
3135 {
3136 tree constant, var;
3137 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3138 {
3139 constant = TREE_OPERAND (pos, 0);
3140 var = TREE_OPERAND (pos, 1);
3141 }
3142 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3143 {
3144 constant = TREE_OPERAND (pos, 1);
3145 var = TREE_OPERAND (pos, 0);
3146 }
3147 else
3148 abort ();
742920c7 3149
7bb0943f 3150 *pbitpos += TREE_INT_CST_LOW (constant);
742920c7
RK
3151 offset = size_binop (PLUS_EXPR, offset,
3152 size_binop (FLOOR_DIV_EXPR, var,
3153 size_int (BITS_PER_UNIT)));
7bb0943f
RS
3154 }
3155 else if (TREE_CODE (pos) == INTEGER_CST)
3156 *pbitpos += TREE_INT_CST_LOW (pos);
3157 else
3158 {
3159 /* Assume here that the offset is a multiple of a unit.
3160 If not, there should be an explicitly added constant. */
742920c7
RK
3161 offset = size_binop (PLUS_EXPR, offset,
3162 size_binop (FLOOR_DIV_EXPR, pos,
3163 size_int (BITS_PER_UNIT)));
7bb0943f 3164 }
bbf6f052 3165 }
bbf6f052 3166
742920c7 3167 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 3168 {
742920c7
RK
3169 /* This code is based on the code in case ARRAY_REF in expand_expr
3170 below. We assume here that the size of an array element is
3171 always an integral multiple of BITS_PER_UNIT. */
3172
3173 tree index = TREE_OPERAND (exp, 1);
3174 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3175 tree low_bound
3176 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3177 tree index_type = TREE_TYPE (index);
3178
3179 if (! integer_zerop (low_bound))
3180 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3181
3182 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3183 {
3184 index = convert (type_for_size (POINTER_SIZE, 0), index);
3185 index_type = TREE_TYPE (index);
3186 }
3187
3188 index = fold (build (MULT_EXPR, index_type, index,
3189 TYPE_SIZE (TREE_TYPE (exp))));
3190
3191 if (TREE_CODE (index) == INTEGER_CST
3192 && TREE_INT_CST_HIGH (index) == 0)
3193 *pbitpos += TREE_INT_CST_LOW (index);
3194 else
3195 offset = size_binop (PLUS_EXPR, offset,
3196 size_binop (FLOOR_DIV_EXPR, index,
3197 size_int (BITS_PER_UNIT)));
bbf6f052
RK
3198 }
3199 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3200 && ! ((TREE_CODE (exp) == NOP_EXPR
3201 || TREE_CODE (exp) == CONVERT_EXPR)
3202 && (TYPE_MODE (TREE_TYPE (exp))
3203 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3204 break;
7bb0943f
RS
3205
3206 /* If any reference in the chain is volatile, the effect is volatile. */
3207 if (TREE_THIS_VOLATILE (exp))
3208 *pvolatilep = 1;
bbf6f052
RK
3209 exp = TREE_OPERAND (exp, 0);
3210 }
3211
3212 /* If this was a bit-field, see if there is a mode that allows direct
3213 access in case EXP is in memory. */
e7f3c83f 3214 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
bbf6f052
RK
3215 {
3216 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3217 if (mode == BLKmode)
3218 mode = VOIDmode;
3219 }
3220
742920c7
RK
3221 if (integer_zerop (offset))
3222 offset = 0;
3223
b50d17a1
RK
3224 if (offset != 0 && contains_placeholder_p (offset))
3225 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3226
bbf6f052 3227 *pmode = mode;
7bb0943f 3228 *poffset = offset;
bbf6f052
RK
3229 return exp;
3230}
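/* Editorial example (field layout assumed): for a reference `s.b'
   where B is a bit-field of width 5 placed 3 bits into S,

	tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep);

   returns the decl for S with BITSIZE == 5, BITPOS == 3,
   OFFSET == 0 and MODE1 == VOIDmode, since no integer mode can
   access that field directly.  */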
3231\f
3232/* Given an rtx VALUE that may contain additions and multiplications,
3233 return an equivalent value that just refers to a register or memory.
3234 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
3235 and returning a pseudo-register containing the value.
3236
3237 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
3238
3239rtx
3240force_operand (value, target)
3241 rtx value, target;
3242{
3243 register optab binoptab = 0;
3244 /* Use a temporary to force order of execution of calls to
3245 `force_operand'. */
3246 rtx tmp;
3247 register rtx op2;
3248 /* Use subtarget as the target for operand 0 of a binary operation. */
3249 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3250
3251 if (GET_CODE (value) == PLUS)
3252 binoptab = add_optab;
3253 else if (GET_CODE (value) == MINUS)
3254 binoptab = sub_optab;
3255 else if (GET_CODE (value) == MULT)
3256 {
3257 op2 = XEXP (value, 1);
3258 if (!CONSTANT_P (op2)
3259 && !(GET_CODE (op2) == REG && op2 != subtarget))
3260 subtarget = 0;
3261 tmp = force_operand (XEXP (value, 0), subtarget);
3262 return expand_mult (GET_MODE (value), tmp,
906c4e36 3263 force_operand (op2, NULL_RTX),
bbf6f052
RK
3264 target, 0);
3265 }
3266
3267 if (binoptab)
3268 {
3269 op2 = XEXP (value, 1);
3270 if (!CONSTANT_P (op2)
3271 && !(GET_CODE (op2) == REG && op2 != subtarget))
3272 subtarget = 0;
3273 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3274 {
3275 binoptab = add_optab;
3276 op2 = negate_rtx (GET_MODE (value), op2);
3277 }
3278
3279 /* Check for an addition with OP2 a constant integer and our first
3280 operand a PLUS of a virtual register and something else. In that
3281 case, we want to emit the sum of the virtual register and the
3282 constant first and then add the other value. This allows virtual
3283 register instantiation to simply modify the constant rather than
3284 creating another one around this addition. */
3285 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3286 && GET_CODE (XEXP (value, 0)) == PLUS
3287 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3288 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3289 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3290 {
3291 rtx temp = expand_binop (GET_MODE (value), binoptab,
3292 XEXP (XEXP (value, 0), 0), op2,
3293 subtarget, 0, OPTAB_LIB_WIDEN);
3294 return expand_binop (GET_MODE (value), binoptab, temp,
3295 force_operand (XEXP (XEXP (value, 0), 1), 0),
3296 target, 0, OPTAB_LIB_WIDEN);
3297 }
3298
3299 tmp = force_operand (XEXP (value, 0), subtarget);
3300 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 3301 force_operand (op2, NULL_RTX),
bbf6f052 3302 target, 0, OPTAB_LIB_WIDEN);
8008b228 3303 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
3304 because the only operations we are expanding here are signed ones. */
3305 }
3306 return value;
3307}
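/* Editorial example: for VALUE == (plus:SI (reg:SI 100)
   (const_int 4)) the code above reduces to

	expand_binop (SImode, add_optab, op0, GEN_INT (4),
		      target, 0, OPTAB_LIB_WIDEN);

   (OP0 naming the forced first operand) and returns the pseudo
   holding the sum; a VALUE that is already a REG, SUBREG, MEM or
   constant falls through and is returned unchanged.  */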
3308\f
3309/* Subroutine of expand_expr:
3310 save the non-copied parts (LIST) of an expr (LHS), and return a list
3311 which can restore these values to their previous values,
3312 should something modify their storage. */
3313
3314static tree
3315save_noncopied_parts (lhs, list)
3316 tree lhs;
3317 tree list;
3318{
3319 tree tail;
3320 tree parts = 0;
3321
3322 for (tail = list; tail; tail = TREE_CHAIN (tail))
3323 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3324 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3325 else
3326 {
3327 tree part = TREE_VALUE (tail);
3328 tree part_type = TREE_TYPE (part);
906c4e36 3329 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
3330 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3331 int_size_in_bytes (part_type), 0);
3332 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 3333 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 3334 parts = tree_cons (to_be_saved,
906c4e36
RK
3335 build (RTL_EXPR, part_type, NULL_TREE,
3336 (tree) target),
bbf6f052
RK
3337 parts);
3338 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3339 }
3340 return parts;
3341}
3342
3343/* Subroutine of expand_expr:
3344 record the non-copied parts (LIST) of an expr (LHS), and return a list
3345 which specifies the initial values of these parts. */
3346
3347static tree
3348init_noncopied_parts (lhs, list)
3349 tree lhs;
3350 tree list;
3351{
3352 tree tail;
3353 tree parts = 0;
3354
3355 for (tail = list; tail; tail = TREE_CHAIN (tail))
3356 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3357 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3358 else
3359 {
3360 tree part = TREE_VALUE (tail);
3361 tree part_type = TREE_TYPE (part);
906c4e36 3362 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
3363 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3364 }
3365 return parts;
3366}

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  */

static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0)
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe; otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
        return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
          || (GET_CODE (XEXP (x, 0)) == PLUS
              && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
        return ((TREE_VALUE (exp) == 0
                 || safe_from_p (x, TREE_VALUE (exp)))
                && (TREE_CHAIN (exp) == 0
                    || safe_from_p (x, TREE_CHAIN (exp))));
      else
        return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
              && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          return (staticp (TREE_OPERAND (exp, 0))
                  || safe_from_p (x, TREE_OPERAND (exp, 0)));

        case INDIRECT_REF:
          if (GET_CODE (x) == MEM)
            return 0;
          break;

        case CALL_EXPR:
          exp_rtl = CALL_EXPR_RTL (exp);
          if (exp_rtl == 0)
            {
              /* Assume that the call will clobber all hard registers and
                 all of memory.  */
              if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
                  || GET_CODE (x) == MEM)
                return 0;
            }

          break;

        case RTL_EXPR:
          exp_rtl = RTL_EXPR_RTL (exp);
          if (exp_rtl == 0)
            /* We don't know what this can modify.  */
            return 0;

          break;

        case WITH_CLEANUP_EXPR:
          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case SAVE_EXPR:
          exp_rtl = SAVE_EXPR_RTL (exp);
          break;

        case BIND_EXPR:
          /* The only operand we look at is operand 1.  The rest aren't
             part of the expression.  */
          return safe_from_p (x, TREE_OPERAND (exp, 1));

        case METHOD_CALL_EXPR:
          /* This takes a rtx argument, but shouldn't appear here.  */
          abort ();
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i)))
          return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (GET_CODE (exp_rtl) == REG
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
                    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
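
/* An illustrative sketch, not part of the compiler: the final conflict
   rule of safe_from_p in isolation.  Two locations conflict when they
   are the same rtx, or when both are memory references and the object
   is not readonly, since two MEMs may alias.  `locations_conflict_p'
   is a hypothetical helper written only to restate that rule.  */
#if 0
static int
locations_conflict_p (x, exp_rtl, exp_readonly)
     rtx x, exp_rtl;
     int exp_readonly;
{
  return (rtx_equal_p (x, exp_rtl)
          || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
              && ! exp_readonly));
}
#endif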

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}
\f
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.  */
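
/* An illustrative sketch of the caller pattern the contract above
   implies (not itself compiler code): since TARGET is only a hint,
   a caller that needs the value in a particular place must copy it,
   as store_expr and several cases below do.  */
#if 0
  temp = expand_expr (exp, target, VOIDmode, 0);
  if (temp != target)
    emit_move_insn (target, temp);
#endif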

rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  /* Maybe defer this until sure not doing bytecode?  */
  int ignore = (target == const0_rtx
                || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                     || code == CONVERT_EXPR || code == REFERENCE_EXPR
                     || code == COND_EXPR)
                    && TREE_CODE (type) == VOID_TYPE));
  tree context;

  if (output_bytecode)
    {
      bc_expand_expr (exp);
      return NULL;
    }

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (GET_CODE (temp) == MEM)
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == '1')
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, modifier);
      else if (TREE_CODE_CLASS (code) == '2'
               || TREE_CODE_CLASS (code) == '<')
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, modifier);

      target = 0;
    }

  /* If we will do cse, generate all results into pseudo registers,
     since (1) that allows cse to find more things
     and (2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);
        /* Handle using a label in a containing function.  */
        if (function != current_function_decl && function != 0)
          {
            struct function *p = find_function_data (function);
            /* Allocate in the memory associated with the function
               that the label is in.  */
            push_obstacks (p->function_obstack,
                           p->function_maybepermanent_obstack);

            p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
                                        label_rtx (exp), p->forced_labels);
            pop_obstacks ();
          }
        else if (modifier == EXPAND_INITIALIZER)
          forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
                                   label_rtx (exp), forced_labels);
        temp = gen_rtx (MEM, FUNCTION_MODE,
                        gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
        if (function != current_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }
      /* ... fall through to the VAR_DECL handling.  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          push_obstacks_nochange ();
          end_temporary_allocation ();
          layout_decl (exp, 0);
          PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
          pop_obstacks ();
        }
      /* ... fall through.  */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();
      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          mark_addressable (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);
          return change_address (DECL_RTL (exp), mode, addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */
      if (GET_CODE (DECL_RTL (exp)) == MEM
          && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
                               XEXP (DECL_RTL (exp), 0));
      if (GET_CODE (DECL_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_SUM
          && modifier != EXPAND_INITIALIZER)
        {
          /* DECL_RTL probably contains a constant address.
             On RISC machines where a constant address isn't valid,
             make some insns to get that address into a register.  */
          if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
              || (flag_force_addr
                  && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
            return change_address (DECL_RTL (exp), VOIDmode,
                                   copy_rtx (XEXP (DECL_RTL (exp), 0)));
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != mode)
        {
          enum machine_mode decl_mode = DECL_MODE (exp);

          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */

          PROMOTE_MODE (decl_mode, unsignedp, type);

          if (decl_mode != GET_MODE (DECL_RTL (exp)))
            abort ();

          temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

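      /* Sketch of what the SUBREG marking above expresses, assuming a
         machine whose PROMOTE_MODE widens HImode to SImode: a `short'
         variable then lives in an SImode pseudo, say (reg:SI 100), and
         a reference to it comes out as

             (subreg:HI (reg:SI 100) 0)

         with SUBREG_PROMOTED_VAR_P set, so later passes know the upper
         bits already hold a valid extension and need not re-extend.  */
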
      return DECL_RTL (exp);

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp),
                                 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
        return change_address (TREE_CST_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case SAVE_EXPR:
      context = decl_function_context (exp);
      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return change_address (temp, mode,
                                 fix_lexical_addr (XEXP (temp, 0), exp));
        }
      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == BLKmode)
            {
              temp
                = assign_stack_temp (mode, int_size_in_bytes (type), 0);
              MEM_IN_STRUCT_P (temp)
                = (TREE_CODE (type) == RECORD_TYPE
                   || TREE_CODE (type) == UNION_TYPE
                   || TREE_CODE (type) == QUAL_UNION_TYPE
                   || TREE_CODE (type) == ARRAY_TYPE);
            }
          else
            {
              enum machine_mode var_mode = mode;

              if (TREE_CODE (type) == INTEGER_TYPE
                  || TREE_CODE (type) == ENUMERAL_TYPE
                  || TREE_CODE (type) == BOOLEAN_TYPE
                  || TREE_CODE (type) == CHAR_TYPE
                  || TREE_CODE (type) == REAL_TYPE
                  || TREE_CODE (type) == POINTER_TYPE
                  || TREE_CODE (type) == OFFSET_TYPE)
                {
                  PROMOTE_MODE (var_mode, unsignedp, type);
                }

              temp = gen_reg_rtx (var_mode);
            }

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
                                      save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  Note that `unsignedp' was modified above in
             this case.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
            }

          store_expr (TREE_OPERAND (exp, 0), temp, 0);
        }

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  Note
         that `unsignedp' was modified above in this case.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return SAVE_EXPR_RTL (exp);

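      /* Illustrative note on what SAVE_EXPR guarantees: for GNU C such as

             void f (int n) { int a[n]; g (a, sizeof a); }

         the size computation involving `n' is wrapped in a SAVE_EXPR so
         it is expanded exactly once; the rtx is remembered in
         SAVE_EXPR_RTL and every later expansion of the same node simply
         returns it.  */
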
    case PLACEHOLDER_EXPR:
      /* If there is an object on the head of the placeholder list,
         see if some object in its references is of type TYPE.  For
         further information, see tree.def.  */
      if (placeholder_list)
        {
          tree object;

          for (object = TREE_PURPOSE (placeholder_list);
               TREE_TYPE (object) != type
               && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
                   || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
                   || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
                   || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
               object = TREE_OPERAND (object, 0))
            ;

          if (object && TREE_TYPE (object))
            return expand_expr (object, original_target, tmode, modifier);
        }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
                            tmode, modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

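      /* An illustrative sketch, not compiler code, of how a caller
         supplies the record: a size tree that refers to a field of a
         variable-sized record does so through a PLACEHOLDER_EXPR, and
         wrapping it as below names the particular record object whose
         field should be used (`record' and `size_rtx' are hypothetical
         locals here).  */
#if 0
      size = build (WITH_RECORD_EXPR, sizetype, size, record);
      size_rtx = expand_expr (size, NULL_RTX, VOIDmode, 0);
#endif
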
    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
        tree vars = TREE_OPERAND (exp, 0);
        int vars_need_expansion = 0;

        /* Need to open a binding contour here because
           if there are any cleanups they must be contained here.  */
        expand_start_bindings (0);

        /* Mark the corresponding BLOCK for output in its proper place.  */
        if (TREE_OPERAND (exp, 2) != 0
            && ! TREE_USED (TREE_OPERAND (exp, 2)))
          insert_block (TREE_OPERAND (exp, 2));

        /* If VARS have not yet been expanded, expand them now.  */
        while (vars)
          {
            if (DECL_RTL (vars) == 0)
              {
                vars_need_expansion = 1;
                expand_decl (vars);
              }
            expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }

        temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);

        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

        return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
        abort ();
      emit_insns (RTL_EXPR_SEQUENCE (exp));
      RTL_EXPR_SEQUENCE (exp) = const0_rtx;
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;
          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
          return const0_rtx;
        }
      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  If we are making an initializer and all operands are
         constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
                && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
               || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp);
          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM
              && !memory_address_p (GET_MODE (constructor),
                                    XEXP (constructor, 0)))
            constructor = change_address (constructor, VOIDmode,
                                          XEXP (constructor, 0));
          return constructor;
        }

      else
        {
          if (target == 0 || ! safe_from_p (target, exp))
            {
              if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
                target = gen_reg_rtx (mode);
              else
                {
                  enum tree_code c = TREE_CODE (type);
                  target
                    = assign_stack_temp (mode, int_size_in_bytes (type), 0);
                  if (c == RECORD_TYPE || c == UNION_TYPE
                      || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
                    MEM_IN_STRUCT_P (target) = 1;
                }
            }
          store_constructor (exp, target);
          return target;
        }

    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);
        tree exp2;

        /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
           for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
           This code has the same general effect as simply doing
           expand_expr on the save expr, except that the expression PTR
           is computed for use as a memory address.  This means different
           code, suitable for indexing, may be generated.  */
        if (TREE_CODE (exp1) == SAVE_EXPR
            && SAVE_EXPR_RTL (exp1) == 0
            && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
            && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
            && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
          {
            temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
                                VOIDmode, EXPAND_SUM);
            op0 = memory_address (mode, temp);
            op0 = copy_all_regs (op0);
            SAVE_EXPR_RTL (exp1) = op0;
          }
        else
          {
            op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
            op0 = memory_address (mode, op0);
          }

        temp = gen_rtx (MEM, mode, op0);
        /* If address was computed by addition,
           mark this as an element of an aggregate.  */
        if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
            || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
                && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
            || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
            || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
            || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
            || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
            || (TREE_CODE (exp1) == ADDR_EXPR
                && (exp2 = TREE_OPERAND (exp1, 0))
                && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
                    || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
                    || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
                    || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
          MEM_IN_STRUCT_P (temp) = 1;
        MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
         a location is accessed through a pointer to const does not mean
         that the value there can never change.  */
        RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
#endif
        return temp;
      }

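      /* Sketch of the source pattern the SAVE_EXPR address case above
         serves: for

             *p += 1;

         the front end puts `p' inside a SAVE_EXPR so its value is
         computed once, and expanding that operand with EXPAND_SUM lets
         the address come out in an indexable form (e.g. reg+const)
         instead of being forced into a lone pseudo.  */
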
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();

      {
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = TREE_OPERAND (exp, 1);
        tree index_type = TREE_TYPE (index);
        int i;

        if (TREE_CODE (low_bound) != INTEGER_CST
            && contains_placeholder_p (low_bound))
          low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion, (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

           But sizetype isn't quite right either (especially if
           the lowbound is negative).  FIXME */

        if (! integer_zerop (low_bound))
          index = fold (build (MINUS_EXPR, index_type, index,
                               convert (sizetype, low_bound)));

        if (TREE_CODE (index) != INTEGER_CST
            || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
          {
            /* Nonconstant array index or nonconstant element size.
               Generate the tree for *(&array+index) and expand that,
               except do it in a language-independent way
               and don't complain about non-lvalue arrays.
               `mark_addressable' should already have been called
               for any array for which this case will be reached.  */

            /* Don't forget the const or volatile flag from the array
               element.  */
            tree variant_type = build_type_variant (type,
                                                    TREE_READONLY (exp),
                                                    TREE_THIS_VOLATILE (exp));
            tree array_adr = build1 (ADDR_EXPR,
                                     build_pointer_type (variant_type), array);
            tree elt;
            tree size = size_in_bytes (type);

            /* Convert the integer argument to a type the same size as a
               pointer so the multiply won't overflow spuriously.  */
            if (TYPE_PRECISION (index_type) != POINTER_SIZE)
              index = convert (type_for_size (POINTER_SIZE, 0), index);

            if (TREE_CODE (size) != INTEGER_CST
                && contains_placeholder_p (size))
              size = build (WITH_RECORD_EXPR, sizetype, size, exp);

            /* Don't think the address has side effects
               just because the array does.
               (In some cases the address might have side effects,
               and we fail to record that fact here.  However, it should not
               matter, since expand_expr should not care.)  */
            TREE_SIDE_EFFECTS (array_adr) = 0;

            elt = build1 (INDIRECT_REF, type,
                          fold (build (PLUS_EXPR,
                                       TYPE_POINTER_TO (variant_type),
                                       array_adr,
                                       fold (build (MULT_EXPR,
                                                    TYPE_POINTER_TO (variant_type),
                                                    index, size)))));

            /* Volatility, etc., of new expression is same as old
               expression.  */
            TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
            TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
            TREE_READONLY (elt) = TREE_READONLY (exp);

            return expand_expr (elt, target, tmode, modifier);
          }

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.  */

        if (TREE_CODE (array) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && !TREE_INT_CST_HIGH (index)
            && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
          {
            if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
              {
                exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
                TREE_TYPE (exp) = integer_type_node;
                return expand_expr (exp, target, tmode, modifier);
              }
            if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
              {
                exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
                TREE_TYPE (exp) = integer_type_node;
                return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
                                             exp),
                                    target, tmode, modifier);
              }
          }

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
          {
            if (TREE_CODE (index) == INTEGER_CST
                && TREE_INT_CST_HIGH (index) == 0)
              {
                tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

                i = TREE_INT_CST_LOW (index);
                while (elem && i--)
                  elem = TREE_CHAIN (elem);
                if (elem)
                  return expand_expr (fold (TREE_VALUE (elem)), target,
                                      tmode, modifier);
              }
          }

        else if (optimize >= 1
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
          {
            if (TREE_CODE (index) == INTEGER_CST
                && TREE_INT_CST_HIGH (index) == 0)
              {
                tree init = DECL_INITIAL (array);

                i = TREE_INT_CST_LOW (index);
                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem = CONSTRUCTOR_ELTS (init);

                    while (elem
                           && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
                      elem = TREE_CHAIN (elem);
                    if (elem)
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && i < TREE_STRING_LENGTH (init))
                  {
                    temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
                    return convert_to_mode (mode, temp, 0);
                  }
              }
          }
      }

      /* Treat array-ref with constant index as a component-ref.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  */
      if (code != ARRAY_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
              return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
        }

      {
        enum machine_mode mode1;
        int bitsize;
        int bitpos;
        tree offset;
        int volatilep = 0;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep);

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        /* In some cases, we will be offsetting OP0's address by a constant.
           So get it as a sum, if possible.  If we will be using it
           directly in an insn, we validate it.  */
        op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);

        /* If this is a constant, put it into a register if it is a
           legitimate constant, and into memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

            if (GET_CODE (op0) != MEM)
              abort ();
            op0 = change_address (op0, VOIDmode,
                                  gen_rtx (PLUS, Pmode, XEXP (op0, 0),
                                           force_reg (Pmode, offset_rtx)));
          }

        /* Don't forget about volatility even if this is a bitfield.  */
        if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
          {
            op0 = copy_rtx (op0);
            MEM_VOLATILE_P (op0) = 1;
          }

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
            || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (STRICT_ALIGNMENT
                && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
            || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode)
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              abort ();

            op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
                                     unsignedp, target, ext_mode, ext_mode,
                                     TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
                                     int_size_in_bytes (TREE_TYPE (tem)));
            if (mode == BLKmode)
              {
                rtx new = assign_stack_temp (ext_mode,
                                             bitsize / BITS_PER_UNIT, 0);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                MEM_IN_STRUCT_P (op0) = 1;
              }

            return op0;
          }

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
                                                    (bitpos / BITS_PER_UNIT)));
        else
          op0 = change_address (op0, mode1,
                                plus_constant (XEXP (op0, 0),
                                               (bitpos / BITS_PER_UNIT)));
        MEM_IN_STRUCT_P (op0) = 1;
        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
          return op0;
        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
        convert_move (target, op0, unsignedp);
        return target;
      }

    case OFFSET_REF:
      {
        tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
        tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
        op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
        temp = gen_rtx (MEM, mode, memory_address (mode, op0));
        MEM_IN_STRUCT_P (temp) = 1;
        MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
         a location is accessed through a pointer to const does not mean
         that the value there can never change.  */
        RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
#endif
        return temp;
      }

      /* Intended for a reference to a buffer of a file-object in Pascal.
         But it's not certain that a special tree code will really be
         necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

      /* IN_EXPR: Inlined Pascal set IN expression.

         Algorithm:
             rlo       = set_low - (set_low % bits_per_word);
             the_word  = set [ (index - rlo) / bits_per_word ];
             bit_index = index % bits_per_word;
             bitmask   = 1 << bit_index;
             return !!(the_word & bitmask);  */
    case IN_EXPR:
      preexpand_calls (exp);
      {
        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        tree set_type = TREE_TYPE (set);

        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));

        rtx index_val;
        rtx lo_r;
        rtx hi_r;
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;
        rtx setval, setaddr;
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));

        if (target == 0)
          target = gen_reg_rtx (mode);

        /* If domain is empty, answer is no.  */
        if (tree_int_cst_lt (set_high_bound, set_low_bound))
          return const0_rtx;

        index_val = expand_expr (index, 0, VOIDmode, 0);
        lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        setval = expand_expr (set, 0, VOIDmode, 0);
        setaddr = XEXP (setval, 0);

        /* Compare index against bounds, if they are constant.  */
        if (GET_CODE (index_val) == CONST_INT
            && GET_CODE (lo_r) == CONST_INT
            && INTVAL (index_val) < INTVAL (lo_r))
          return const0_rtx;

        if (GET_CODE (index_val) == CONST_INT
            && GET_CODE (hi_r) == CONST_INT
            && INTVAL (hi_r) < INTVAL (index_val))
          return const0_rtx;

        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          {
            emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
                           GET_MODE (index_val), 0, 0);
            emit_jump_insn (gen_blt (op1));
          }

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          {
            emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
                           GET_MODE (index_val), 0, 0);
            emit_jump_insn (gen_bgt (op1));
          }

        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, 0, OPTAB_LIB_WIDEN);

        diff = expand_binop (index_mode, sub_optab,
                             index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab,
                                             diff, setaddr, NULL_RTX, 0,
                                             OPTAB_LIB_WIDEN));
        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx (MEM, byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }
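
/* An illustrative, standalone version of the membership test expanded
   above, written as plain C for a byte-addressed set; `in_set' is a
   hypothetical helper, with BITS_PER_UNIT playing the role of
   bits_per_word from the algorithm comment.  */
#if 0
static int
in_set (set, set_low, index)
     unsigned char *set;
     int set_low, index;
{
  int rlo = set_low - (set_low % BITS_PER_UNIT);
  unsigned char the_word = set[(index - rlo) / BITS_PER_UNIT];
  int bit_index = index % BITS_PER_UNIT;
  unsigned char bitmask = 1 << bit_index;
  return !!(the_word & bitmask);
}
#endif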

    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
        {
          RTL_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
          cleanups_this_call
            = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 2) = 0;
        }
      return RTL_EXPR_RTL (exp);

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        return expand_builtin (exp, target, subtarget, tmode, ignore);
      /* If this call was expanded already by preexpand_calls,
         just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
        return CALL_EXPR_RTL (exp);
      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
          if (target == 0)
            {
              if (mode == BLKmode)
                {
                  if (TYPE_SIZE (type) == 0
                      || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
                    abort ();
                  target = assign_stack_temp (BLKmode,
                                              (TREE_INT_CST_LOW (TYPE_SIZE (type))
                                               + BITS_PER_UNIT - 1)
                                              / BITS_PER_UNIT, 0);
                }
              else
                target = gen_reg_rtx (mode);
            }
          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        change_address (target, TYPE_MODE (valtype), 0), 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
                         TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, 1,
                         int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
        return op0;
      /* If arg is a constant integer being extended from a narrower mode,
         we must really truncate to get the extended bits right.  Otherwise
         (unsigned long) (unsigned char) ("\377"[0])
         would come out as ffffffff.  */
      if (GET_MODE (op0) == VOIDmode
          && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
              < GET_MODE_BITSIZE (mode)))
        {
          /* The masking below needs MODE narrower than
             HOST_BITS_PER_WIDE_INT; wider modes go through
             simplify_unary_operation instead.  */
          int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));

          if (width < HOST_BITS_PER_WIDE_INT)
            {
              HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
                                   : CONST_DOUBLE_LOW (op0));
              if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
                  || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
                val &= ((HOST_WIDE_INT) 1 << width) - 1;
              else
                val |= ~(((HOST_WIDE_INT) 1 << width) - 1);

              op0 = GEN_INT (val);
            }
          else
            {
              op0 = (simplify_unary_operation
                     ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
                       ? ZERO_EXTEND : SIGN_EXTEND),
                      mode, op0,
                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
              if (op0 == 0)
                abort ();
            }
        }
      if (GET_MODE (op0) == VOIDmode)
        return op0;
      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
      if (flag_force_mem && GET_CODE (op0) == MEM)
        op0 = copy_to_reg (op0);

      if (target == 0)
        return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

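/* An illustrative, standalone demonstration of the truncation issue
   described above (assuming a target where plain `char' is signed, so
   "\377"[0] is -1): without the masking, the widened constant would
   wrongly come out as all ones.  */
#if 0
#include <stdio.h>

int
main ()
{
  unsigned long v = (unsigned long) (unsigned char) ("\377"[0]);
  printf ("%lx\n", v);		/* prints "ff", not "ffffffff" */
  return 0;
}
#endif
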
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }

      /* If the result is to be Pmode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || mode == Pmode)
        {
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 EXPAND_SUM);
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != Pmode)
        goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }
      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
        }

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
        temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx (PLUS, mode, op0, op1);

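/* An illustrative sketch, not compiler code, of the address formation
   described above for P = &ARR[const]: the symbol and the offset fold
   into a single CONST instead of an add insn (`sym' and `addr' are
   hypothetical locals).  */
#if 0
  rtx sym = gen_rtx (SYMBOL_REF, Pmode, "ARR");
  rtx addr = plus_constant (sym, 12);	/* (const (plus (symbol_ref "ARR") 12)) */
#endif
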
    case MINUS_EXPR:
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
                                 VOIDmode, modifier);
          rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                 VOIDmode, modifier);
          return gen_rtx (MINUS, mode, op0, op1);
        }
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        {
          exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
                       fold (build1 (NEGATE_EXPR, type,
                                     TREE_OPERAND (exp, 1))));
          goto plus_expr;
        }
      this_optab = sub_optab;
      goto binop;

    case MULT_EXPR:
      preexpand_calls (exp);
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          register tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == Pmode
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);

          /* Apply distributive law if OP0 is x+c.  */
          if (GET_CODE (op0) == PLUS
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            return gen_rtx (PLUS, mode,
                            gen_rtx (MULT, mode, XEXP (op0, 0),
                                     GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
                            GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
                                     * INTVAL (XEXP (op0, 1))));

          if (GET_CODE (op0) != REG)
            op0 = force_operand (op0, NULL_RTX);
          if (GET_CODE (op0) != REG)
            op0 = copy_to_mode_reg (mode, op0);

          return gen_rtx (MULT, mode, op0,
                          GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
        }

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
          this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                        ? umul_widen_optab : smul_widen_optab);
          if (mode == GET_MODE_WIDER_MODE (innermode)
              && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
            {
              op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                 NULL_RTX, VOIDmode, 0);
              if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                   VOIDmode, 0);
              else
                op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                   NULL_RTX, VOIDmode, 0);
              goto binop2;
            }
        }
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);

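    /* Worked instance of the distributive rewrite above: with OP0 of
       the form (plus (reg X) (const_int 3)) and a constant multiplier
       of 4,

           (x + 3) * 4  ==>  (plus (mult (reg X) (const_int 4))
                                   (const_int 12))

       which keeps the result in the sum-of-products shape that
       EXPAND_SUM callers want for address arithmetic.  */
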
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM;
         then, if the divisor is constant, we can optimize the case
         where some terms of the dividend have coefficients divisible
         by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;

4965 case ABS_EXPR:
4966 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4967
4968 /* Handle complex values specially. */
4969 {
4970 enum machine_mode opmode
4971 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4972
4973 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4974 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4975 return expand_complex_abs (opmode, op0, target, unsignedp);
4976 }
4977
4978 /* Unsigned abs is simply the operand. Testing here means we don't
4979 risk generating incorrect code below. */
4980 if (TREE_UNSIGNED (type))
4981 return op0;
4982
4983 /* First try to do it with a special abs instruction. */
4984 temp = expand_unop (mode, abs_optab, op0, target, 0);
4985 if (temp != 0)
4986 return temp;
4987
4988 /* If this machine has expensive jumps, we can do integer absolute
4989 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4990 where W is the width of MODE. */
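      /* Worked example (illustrative, not in the original source), for
	 W = 32 and x = -5: extended = x >> 31 = -1 (all one bits);
	 x ^ extended = 4, which is ~x; and (x ^ extended) - extended
	 = 4 - (-1) = 5 = -x.  For x >= 0, extended is 0 and the result
	 is just x, so no branch is needed either way.  */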
4991
4992 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4993 {
4994 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4995 size_int (GET_MODE_BITSIZE (mode) - 1),
906c4e36 4996 NULL_RTX, 0);
4997
4998 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4999 OPTAB_LIB_WIDEN);
5000 if (temp != 0)
5001 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5002 OPTAB_LIB_WIDEN);
5003
5004 if (temp != 0)
5005 return temp;
5006 }
5007
5008 /* If that does not win, use conditional jump and negate. */
5009 target = original_target;
5010 temp = gen_label_rtx ();
5011 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
37568125 5012 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5013 || (GET_CODE (target) == REG
5014 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5015 target = gen_reg_rtx (mode);
5016 emit_move_insn (target, op0);
5017 emit_cmp_insn (target,
5018 expand_expr (convert (type, integer_zero_node),
5019 NULL_RTX, VOIDmode, 0),
5020 GE, NULL_RTX, mode, 0, 0);
5021 NO_DEFER_POP;
5022 emit_jump_insn (gen_bge (temp));
5023 op0 = expand_unop (mode, neg_optab, target, target, 0);
5024 if (op0 != target)
5025 emit_move_insn (target, op0);
5026 emit_label (temp);
5027 OK_DEFER_POP;
5028 return target;
5029
5030 case MAX_EXPR:
5031 case MIN_EXPR:
5032 target = original_target;
5033 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
fc155707 5034 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5035 || (GET_CODE (target) == REG
5036 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5037 target = gen_reg_rtx (mode);
906c4e36 5038 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5039 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5040
5041 /* First try to do it with a special MIN or MAX instruction.
5042 If that does not win, use a conditional jump to select the proper
5043 value. */
5044 this_optab = (TREE_UNSIGNED (type)
5045 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5046 : (code == MIN_EXPR ? smin_optab : smax_optab));
5047
5048 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5049 OPTAB_WIDEN);
5050 if (temp != 0)
5051 return temp;
5052
5053 if (target != op0)
5054 emit_move_insn (target, op0);
bbf6f052 5055 op0 = gen_label_rtx ();
5056 /* If this mode is an integer too wide to compare properly,
5057 compare word by word. Rely on cse to optimize constant cases. */
5058 if (GET_MODE_CLASS (mode) == MODE_INT
5059 && !can_compare_p (mode))
bbf6f052 5060 {
f81497d9 5061 if (code == MAX_EXPR)
ee456b1c 5062 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
bbf6f052 5063 else
5064 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
5065 emit_move_insn (target, op1);
bbf6f052 5066 }
5067 else
5068 {
5069 if (code == MAX_EXPR)
5070 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5071 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5072 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5073 else
5074 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5075 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5076 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
f81497d9 5077 if (temp == const0_rtx)
ee456b1c 5078 emit_move_insn (target, op1);
5079 else if (temp != const_true_rtx)
5080 {
5081 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5082 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5083 else
5084 abort ();
ee456b1c 5085 emit_move_insn (target, op1);
5086 }
5087 }
5088 emit_label (op0);
5089 return target;
5090
5091/* ??? Can optimize when the operand of this is a bitwise operation,
5092 by using a different bitwise operation. */
5093 case BIT_NOT_EXPR:
5094 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5095 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5096 if (temp == 0)
5097 abort ();
5098 return temp;
5099
5100 case FFS_EXPR:
5101 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5102 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5103 if (temp == 0)
5104 abort ();
5105 return temp;
5106
5107/* ??? Can optimize bitwise operations with one arg constant.
5108 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5109 and (a bitwise1 b) bitwise2 b (etc)
5110    but that is probably not worthwhile.  */
5111
5112/* BIT_AND_EXPR is for bitwise anding.
5113 TRUTH_AND_EXPR is for anding two boolean values
5114 when we want in all cases to compute both of them.
5115 In general it is fastest to do TRUTH_AND_EXPR by
5116 computing both operands as actual zero-or-1 values
5117 and then bitwise anding. In cases where there cannot
5118 be any side effects, better code would be made by
5119 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5120 but the question is how to recognize those cases. */
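/* Illustrative example (not in the original source): a conjunction with
   no short-circuit requirement -- e.g. Fortran's .AND., or `a && b'
   where the front end knows both operands are side-effect free -- can
   be represented as TRUTH_AND_EXPR and compiled roughly as

       t1 = (a != 0);  t2 = (b != 0);  result = t1 & t2;

   whereas TRUTH_ANDIF_EXPR must not evaluate `b' when `a' is zero.  */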
5121
5122 case TRUTH_AND_EXPR:
5123 case BIT_AND_EXPR:
5124 this_optab = and_optab;
5125 goto binop;
5126
5127/* See comment above about TRUTH_AND_EXPR; it applies here too. */
5128 case TRUTH_OR_EXPR:
5129 case BIT_IOR_EXPR:
5130 this_optab = ior_optab;
5131 goto binop;
5132
874726a8 5133 case TRUTH_XOR_EXPR:
5134 case BIT_XOR_EXPR:
5135 this_optab = xor_optab;
5136 goto binop;
5137
5138 case LSHIFT_EXPR:
5139 case RSHIFT_EXPR:
5140 case LROTATE_EXPR:
5141 case RROTATE_EXPR:
5142 preexpand_calls (exp);
5143 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5144 subtarget = 0;
5145 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5146 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5147 unsignedp);
5148
5149/* Could determine the answer when only additive constants differ.
5150 Also, the addition of one can be handled by changing the condition. */
5151 case LT_EXPR:
5152 case LE_EXPR:
5153 case GT_EXPR:
5154 case GE_EXPR:
5155 case EQ_EXPR:
5156 case NE_EXPR:
5157 preexpand_calls (exp);
5158 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5159 if (temp != 0)
5160 return temp;
5161 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5162 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5163 && original_target
5164 && GET_CODE (original_target) == REG
5165 && (GET_MODE (original_target)
5166 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5167 {
5168 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5169 if (temp != original_target)
5170 temp = copy_to_reg (temp);
5171 op1 = gen_label_rtx ();
906c4e36 5172 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5173 GET_MODE (temp), unsignedp, 0);
5174 emit_jump_insn (gen_beq (op1));
5175 emit_move_insn (temp, const1_rtx);
5176 emit_label (op1);
5177 return temp;
5178 }
5179 /* If no set-flag instruction, must generate a conditional
5180 store into a temporary variable. Drop through
5181 and handle this like && and ||. */
5182
5183 case TRUTH_ANDIF_EXPR:
5184 case TRUTH_ORIF_EXPR:
5185 if (! ignore
5186 && (target == 0 || ! safe_from_p (target, exp)
5187 /* Make sure we don't have a hard reg (such as function's return
5188 value) live across basic blocks, if not optimizing. */
5189 || (!optimize && GET_CODE (target) == REG
5190 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 5191 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5192
5193 if (target)
5194 emit_clr_insn (target);
5195
5196 op1 = gen_label_rtx ();
5197 jumpifnot (exp, op1);
5198
5199 if (target)
5200 emit_0_to_1_insn (target);
5201
bbf6f052 5202 emit_label (op1);
e44842fe 5203 return ignore ? const0_rtx : target;
5204
5205 case TRUTH_NOT_EXPR:
5206 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5207 /* The parser is careful to generate TRUTH_NOT_EXPR
5208 only with operands that are always zero or one. */
906c4e36 5209 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5210 target, 1, OPTAB_LIB_WIDEN);
5211 if (temp == 0)
5212 abort ();
5213 return temp;
5214
5215 case COMPOUND_EXPR:
5216 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5217 emit_queue ();
5218 return expand_expr (TREE_OPERAND (exp, 1),
5219 (ignore ? const0_rtx : target),
5220 VOIDmode, 0);
5221
5222 case COND_EXPR:
5223 {
5224 /* Note that COND_EXPRs whose type is a structure or union
5225 are required to be constructed to contain assignments of
5226 a temporary variable, so that we can evaluate them here
5227 for side effect only. If type is void, we must do likewise. */
5228
5229 /* If an arm of the branch requires a cleanup,
5230 only that cleanup is performed. */
5231
5232 tree singleton = 0;
5233 tree binary_op = 0, unary_op = 0;
5234 tree old_cleanups = cleanups_this_call;
5235 cleanups_this_call = 0;
5236
5237 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5238 convert it to our mode, if necessary. */
5239 if (integer_onep (TREE_OPERAND (exp, 1))
5240 && integer_zerop (TREE_OPERAND (exp, 2))
5241 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5242 {
5243 if (ignore)
5244 {
5245 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5246 modifier);
5247 return const0_rtx;
5248 }
5249
5250 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5251 if (GET_MODE (op0) == mode)
5252 return op0;
5253 if (target == 0)
5254 target = gen_reg_rtx (mode);
5255 convert_move (target, op0, unsignedp);
5256 return target;
5257 }
5258
5259 /* If we are not to produce a result, we have no target. Otherwise,
5260 if a target was specified use it; it will not be used as an
5261 intermediate target unless it is safe. If no target, use a
5262 temporary. */
5263
dd27116b 5264 if (ignore)
5265 temp = 0;
5266 else if (original_target
5267 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5268 temp = original_target;
5269 else if (mode == BLKmode)
5270 {
5271 if (TYPE_SIZE (type) == 0
5272 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5273 abort ();
673bc773 5274
5275 temp = assign_stack_temp (BLKmode,
5276 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5277 + BITS_PER_UNIT - 1)
5278 / BITS_PER_UNIT, 0);
5279 MEM_IN_STRUCT_P (temp)
5280 = (TREE_CODE (type) == RECORD_TYPE
5281 || TREE_CODE (type) == UNION_TYPE
5282 || TREE_CODE (type) == QUAL_UNION_TYPE
5283 || TREE_CODE (type) == ARRAY_TYPE);
5284 }
5285 else
5286 temp = gen_reg_rtx (mode);
5287
5288 /* Check for X ? A + B : A. If we have this, we can copy
5289 A to the output and conditionally add B. Similarly for unary
5290 operations. Don't do this if X has side-effects because
5291 those side effects might affect A or B and the "?" operation is
5292 a sequence point in ANSI. (We test for side effects later.) */
5293
5294 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5295 && operand_equal_p (TREE_OPERAND (exp, 2),
5296 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5297 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5298 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5299 && operand_equal_p (TREE_OPERAND (exp, 1),
5300 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5301 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5302 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5303 && operand_equal_p (TREE_OPERAND (exp, 2),
5304 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5305 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5306 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5307 && operand_equal_p (TREE_OPERAND (exp, 1),
5308 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5309 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5310
5311 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5312 operation, do this as A + (X != 0). Similarly for other simple
5313 binary operators. */
dd27116b 5314 if (temp && singleton && binary_op
5315 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5316 && (TREE_CODE (binary_op) == PLUS_EXPR
5317 || TREE_CODE (binary_op) == MINUS_EXPR
5318 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5319 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5320 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5321 && integer_onep (TREE_OPERAND (binary_op, 1))
5322 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5323 {
5324 rtx result;
5325 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5326 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5327 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5328 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5329 : and_optab);
5330
5331 /* If we had X ? A : A + 1, do this as A + (X == 0).
5332
5333 We have to invert the truth value here and then put it
5334 back later if do_store_flag fails. We cannot simply copy
5335 TREE_OPERAND (exp, 0) to another variable and modify that
5336 because invert_truthvalue can modify the tree pointed to
5337 by its argument. */
5338 if (singleton == TREE_OPERAND (exp, 1))
5339 TREE_OPERAND (exp, 0)
5340 = invert_truthvalue (TREE_OPERAND (exp, 0));
5341
5342 result = do_store_flag (TREE_OPERAND (exp, 0),
5343 (safe_from_p (temp, singleton)
5344 ? temp : NULL_RTX),
5345 mode, BRANCH_COST <= 1);
5346
5347 if (result)
5348 {
906c4e36 5349 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5350 return expand_binop (mode, boptab, op1, result, temp,
5351 unsignedp, OPTAB_LIB_WIDEN);
5352 }
5353 else if (singleton == TREE_OPERAND (exp, 1))
5354 TREE_OPERAND (exp, 0)
5355 = invert_truthvalue (TREE_OPERAND (exp, 0));
5356 }
5357
5358 NO_DEFER_POP;
5359 op0 = gen_label_rtx ();
5360
5361 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5362 {
5363 if (temp != 0)
5364 {
5365 /* If the target conflicts with the other operand of the
5366 binary op, we can't use it. Also, we can't use the target
5367 if it is a hard register, because evaluating the condition
5368 might clobber it. */
5369 if ((binary_op
5370 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5371 || (GET_CODE (temp) == REG
5372 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5373 temp = gen_reg_rtx (mode);
5374 store_expr (singleton, temp, 0);
5375 }
5376 else
906c4e36 5377 expand_expr (singleton,
2937cf87 5378 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5379 if (cleanups_this_call)
5380 {
5381 sorry ("aggregate value in COND_EXPR");
5382 cleanups_this_call = 0;
5383 }
5384 if (singleton == TREE_OPERAND (exp, 1))
5385 jumpif (TREE_OPERAND (exp, 0), op0);
5386 else
5387 jumpifnot (TREE_OPERAND (exp, 0), op0);
5388
5389 if (binary_op && temp == 0)
5390 /* Just touch the other operand. */
5391 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 5392 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5393 else if (binary_op)
5394 store_expr (build (TREE_CODE (binary_op), type,
5395 make_tree (type, temp),
5396 TREE_OPERAND (binary_op, 1)),
5397 temp, 0);
5398 else
5399 store_expr (build1 (TREE_CODE (unary_op), type,
5400 make_tree (type, temp)),
5401 temp, 0);
5402 op1 = op0;
5403 }
5404#if 0
5405 /* This is now done in jump.c and is better done there because it
5406 produces shorter register lifetimes. */
5407
5408 /* Check for both possibilities either constants or variables
5409 in registers (but not the same as the target!). If so, can
5410 save branches by assigning one, branching, and assigning the
5411 other. */
5412 else if (temp && GET_MODE (temp) != BLKmode
5413 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5414 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5415 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5416 && DECL_RTL (TREE_OPERAND (exp, 1))
5417 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5418 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5419 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5420 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5421 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5422 && DECL_RTL (TREE_OPERAND (exp, 2))
5423 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5424 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5425 {
5426 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5427 temp = gen_reg_rtx (mode);
5428 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5429 jumpifnot (TREE_OPERAND (exp, 0), op0);
5430 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5431 op1 = op0;
5432 }
5433#endif
5434 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5435 comparison operator. If we have one of these cases, set the
5436 output to A, branch on A (cse will merge these two references),
5437 then set the output to FOO. */
5438 else if (temp
5439 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5440 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5441 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5442 TREE_OPERAND (exp, 1), 0)
5443 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5444 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5445 {
5446 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5447 temp = gen_reg_rtx (mode);
5448 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5449 jumpif (TREE_OPERAND (exp, 0), op0);
5450 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5451 op1 = op0;
5452 }
5453 else if (temp
5454 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5455 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5456 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5457 TREE_OPERAND (exp, 2), 0)
5458 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5459 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5460 {
5461 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5462 temp = gen_reg_rtx (mode);
5463 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5464 jumpifnot (TREE_OPERAND (exp, 0), op0);
5465 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5466 op1 = op0;
5467 }
5468 else
5469 {
5470 op1 = gen_label_rtx ();
5471 jumpifnot (TREE_OPERAND (exp, 0), op0);
5472 if (temp != 0)
5473 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5474 else
5475 expand_expr (TREE_OPERAND (exp, 1),
5476 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5477 if (cleanups_this_call)
5478 {
5479 sorry ("aggregate value in COND_EXPR");
5480 cleanups_this_call = 0;
5481 }
5482
5483 emit_queue ();
5484 emit_jump_insn (gen_jump (op1));
5485 emit_barrier ();
5486 emit_label (op0);
5487 if (temp != 0)
5488 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5489 else
5490 expand_expr (TREE_OPERAND (exp, 2),
5491 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5492 }
5493
5494 if (cleanups_this_call)
5495 {
5496 sorry ("aggregate value in COND_EXPR");
5497 cleanups_this_call = 0;
5498 }
5499
5500 emit_queue ();
5501 emit_label (op1);
5502 OK_DEFER_POP;
5503 cleanups_this_call = old_cleanups;
5504 return temp;
5505 }
5506
5507 case TARGET_EXPR:
5508 {
5509 /* Something needs to be initialized, but we didn't know
5510 where that thing was when building the tree. For example,
5511 it could be the return value of a function, or a parameter
5512 	 to a function which is laid down on the stack, or a temporary
5513 variable which must be passed by reference.
5514
5515 We guarantee that the expression will either be constructed
5516 or copied into our original target. */
5517
5518 tree slot = TREE_OPERAND (exp, 0);
5c062816 5519 tree exp1;
5520
5521 if (TREE_CODE (slot) != VAR_DECL)
5522 abort ();
5523
5524 if (target == 0)
5525 {
5526 if (DECL_RTL (slot) != 0)
5527 {
5528 target = DECL_RTL (slot);
5c062816 5529 	    /* If we have already expanded the slot, don't do
ac993f4f 5530 	       it again.  (mrs)  */
5531 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5532 return target;
ac993f4f 5533 }
5534 else
5535 {
5536 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5537 /* All temp slots at this level must not conflict. */
5538 preserve_temp_slots (target);
5539 DECL_RTL (slot) = target;
5540 }
5541
5542#if 0
5543 	/* I bet this needs to be done, and I bet that it needs to
5544 	   be above, inside the else clause.  The reason is
5545 	   simple: how else is it going to get cleaned up?  (mrs)
5546
5547 	   The reason it probably did not work before, and was
5548 	   commented out, is that it was re-expanding already
5549 	   expanded target_exprs (target == 0 and DECL_RTL (slot)
5550 	   != 0), also cleaning them up many times as well.  :-( */
5551
5552 /* Since SLOT is not known to the called function
5553 to belong to its stack frame, we must build an explicit
5554 cleanup. This case occurs when we must build up a reference
5555 to pass the reference as an argument. In this case,
5556 it is very likely that such a reference need not be
5557 built here. */
5558
5559 if (TREE_OPERAND (exp, 2) == 0)
5560 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5561 if (TREE_OPERAND (exp, 2))
5562 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5563 cleanups_this_call);
5564#endif
5565 }
5566 else
5567 {
5568 /* This case does occur, when expanding a parameter which
5569 needs to be constructed on the stack. The target
5570 is the actual stack address that we want to initialize.
5571 The function we call will perform the cleanup in this case. */
5572
5573 /* If we have already assigned it space, use that space,
5574 not target that we were passed in, as our target
5575 parameter is only a hint. */
5576 if (DECL_RTL (slot) != 0)
5577 {
5578 target = DECL_RTL (slot);
5579 	    /* If we have already expanded the slot, don't do
5580 	       it again.  (mrs)  */
5581 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5582 return target;
5583 }
5584
5585 DECL_RTL (slot) = target;
5586 }
5587
5588 exp1 = TREE_OPERAND (exp, 1);
5589 /* Mark it as expanded. */
5590 TREE_OPERAND (exp, 1) = NULL_TREE;
5591
5592 return expand_expr (exp1, target, tmode, modifier);
5593 }
5594
5595 case INIT_EXPR:
5596 {
5597 tree lhs = TREE_OPERAND (exp, 0);
5598 tree rhs = TREE_OPERAND (exp, 1);
5599 tree noncopied_parts = 0;
5600 tree lhs_type = TREE_TYPE (lhs);
5601
5602 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5603 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5604 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5605 TYPE_NONCOPIED_PARTS (lhs_type));
5606 while (noncopied_parts != 0)
5607 {
5608 expand_assignment (TREE_VALUE (noncopied_parts),
5609 TREE_PURPOSE (noncopied_parts), 0, 0);
5610 noncopied_parts = TREE_CHAIN (noncopied_parts);
5611 }
5612 return temp;
5613 }
5614
5615 case MODIFY_EXPR:
5616 {
5617 /* If lhs is complex, expand calls in rhs before computing it.
5618 That's so we don't compute a pointer and save it over a call.
5619 If lhs is simple, compute it first so we can give it as a
5620 target if the rhs is just a call. This avoids an extra temp and copy
5621 and that prevents a partial-subsumption which makes bad code.
5622 Actually we could treat component_ref's of vars like vars. */
5623
5624 tree lhs = TREE_OPERAND (exp, 0);
5625 tree rhs = TREE_OPERAND (exp, 1);
5626 tree noncopied_parts = 0;
5627 tree lhs_type = TREE_TYPE (lhs);
5628
5629 temp = 0;
5630
5631 if (TREE_CODE (lhs) != VAR_DECL
5632 && TREE_CODE (lhs) != RESULT_DECL
5633 && TREE_CODE (lhs) != PARM_DECL)
5634 preexpand_calls (exp);
5635
5636 /* Check for |= or &= of a bitfield of size one into another bitfield
5637 of size 1. In this case, (unless we need the result of the
5638 assignment) we can do this more efficiently with a
5639 test followed by an assignment, if necessary.
5640
5641 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5642 things change so we do, this code should be enhanced to
5643 support it. */
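	/* Illustrative example (not in the original source): given

	       struct { int a : 1; } s;
	       struct { int b : 1; } t;

	       s.a |= t.b;

	   with the result unused, the code below emits the equivalent of

	       if (t.b) s.a = 1;

	   so the read-modify-write of s.a is skipped when t.b is 0.  */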
5644 if (ignore
5645 && TREE_CODE (lhs) == COMPONENT_REF
5646 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5647 || TREE_CODE (rhs) == BIT_AND_EXPR)
5648 && TREE_OPERAND (rhs, 0) == lhs
5649 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5650 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5651 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5652 {
5653 rtx label = gen_label_rtx ();
5654
5655 do_jump (TREE_OPERAND (rhs, 1),
5656 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5657 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5658 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5659 (TREE_CODE (rhs) == BIT_IOR_EXPR
5660 ? integer_one_node
5661 : integer_zero_node)),
5662 0, 0);
e7c33f54 5663 do_pending_stack_adjust ();
5664 emit_label (label);
5665 return const0_rtx;
5666 }
5667
5668 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5669 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5670 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5671 TYPE_NONCOPIED_PARTS (lhs_type));
5672
5673 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5674 while (noncopied_parts != 0)
5675 {
5676 expand_assignment (TREE_PURPOSE (noncopied_parts),
5677 TREE_VALUE (noncopied_parts), 0, 0);
5678 noncopied_parts = TREE_CHAIN (noncopied_parts);
5679 }
5680 return temp;
5681 }
5682
5683 case PREINCREMENT_EXPR:
5684 case PREDECREMENT_EXPR:
5685 return expand_increment (exp, 0);
5686
5687 case POSTINCREMENT_EXPR:
5688 case POSTDECREMENT_EXPR:
5689 /* Faster to treat as pre-increment if result is not used. */
5690 return expand_increment (exp, ! ignore);
5691
5692 case ADDR_EXPR:
5693 /* Are we taking the address of a nested function? */
5694 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5695 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5696 {
5697 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5698 op0 = force_operand (op0, target);
5699 }
5700 else
5701 {
906c4e36 5702 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5703 (modifier == EXPAND_INITIALIZER
5704 ? modifier : EXPAND_CONST_ADDRESS));
5705
5706 /* We would like the object in memory. If it is a constant,
5707 we can have it be statically allocated into memory. For
5708 a non-constant (REG or SUBREG), we need to allocate some
5709 memory and store the value into it. */
5710
5711 if (CONSTANT_P (op0))
5712 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5713 op0);
5714
5715 /* These cases happen in Fortran. Is that legitimate?
5716 Should Fortran work in another way?
5717 Do they happen in C? */
5718 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5719 || GET_CODE (op0) == CONCAT)
5720 {
5721 	    /* If this object is in a register, it must not
5722 	       be BLKmode.  */
5723 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5724 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5725 rtx memloc
5726 = assign_stack_temp (inner_mode,
5727 int_size_in_bytes (inner_type), 1);
5728
5729 emit_move_insn (memloc, op0);
5730 op0 = memloc;
5731 }
5732
5733 if (GET_CODE (op0) != MEM)
5734 abort ();
5735
5736 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5737 return XEXP (op0, 0);
5738 op0 = force_operand (XEXP (op0, 0), target);
5739 }
5740 if (flag_force_addr && GET_CODE (op0) != REG)
5741 return force_reg (Pmode, op0);
5742 return op0;
5743
5744 case ENTRY_VALUE_EXPR:
5745 abort ();
5746
5747 /* COMPLEX type for Extended Pascal & Fortran */
5748 case COMPLEX_EXPR:
5749 {
5750 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5751
5752 rtx prev;
5753
5754 /* Get the rtx code of the operands. */
5755 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5756 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5757
5758 if (! target)
5759 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5760
5761 prev = get_last_insn ();
5762
5763 /* Tell flow that the whole of the destination is being set. */
5764 if (GET_CODE (target) == REG)
5765 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5766
5767 /* Move the real (op0) and imaginary (op1) parts to their location. */
5768 emit_move_insn (gen_realpart (mode, target), op0);
5769 emit_move_insn (gen_imagpart (mode, target), op1);
5770
5771 /* Complex construction should appear as a single unit. */
5772 group_insns (prev);
5773
5774 return target;
5775 }
5776
5777 case REALPART_EXPR:
5778 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5779 return gen_realpart (mode, op0);
5780
5781 case IMAGPART_EXPR:
5782 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5783 return gen_imagpart (mode, op0);
5784
5785 case CONJ_EXPR:
5786 {
5787 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5788 rtx imag_t;
5789 rtx prev;
5790
5791 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5792
5793 if (! target)
5794 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5795
5796 prev = get_last_insn ();
5797
5798 /* Tell flow that the whole of the destination is being set. */
5799 if (GET_CODE (target) == REG)
5800 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5801
5802 /* Store the realpart and the negated imagpart to target. */
2d7050fd 5803 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
7308a047 5804
2d7050fd 5805 imag_t = gen_imagpart (mode, target);
7308a047 5806 temp = expand_unop (mode, neg_optab,
2d7050fd 5807 gen_imagpart (mode, op0), imag_t, 0);
5808 if (temp != imag_t)
5809 emit_move_insn (imag_t, temp);
5810
5811 /* Conjugate should appear as a single unit */
5812 group_insns (prev);
5813
5814 return target;
5815 }
5816
bbf6f052 5817 case ERROR_MARK:
5818 op0 = CONST0_RTX (tmode);
5819 if (op0 != 0)
5820 return op0;
5821 return const0_rtx;
5822
5823 default:
90764a87 5824 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
5825 }
5826
5827 /* Here to do an ordinary binary operator, generating an instruction
5828 from the optab already placed in `this_optab'. */
5829 binop:
5830 preexpand_calls (exp);
5831 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5832 subtarget = 0;
5833 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 5834 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5835 binop2:
5836 temp = expand_binop (mode, this_optab, op0, op1, target,
5837 unsignedp, OPTAB_LIB_WIDEN);
5838 if (temp == 0)
5839 abort ();
5840 return temp;
5841}
bbf6f052 5842
bbf6f052 5843
5844/* Emit bytecode to evaluate the given expression EXP to the stack. */
5845void
5846bc_expand_expr (exp)
5847 tree exp;
bbf6f052 5848{
5849 enum tree_code code;
5850 tree type, arg0;
5851 rtx r;
5852 struct binary_operator *binoptab;
5853 struct unary_operator *unoptab;
5854 struct increment_operator *incroptab;
5855 struct bc_label *lab, *lab1;
5856 enum bytecode_opcode opcode;
5857
5858
5859 code = TREE_CODE (exp);
5860
5861 switch (code)
bbf6f052 5862 {
5863 case PARM_DECL:
5864
5865 if (DECL_RTL (exp) == 0)
bbf6f052 5866 {
5867 error_with_decl (exp, "prior parameter's size depends on `%s'");
5868 return;
bbf6f052 5869 }
5870
5871 bc_load_parmaddr (DECL_RTL (exp));
5872 bc_load_memory (TREE_TYPE (exp), exp);
5873
5874 return;
5875
5876 case VAR_DECL:
5877
5878 if (DECL_RTL (exp) == 0)
5879 abort ();
5880
5881#if 0
e7a42772 5882 if (BYTECODE_LABEL (DECL_RTL (exp)))
5883 bc_load_externaddr (DECL_RTL (exp));
5884 else
5885 bc_load_localaddr (DECL_RTL (exp));
5886#endif
5887 if (TREE_PUBLIC (exp))
5888 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
5889 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
5890 else
5891 bc_load_localaddr (DECL_RTL (exp));
5892
5893 bc_load_memory (TREE_TYPE (exp), exp);
5894 return;
5895
5896 case INTEGER_CST:
5897
5898#ifdef DEBUG_PRINT_CODE
5899 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
5900#endif
6bd6178d 5901 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
ca695ac9 5902 ? SImode
6bd6178d 5903 : TYPE_MODE (TREE_TYPE (exp)))],
5904 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
5905 return;
5906
5907 case REAL_CST:
5908
c02bd5d9 5909#if 0
5910#ifdef DEBUG_PRINT_CODE
5911 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
5912#endif
c02bd5d9 5913 /* FIX THIS: find a better way to pass real_cst's. -bson */
5914 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
5915 (double) TREE_REAL_CST (exp));
5916#else
5917 abort ();
5918#endif
5919
5920 return;
5921
5922 case CALL_EXPR:
5923
5924 /* We build a call description vector describing the type of
5925 the return value and of the arguments; this call vector,
5926 together with a pointer to a location for the return value
5927 and the base of the argument list, is passed to the low
5928 level machine dependent call subroutine, which is responsible
5929 for putting the arguments wherever real functions expect
5930 them, as well as getting the return value back. */
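      /* Sketch of the vector's final layout, derived from the consing
	 order in the code below (illustrative only):

	     { nargs,
	       ret_type_code,  sizeof (retval),
	       arg1_type_code, sizeof (arg1),
	       ...  }  */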
5931 {
5932 tree calldesc = 0, arg;
5933 int nargs = 0, i;
5934 rtx retval;
5935
5936 /* Push the evaluated args on the evaluation stack in reverse
5937 order. Also make an entry for each arg in the calldesc
5938 vector while we're at it. */
5939
5940 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
5941
5942 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
5943 {
5944 ++nargs;
5945 bc_expand_expr (TREE_VALUE (arg));
5946
5947 calldesc = tree_cons ((tree) 0,
5948 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
5949 calldesc);
5950 calldesc = tree_cons ((tree) 0,
5951 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
5952 calldesc);
5953 }
5954
5955 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
5956
5957 /* Allocate a location for the return value and push its
5958 address on the evaluation stack. Also make an entry
5959 at the front of the calldesc for the return value type. */
5960
5961 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
5962 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
5963 bc_load_localaddr (retval);
5964
5965 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
5966 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
5967
5968 /* Prepend the argument count. */
5969 calldesc = tree_cons ((tree) 0,
5970 build_int_2 (nargs, 0),
5971 calldesc);
5972
5973 /* Push the address of the call description vector on the stack. */
5974 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
5975 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
5976 build_index_type (build_int_2 (nargs * 2, 0)));
5977 r = output_constant_def (calldesc);
5978 bc_load_externaddr (r);
5979
5980 /* Push the address of the function to be called. */
5981 bc_expand_expr (TREE_OPERAND (exp, 0));
5982
5983 /* Call the function, popping its address and the calldesc vector
5984 address off the evaluation stack in the process. */
5985 bc_emit_instruction (call);
5986
5987 /* Pop the arguments off the stack. */
5988 bc_adjust_stack (nargs);
5989
5990 /* Load the return value onto the stack. */
5991 bc_load_localaddr (retval);
5992 bc_load_memory (type, TREE_OPERAND (exp, 0));
5993 }
5994 return;
5995
5996 case SAVE_EXPR:
5997
5998 if (!SAVE_EXPR_RTL (exp))
bbf6f052 5999 {
6000 /* First time around: copy to local variable */
6001 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6002 TYPE_ALIGN (TREE_TYPE(exp)));
6003 bc_expand_expr (TREE_OPERAND (exp, 0));
6004 bc_emit_instruction (dup);
6005
6006 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6007 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 6008 }
ca695ac9 6009 else
bbf6f052 6010 {
6011 /* Consecutive reference: use saved copy */
6012 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6013 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 6014 }
6015 return;
6016
6017#if 0
6018 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6019 how are they handled instead? */
6020 case LET_STMT:
6021
6022 TREE_USED (exp) = 1;
6023 bc_expand_expr (STMT_BODY (exp));
6024 return;
6025#endif
6026
6027 case NOP_EXPR:
6028 case CONVERT_EXPR:
6029
6030 bc_expand_expr (TREE_OPERAND (exp, 0));
6031 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6032 return;
6033
6034 case MODIFY_EXPR:
6035
c02bd5d9 6036 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6037 return;
6038
6039 case ADDR_EXPR:
6040
6041 bc_expand_address (TREE_OPERAND (exp, 0));
6042 return;
6043
6044 case INDIRECT_REF:
6045
6046 bc_expand_expr (TREE_OPERAND (exp, 0));
6047 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6048 return;
6049
6050 case ARRAY_REF:
6051
6052 bc_expand_expr (bc_canonicalize_array_ref (exp));
6053 return;
6054
6055 case COMPONENT_REF:
6056
6057 bc_expand_component_address (exp);
6058
6059 /* If we have a bitfield, generate a proper load */
6060 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6061 return;
6062
6063 case COMPOUND_EXPR:
6064
6065 bc_expand_expr (TREE_OPERAND (exp, 0));
6066 bc_emit_instruction (drop);
6067 bc_expand_expr (TREE_OPERAND (exp, 1));
6068 return;
6069
6070 case COND_EXPR:
6071
6072 bc_expand_expr (TREE_OPERAND (exp, 0));
6073 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6074 lab = bc_get_bytecode_label ();
c02bd5d9 6075 bc_emit_bytecode (xjumpifnot);
6076 bc_emit_bytecode_labelref (lab);
6077
6078#ifdef DEBUG_PRINT_CODE
6079 fputc ('\n', stderr);
6080#endif
6081 bc_expand_expr (TREE_OPERAND (exp, 1));
6082 lab1 = bc_get_bytecode_label ();
6083 bc_emit_bytecode (jump);
6084 bc_emit_bytecode_labelref (lab1);
6085
6086#ifdef DEBUG_PRINT_CODE
6087 fputc ('\n', stderr);
6088#endif
6089
6090 bc_emit_bytecode_labeldef (lab);
6091 bc_expand_expr (TREE_OPERAND (exp, 2));
6092 bc_emit_bytecode_labeldef (lab1);
6093 return;
6094
6095 case TRUTH_ANDIF_EXPR:
6096
c02bd5d9 6097 opcode = xjumpifnot;
6098 goto andorif;
6099
6100 case TRUTH_ORIF_EXPR:
6101
c02bd5d9 6102 opcode = xjumpif;
6103 goto andorif;
6104
6105 case PLUS_EXPR:
6106
6107 binoptab = optab_plus_expr;
6108 goto binop;
6109
6110 case MINUS_EXPR:
6111
6112 binoptab = optab_minus_expr;
6113 goto binop;
6114
6115 case MULT_EXPR:
6116
6117 binoptab = optab_mult_expr;
6118 goto binop;
6119
6120 case TRUNC_DIV_EXPR:
6121 case FLOOR_DIV_EXPR:
6122 case CEIL_DIV_EXPR:
6123 case ROUND_DIV_EXPR:
6124 case EXACT_DIV_EXPR:
6125
6126 binoptab = optab_trunc_div_expr;
6127 goto binop;
6128
6129 case TRUNC_MOD_EXPR:
6130 case FLOOR_MOD_EXPR:
6131 case CEIL_MOD_EXPR:
6132 case ROUND_MOD_EXPR:
6133
6134 binoptab = optab_trunc_mod_expr;
6135 goto binop;
6136
6137 case FIX_ROUND_EXPR:
6138 case FIX_FLOOR_EXPR:
6139 case FIX_CEIL_EXPR:
6140 abort (); /* Not used for C. */
6141
6142 case FIX_TRUNC_EXPR:
6143 case FLOAT_EXPR:
6144 case MAX_EXPR:
6145 case MIN_EXPR:
6146 case FFS_EXPR:
6147 case LROTATE_EXPR:
6148 case RROTATE_EXPR:
6149 abort (); /* FIXME */
6150
6151 case RDIV_EXPR:
6152
6153 binoptab = optab_rdiv_expr;
6154 goto binop;
6155
6156 case BIT_AND_EXPR:
6157
6158 binoptab = optab_bit_and_expr;
6159 goto binop;
6160
6161 case BIT_IOR_EXPR:
6162
6163 binoptab = optab_bit_ior_expr;
6164 goto binop;
6165
6166 case BIT_XOR_EXPR:
6167
6168 binoptab = optab_bit_xor_expr;
6169 goto binop;
6170
6171 case LSHIFT_EXPR:
6172
6173 binoptab = optab_lshift_expr;
6174 goto binop;
6175
6176 case RSHIFT_EXPR:
6177
6178 binoptab = optab_rshift_expr;
6179 goto binop;
6180
6181 case TRUTH_AND_EXPR:
6182
6183 binoptab = optab_truth_and_expr;
6184 goto binop;
6185
6186 case TRUTH_OR_EXPR:
6187
6188 binoptab = optab_truth_or_expr;
6189 goto binop;
6190
6191 case LT_EXPR:
6192
6193 binoptab = optab_lt_expr;
6194 goto binop;
6195
6196 case LE_EXPR:
6197
6198 binoptab = optab_le_expr;
6199 goto binop;
6200
6201 case GE_EXPR:
6202
6203 binoptab = optab_ge_expr;
6204 goto binop;
6205
6206 case GT_EXPR:
6207
6208 binoptab = optab_gt_expr;
6209 goto binop;
6210
6211 case EQ_EXPR:
6212
6213 binoptab = optab_eq_expr;
6214 goto binop;
6215
6216 case NE_EXPR:
6217
6218 binoptab = optab_ne_expr;
6219 goto binop;
6220
6221 case NEGATE_EXPR:
6222
6223 unoptab = optab_negate_expr;
6224 goto unop;
6225
6226 case BIT_NOT_EXPR:
6227
6228 unoptab = optab_bit_not_expr;
6229 goto unop;
6230
6231 case TRUTH_NOT_EXPR:
6232
6233 unoptab = optab_truth_not_expr;
6234 goto unop;
6235
6236 case PREDECREMENT_EXPR:
6237
6238 incroptab = optab_predecrement_expr;
6239 goto increment;
6240
6241 case PREINCREMENT_EXPR:
6242
6243 incroptab = optab_preincrement_expr;
6244 goto increment;
6245
6246 case POSTDECREMENT_EXPR:
6247
6248 incroptab = optab_postdecrement_expr;
6249 goto increment;
6250
6251 case POSTINCREMENT_EXPR:
6252
6253 incroptab = optab_postincrement_expr;
6254 goto increment;
6255
6256 case CONSTRUCTOR:
6257
6258 bc_expand_constructor (exp);
6259 return;
6260
6261 case ERROR_MARK:
6262 case RTL_EXPR:
6263
6264 return;
6265
6266 case BIND_EXPR:
6267 {
6268 tree vars = TREE_OPERAND (exp, 0);
6269 int vars_need_expansion = 0;
6270
6271 	/* Need to open a binding contour here because
6272 	   if there are any cleanups they must be contained here.  */
6273 expand_start_bindings (0);
6274
6275 /* Mark the corresponding BLOCK for output. */
6276 if (TREE_OPERAND (exp, 2) != 0)
6277 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6278
6279 /* If VARS have not yet been expanded, expand them now. */
6280 while (vars)
6281 {
6282 if (DECL_RTL (vars) == 0)
6283 {
6284 vars_need_expansion = 1;
6285 bc_expand_decl (vars, 0);
6286 }
6287 bc_expand_decl_init (vars);
6288 vars = TREE_CHAIN (vars);
6289 }
6290
6291 bc_expand_expr (TREE_OPERAND (exp, 1));
6292
6293 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6294
6295 return;
6296 }
6297 }
6298
6299 abort ();
6300
6301 binop:
6302
6303 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6304 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6305 return;
6306
6307
6308 unop:
6309
6310 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6311 return;
6312
6313
6314 andorif:
6315
6316 bc_expand_expr (TREE_OPERAND (exp, 0));
6317 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6318 lab = bc_get_bytecode_label ();
6319
6320 bc_emit_instruction (dup);
6321 bc_emit_bytecode (opcode);
6322 bc_emit_bytecode_labelref (lab);
6323
6324#ifdef DEBUG_PRINT_CODE
6325 fputc ('\n', stderr);
6326#endif
6327
6328 bc_emit_instruction (drop);
6329
6330 bc_expand_expr (TREE_OPERAND (exp, 1));
6331 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6332 bc_emit_bytecode_labeldef (lab);
6333 return;
6334
6335
6336 increment:
6337
6338 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6339
6340 /* Push the quantum. */
6341 bc_expand_expr (TREE_OPERAND (exp, 1));
6342
6343 /* Convert it to the lvalue's type. */
6344 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6345
6346 /* Push the address of the lvalue */
c02bd5d9 6347 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6348
6349 /* Perform actual increment */
c02bd5d9 6350 bc_expand_increment (incroptab, type);
6351 return;
6352}
6353\f
6354/* Return the alignment in bits of EXP, a pointer valued expression.
6355 But don't return more than MAX_ALIGN no matter what.
6356 The alignment returned is, by default, the alignment of the thing that
6357 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6358
6359 Otherwise, look at the expression to see if we can do better, i.e., if the
6360 expression is actually pointing at an object whose alignment is tighter. */
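/* Illustrative example (not in the original source): for the expression
   `(char *) &d' where `d' is a double, the NOP_EXPR case looks through
   the cast and the ADDR_EXPR case finds DECL_ALIGN (d), so the result
   is the alignment of `double' (capped at MAX_ALIGN) rather than the
   alignment of `char'.  */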
6361
6362static int
6363get_pointer_alignment (exp, max_align)
6364 tree exp;
6365 unsigned max_align;
6366{
6367 unsigned align, inner;
6368
6369 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6370 return 0;
6371
6372 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6373 align = MIN (align, max_align);
6374
6375 while (1)
6376 {
6377 switch (TREE_CODE (exp))
6378 {
6379 case NOP_EXPR:
6380 case CONVERT_EXPR:
6381 case NON_LVALUE_EXPR:
6382 exp = TREE_OPERAND (exp, 0);
6383 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6384 return align;
6385 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6386 inner = MIN (inner, max_align);
6387 align = MAX (align, inner);
6388 break;
6389
6390 case PLUS_EXPR:
6391 /* If sum of pointer + int, restrict our maximum alignment to that
6392 imposed by the integer. If not, we can't do any better than
6393 ALIGN. */
6394 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6395 return align;
6396
6397 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6398 & (max_align - 1))
6399 != 0)
6400 max_align >>= 1;
6401
6402 exp = TREE_OPERAND (exp, 0);
6403 break;
6404
6405 case ADDR_EXPR:
6406 /* See what we are pointing at and look at its alignment. */
6407 exp = TREE_OPERAND (exp, 0);
6408 if (TREE_CODE (exp) == FUNCTION_DECL)
6409 align = MAX (align, FUNCTION_BOUNDARY);
6410 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6411 align = MAX (align, DECL_ALIGN (exp));
6412#ifdef CONSTANT_ALIGNMENT
6413 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6414 align = CONSTANT_ALIGNMENT (exp, align);
6415#endif
6416 return MIN (align, max_align);
6417
6418 default:
6419 return align;
6420 }
6421 }
6422}
6423\f
6424/* Return the tree node and offset if a given argument corresponds to
6425 a string constant. */
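/* Illustrative example (not in the original source): for an argument
   tree representing `"hello" + 2', this returns the STRING_CST for
   "hello" and sets *PTR_OFFSET to 2; for a plain `"hello"' argument
   the offset is integer_zero_node.  */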
6426
6427static tree
6428string_constant (arg, ptr_offset)
6429 tree arg;
6430 tree *ptr_offset;
6431{
6432 STRIP_NOPS (arg);
6433
6434 if (TREE_CODE (arg) == ADDR_EXPR
6435 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6436 {
6437 *ptr_offset = integer_zero_node;
6438 return TREE_OPERAND (arg, 0);
6439 }
6440 else if (TREE_CODE (arg) == PLUS_EXPR)
6441 {
6442 tree arg0 = TREE_OPERAND (arg, 0);
6443 tree arg1 = TREE_OPERAND (arg, 1);
6444
6445 STRIP_NOPS (arg0);
6446 STRIP_NOPS (arg1);
6447
6448 if (TREE_CODE (arg0) == ADDR_EXPR
6449 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6450 {
6451 *ptr_offset = arg1;
6452 return TREE_OPERAND (arg0, 0);
6453 }
6454 else if (TREE_CODE (arg1) == ADDR_EXPR
6455 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6456 {
6457 *ptr_offset = arg0;
6458 return TREE_OPERAND (arg1, 0);
6459 }
6460 }
6461
6462 return 0;
6463}
6464
6465/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6466 way, because it could contain a zero byte in the middle.
6467 TREE_STRING_LENGTH is the size of the character array, not the string.
6468
6469 Unfortunately, string_constant can't access the values of const char
6470 arrays with initializers, so neither can we do so here. */
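/* Illustrative example (not in the original source): for SRC
   representing `"hello" + 2', string_constant yields the STRING_CST
   and a known offset of 2, so the result is
   size_int (strlen ("hello" + 2)), i.e. size_int (3).  */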
6471
6472static tree
6473c_strlen (src)
6474 tree src;
6475{
6476 tree offset_node;
6477 int offset, max;
6478 char *ptr;
6479
6480 src = string_constant (src, &offset_node);
6481 if (src == 0)
6482 return 0;
6483 max = TREE_STRING_LENGTH (src);
6484 ptr = TREE_STRING_POINTER (src);
6485 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6486 {
6487 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6488 compute the offset to the following null if we don't know where to
6489 start searching for it. */
6490 int i;
6491 for (i = 0; i < max; i++)
6492 if (ptr[i] == 0)
6493 return 0;
6494 /* We don't know the starting offset, but we do know that the string
6495 has no internal zero bytes. We can assume that the offset falls
6496 within the bounds of the string; otherwise, the programmer deserves
6497 what he gets. Subtract the offset from the length of the string,
6498 and return that. */
6499 /* This would perhaps not be valid if we were dealing with named
6500 arrays in addition to literal string constants. */
6501 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6502 }
6503
6504 /* We have a known offset into the string. Start searching there for
6505 a null character. */
6506 if (offset_node == 0)
6507 offset = 0;
6508 else
6509 {
6510 /* Did we get a long long offset? If so, punt. */
6511 if (TREE_INT_CST_HIGH (offset_node) != 0)
6512 return 0;
6513 offset = TREE_INT_CST_LOW (offset_node);
6514 }
6515 /* If the offset is known to be out of bounds, warn, and call strlen at
6516 runtime. */
6517 if (offset < 0 || offset > max)
6518 {
6519 warning ("offset outside bounds of constant string");
6520 return 0;
6521 }
6522 /* Use strlen to search for the first zero byte. Since any strings
6523 constructed with build_string will have nulls appended, we win even
6524 if we get handed something like (char[4])"abcd".
6525
6526 Since OFFSET is our starting index into the string, no further
6527 calculation is needed. */
6528 return size_int (strlen (ptr + offset));
6529}
6530\f
6531/* Expand an expression EXP that calls a built-in function,
6532 with result going to TARGET if that's convenient
6533 (and in mode MODE if that's convenient).
6534 SUBTARGET may be used as the target for computing one of EXP's operands.
6535 IGNORE is nonzero if the value is to be ignored. */
6536
6537static rtx
6538expand_builtin (exp, target, subtarget, mode, ignore)
6539 tree exp;
6540 rtx target;
6541 rtx subtarget;
6542 enum machine_mode mode;
6543 int ignore;
6544{
6545 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6546 tree arglist = TREE_OPERAND (exp, 1);
6547 rtx op0;
6548 rtx lab1, insns;
6549 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6550 optab builtin_optab;
6551
6552 switch (DECL_FUNCTION_CODE (fndecl))
6553 {
6554 case BUILT_IN_ABS:
6555 case BUILT_IN_LABS:
6556 case BUILT_IN_FABS:
6557 /* build_function_call changes these into ABS_EXPR. */
6558 abort ();
6559
6560 case BUILT_IN_SIN:
6561 case BUILT_IN_COS:
6562 case BUILT_IN_FSQRT:
6563 /* If not optimizing, call the library function. */
6564 if (! optimize)
6565 break;
6566
6567 if (arglist == 0
6568 /* Arg could be wrong type if user redeclared this fcn wrong. */
6569 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6570 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
6571
6572 /* Stabilize and compute the argument. */
6573 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6574 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6575 {
6576 exp = copy_node (exp);
6577 arglist = copy_node (arglist);
6578 TREE_OPERAND (exp, 1) = arglist;
6579 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6580 }
6581 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6582
6583 /* Make a suitable register to place result in. */
6584 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6585
6586 emit_queue ();
6587 start_sequence ();
6588
6589 switch (DECL_FUNCTION_CODE (fndecl))
6590 {
6591 case BUILT_IN_SIN:
6592 builtin_optab = sin_optab; break;
6593 case BUILT_IN_COS:
6594 builtin_optab = cos_optab; break;
6595 case BUILT_IN_FSQRT:
6596 builtin_optab = sqrt_optab; break;
6597 default:
6598 abort ();
6599 }
6600
6601 /* Compute into TARGET.
6602 Set TARGET to wherever the result comes back. */
6603 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6604 builtin_optab, op0, target, 0);
6605
6606       /* If we were unable to expand via the builtin, stop the
6607 	 sequence (without outputting the insns) and break, causing
6608 	 a call to the library function.  */
6609 if (target == 0)
6610 {
6611 end_sequence ();
6612 break;
6613 }
6614
6615 /* Check the results by default. But if flag_fast_math is turned on,
6616 then assume sqrt will always be called with valid arguments. */
6617
6618 if (! flag_fast_math)
6619 {
6620 /* Don't define the builtin FP instructions
6621 if your machine is not IEEE. */
6622 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6623 abort ();
6624
6625 lab1 = gen_label_rtx ();
6626
6627 /* Test the result; if it is NaN, set errno=EDOM because
6628 the argument was not in the domain. */
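	  /* (Comparing TARGET against itself with EQ fails only when it
	     is a NaN, since a NaN compares unequal to everything --
	     including itself -- under the IEEE semantics verified
	     above.)  */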
6629 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6630 emit_jump_insn (gen_beq (lab1));
6631
6632#if TARGET_EDOM
6633 {
6634#ifdef GEN_ERRNO_RTX
6635 rtx errno_rtx = GEN_ERRNO_RTX;
6636#else
6637 rtx errno_rtx
6638 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6639#endif
6640
6641 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6642 }
6643#else
6644 /* We can't set errno=EDOM directly; let the library call do it.
6645 Pop the arguments right away in case the call gets deleted. */
6646 NO_DEFER_POP;
6647 expand_call (exp, target, 0);
6648 OK_DEFER_POP;
6649#endif
6650
6651 emit_label (lab1);
6652 }
6653
6654 /* Output the entire sequence. */
6655 insns = get_insns ();
6656 end_sequence ();
6657 emit_insns (insns);
6658
6659 return target;
6660
6661 /* __builtin_apply_args returns block of memory allocated on
6662 the stack into which is stored the arg pointer, structure
6663 value address, static chain, and all the registers that might
6664 possibly be used in performing a function call. The code is
6665 moved to the start of the function so the incoming values are
6666 saved. */
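    /* Illustrative usage (not in the original source):

	   void *args = __builtin_apply_args ();

       typically the first step of a function that forwards its own
       arguments to another function via __builtin_apply.  */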
6667 case BUILT_IN_APPLY_ARGS:
6668 /* Don't do __builtin_apply_args more than once in a function.
6669 Save the result of the first call and reuse it. */
6670 if (apply_args_value != 0)
6671 return apply_args_value;
6672 {
6673 /* When this function is called, it means that registers must be
6674 saved on entry to this function. So we migrate the
6675 call to the first insn of this function. */
6676 rtx temp;
6677 rtx seq;
6678
6679 start_sequence ();
6680 temp = expand_builtin_apply_args ();
6681 seq = get_insns ();
6682 end_sequence ();
6683
6684 apply_args_value = temp;
6685
6686 /* Put the sequence after the NOTE that starts the function.
6687 If this is inside a SEQUENCE, make the outer-level insn
6688 chain current, so the code is placed at the start of the
6689 function. */
6690 push_topmost_sequence ();
6691 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6692 pop_topmost_sequence ();
6693 return temp;
6694 }
6695
6696 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6697 FUNCTION with a copy of the parameters described by
6698 ARGUMENTS, and ARGSIZE. It returns a block of memory
6699 allocated on the stack into which is stored all the registers
6700 that might possibly be used for returning the result of a
6701 function. ARGUMENTS is the value returned by
6702 __builtin_apply_args. ARGSIZE is the number of bytes of
6703 arguments that must be copied. ??? How should this value be
6704 computed? We'll also need a safe worst case value for varargs
6705 functions. */
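    /* Illustrative usage (not in the original source), forwarding a
       call transparently; the byte count 64 here is a made-up
       worst-case argument size, not something the compiler computes:

	   void *ret = __builtin_apply (fn, __builtin_apply_args (), 64);
	   __builtin_return (ret);  */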
6706 case BUILT_IN_APPLY:
6707 if (arglist == 0
6708 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6709 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6710 || TREE_CHAIN (arglist) == 0
6711 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6712 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6713 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6714 return const0_rtx;
6715 else
6716 {
6717 int i;
6718 tree t;
6719 rtx ops[3];
6720
6721 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6722 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6723
6724 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6725 }
6726
6727 /* __builtin_return (RESULT) causes the function to return the
6728 value described by RESULT. RESULT is address of the block of
6729 memory returned by __builtin_apply. */
6730 case BUILT_IN_RETURN:
6731 if (arglist
6732 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6733 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
6734 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
6735 NULL_RTX, VOIDmode, 0));
6736 return const0_rtx;
6737
6738 case BUILT_IN_SAVEREGS:
6739 /* Don't do __builtin_saveregs more than once in a function.
6740 Save the result of the first call and reuse it. */
6741 if (saveregs_value != 0)
6742 return saveregs_value;
6743 {
6744 /* When this function is called, it means that registers must be
6745 saved on entry to this function. So we migrate the
6746 call to the first insn of this function. */
6747 rtx temp;
6748 rtx seq;
6749 rtx valreg, saved_valreg;
6750
6751 /* Now really call the function. `expand_call' does not call
6752 expand_builtin, so there is no danger of infinite recursion here. */
6753 start_sequence ();
6754
6755#ifdef EXPAND_BUILTIN_SAVEREGS
6756 /* Do whatever the machine needs done in this case. */
6757 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6758#else
6759 /* The register where the function returns its value
6760 is likely to have something else in it, such as an argument.
6761 So preserve that register around the call. */
6762 if (value_mode != VOIDmode)
6763 {
6764 valreg = hard_libcall_value (value_mode);
6765 saved_valreg = gen_reg_rtx (value_mode);
6766 emit_move_insn (saved_valreg, valreg);
6767 }
6768
6769 /* Generate the call, putting the value in a pseudo. */
6770 temp = expand_call (exp, target, ignore);
6771
6772 if (value_mode != VOIDmode)
6773 emit_move_insn (valreg, saved_valreg);
6774#endif
6775
6776 seq = get_insns ();
6777 end_sequence ();
6778
6779 saveregs_value = temp;
6780
6781 /* Put the sequence after the NOTE that starts the function.
6782 If this is inside a SEQUENCE, make the outer-level insn
6783 chain current, so the code is placed at the start of the
6784 function. */
6785 push_topmost_sequence ();
6786 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6787 pop_topmost_sequence ();
6788 return temp;
6789 }
6790
6791 /* __builtin_args_info (N) returns word N of the arg space info
6792 for the current function. The number and meanings of words
6793 are controlled by the definition of CUMULATIVE_ARGS. */
6794 case BUILT_IN_ARGS_INFO:
6795 {
6796 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6797 int i;
6798 int *word_ptr = (int *) &current_function_args_info;
6799 tree type, elts, result;
6800
6801 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6802 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6803 __FILE__, __LINE__);
6804
6805 if (arglist != 0)
6806 {
6807 tree arg = TREE_VALUE (arglist);
6808 if (TREE_CODE (arg) != INTEGER_CST)
6809 error ("argument of `__builtin_args_info' must be constant");
6810 else
6811 {
6812 int wordnum = TREE_INT_CST_LOW (arg);
6813
6814 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6815 error ("argument of `__builtin_args_info' out of range");
6816 else
6817 return GEN_INT (word_ptr[wordnum]);
6818 }
6819 }
6820 else
6821 error ("missing argument in `__builtin_args_info'");
6822
6823 return const0_rtx;
6824
6825#if 0
6826 for (elts = NULL_TREE, i = 0; i < nwords; i++)
6827 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
6828
6829 type = build_array_type (integer_type_node,
6830 build_index_type (build_int_2 (nwords, 0)));
6831 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6832 TREE_CONSTANT (result) = 1;
6833 TREE_STATIC (result) = 1;
6834 result = build (INDIRECT_REF, build_pointer_type (type), result);
6835 TREE_CONSTANT (result) = 1;
6836 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6837#endif
6838 }
6839
6840 /* Return the address of the first anonymous stack arg. */
6841 case BUILT_IN_NEXT_ARG:
6842 {
6843 tree fntype = TREE_TYPE (current_function_decl);
6844 if (!(TYPE_ARG_TYPES (fntype) != 0
6845 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6846 != void_type_node)))
6847 {
6848 error ("`va_start' used in function with fixed args");
6849 return const0_rtx;
6850 }
6851 }
6852
6853 return expand_binop (Pmode, add_optab,
6854 current_function_internal_arg_pointer,
6855 current_function_arg_offset_rtx,
6856 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6857
6858 case BUILT_IN_CLASSIFY_TYPE:
6859 if (arglist != 0)
6860 {
6861 tree type = TREE_TYPE (TREE_VALUE (arglist));
6862 enum tree_code code = TREE_CODE (type);
6863 if (code == VOID_TYPE)
6864 return GEN_INT (void_type_class);
6865 if (code == INTEGER_TYPE)
6866 return GEN_INT (integer_type_class);
6867 if (code == CHAR_TYPE)
6868 return GEN_INT (char_type_class);
6869 if (code == ENUMERAL_TYPE)
6870 return GEN_INT (enumeral_type_class);
6871 if (code == BOOLEAN_TYPE)
6872 return GEN_INT (boolean_type_class);
6873 if (code == POINTER_TYPE)
6874 return GEN_INT (pointer_type_class);
6875 if (code == REFERENCE_TYPE)
6876 return GEN_INT (reference_type_class);
6877 if (code == OFFSET_TYPE)
6878 return GEN_INT (offset_type_class);
6879 if (code == REAL_TYPE)
6880 return GEN_INT (real_type_class);
6881 if (code == COMPLEX_TYPE)
6882 return GEN_INT (complex_type_class);
6883 if (code == FUNCTION_TYPE)
6884 return GEN_INT (function_type_class);
6885 if (code == METHOD_TYPE)
6886 return GEN_INT (method_type_class);
6887 if (code == RECORD_TYPE)
6888 return GEN_INT (record_type_class);
6889 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
6890 return GEN_INT (union_type_class);
6891 if (code == ARRAY_TYPE)
6892 return GEN_INT (array_type_class);
6893 if (code == STRING_TYPE)
6894 return GEN_INT (string_type_class);
6895 if (code == SET_TYPE)
6896 return GEN_INT (set_type_class);
6897 if (code == FILE_TYPE)
6898 return GEN_INT (file_type_class);
6899 if (code == LANG_TYPE)
6900 return GEN_INT (lang_type_class);
6901 }
6902 return GEN_INT (no_type_class);
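/* An illustrative case for the classification above: since only the
   static type of the argument is inspected, a call such as
   __builtin_classify_type (3.14) expands directly to the constant
   real_type_class, and __builtin_classify_type ((char *) 0) to
   pointer_type_class; in this implementation no code is emitted for
   the argument itself. */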
6903
6904 case BUILT_IN_CONSTANT_P:
6905 if (arglist == 0)
6906 return const0_rtx;
6907 else
6908 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6909 ? const1_rtx : const0_rtx);
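/* A small sketch of what the test above accepts: only literal
   constants (tree code class 'c') count here, so

       __builtin_constant_p (42)      expands to 1
       __builtin_constant_p (x + 1)   expands to 0

   even if `x' could later be shown constant by optimization. */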
6910
6911 case BUILT_IN_FRAME_ADDRESS:
6912 /* The argument must be a nonnegative integer constant.
6913 It counts the number of frames to scan up the stack.
6914 The value is the address of that frame. */
6915 case BUILT_IN_RETURN_ADDRESS:
6916 /* The argument must be a nonnegative integer constant.
6917 It counts the number of frames to scan up the stack.
6918 The value is the return address saved in that frame. */
6919 if (arglist == 0)
6920 /* Warning about missing arg was already issued. */
6921 return const0_rtx;
6922 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6923 {
6924 error ("invalid arg to `__builtin_return_address'");
6925 return const0_rtx;
6926 }
6927 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6928 {
6929 error ("invalid arg to `__builtin_return_address'");
6930 return const0_rtx;
6931 }
6932 else
6933 {
6934 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6935 rtx tem = frame_pointer_rtx;
6936 int i;
6937
6938 /* Some machines need special handling before we can access arbitrary
6939 frames. For example, on the sparc, we must first flush all
6940 register windows to the stack. */
6941#ifdef SETUP_FRAME_ADDRESSES
6942 SETUP_FRAME_ADDRESSES ();
6943#endif
6944
6945 /* On the sparc, the return address is not in the frame, it is
6946 in a register. There is no way to access it off of the current
6947 frame pointer, but it can be accessed off the previous frame
6948 pointer by reading the value from the register window save
6949 area. */
6950#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6951 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6952 count--;
6953#endif
6954
6955 /* Scan back COUNT frames to the specified frame. */
6956 for (i = 0; i < count; i++)
6957 {
6958 /* Assume the dynamic chain pointer is in the word that
6959 the frame address points to, unless otherwise specified. */
6960#ifdef DYNAMIC_CHAIN_ADDRESS
6961 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6962#endif
6963 tem = memory_address (Pmode, tem);
6964 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6965 }
6966
6967 /* For __builtin_frame_address, return what we've got. */
6968 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6969 return tem;
6970
6971 /* For __builtin_return_address,
6972 get the return address from that frame. */
6973#ifdef RETURN_ADDR_RTX
6974 return RETURN_ADDR_RTX (count, tem);
6975#else
6976 tem = memory_address (Pmode,
6977 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6978 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6979#endif
6980 }
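/* A worked instance of the frame walk above, assuming the generic
   path (no RETURN_ADDR_RTX): for

       void *pc = __builtin_return_address (1);

   COUNT is 1, so the loop loads the dynamic chain word once to reach
   the caller's frame, and the return address is then fetched from
   that frame pointer plus GET_MODE_SIZE (Pmode). */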
6981
6982 case BUILT_IN_ALLOCA:
6983 if (arglist == 0
6984 /* Arg could be non-integer if user redeclared this fcn wrong. */
6985 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6986 return const0_rtx;
6987 current_function_calls_alloca = 1;
6988 /* Compute the argument. */
6989 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6990
6991 /* Allocate the desired space. */
6992 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6993
6994 /* Record the new stack level for nonlocal gotos. */
6995 if (nonlocal_goto_handler_slot != 0)
6996 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6997 return target;
6998
6999 case BUILT_IN_FFS:
7000 /* If not optimizing, call the library function. */
7001 if (!optimize)
7002 break;
7003
7004 if (arglist == 0
7005 /* Arg could be non-integer if user redeclared this fcn wrong. */
7006 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7007 return const0_rtx;
7008
7009 /* Compute the argument. */
7010 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7011 /* Compute ffs, into TARGET if possible.
7012 Set TARGET to wherever the result comes back. */
7013 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7014 ffs_optab, op0, target, 1);
7015 if (target == 0)
7016 abort ();
7017 return target;
7018
7019 case BUILT_IN_STRLEN:
7020 /* If not optimizing, call the library function. */
7021 if (!optimize)
7022 break;
7023
7024 if (arglist == 0
7025 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7026 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7027 return const0_rtx;
7028 else
7029 {
7030 tree src = TREE_VALUE (arglist);
7031 tree len = c_strlen (src);
7032
7033 int align
7034 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7035
7036 rtx result, src_rtx, char_rtx;
7037 enum machine_mode insn_mode = value_mode, char_mode;
7038 enum insn_code icode;
7039
7040 /* If the length is known, just return it. */
7041 if (len != 0)
7042 return expand_expr (len, target, mode, 0);
7043
7044 /* If SRC is not a pointer type, don't do this operation inline. */
7045 if (align == 0)
7046 break;
7047
7048 /* Call a function if we can't compute strlen in the right mode. */
7049
7050 while (insn_mode != VOIDmode)
7051 {
7052 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7053 if (icode != CODE_FOR_nothing)
7054 break;
7055
7056 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7057 }
7058 if (insn_mode == VOIDmode)
7059 break;
7060
7061 /* Make a place to write the result of the instruction. */
7062 result = target;
7063 if (! (result != 0
7064 && GET_CODE (result) == REG
7065 && GET_MODE (result) == insn_mode
7066 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7067 result = gen_reg_rtx (insn_mode);
7068
7069 /* Make sure the operands are acceptable to the predicates. */
7070
7071 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7072 result = gen_reg_rtx (insn_mode);
7073
7074 src_rtx = memory_address (BLKmode,
7075 expand_expr (src, NULL_RTX, Pmode,
7076 EXPAND_NORMAL));
7077 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7078 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7079
7080 char_rtx = const0_rtx;
7081 char_mode = insn_operand_mode[(int)icode][2];
7082 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7083 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7084
7085 emit_insn (GEN_FCN (icode) (result,
7086 gen_rtx (MEM, BLKmode, src_rtx),
7087 char_rtx, GEN_INT (align)));
7088
7089 /* Return the value in the proper mode for this function. */
7090 if (GET_MODE (result) == value_mode)
7091 return result;
7092 else if (target != 0)
7093 {
7094 convert_move (target, result, 0);
7095 return target;
7096 }
7097 else
7098 return convert_to_mode (value_mode, result, 0);
7099 }
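/* As an illustrative case of the constant-length shortcut above,

       int n = strlen ("hello");

   emits neither a strlen insn nor a library call: c_strlen folds the
   length to the constant 5, which expand_expr returns directly. */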
7100
7101 case BUILT_IN_STRCPY:
7102 /* If not optimizing, call the library function. */
7103 if (!optimize)
7104 break;
7105
7106 if (arglist == 0
7107 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7108 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7109 || TREE_CHAIN (arglist) == 0
7110 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7111 return const0_rtx;
7112 else
7113 {
7114 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7115
7116 if (len == 0)
7117 break;
7118
7119 len = size_binop (PLUS_EXPR, len, integer_one_node);
7120
7121 chainon (arglist, build_tree_list (NULL_TREE, len));
7122 }
7123
7124 /* Drops in. */
7125 case BUILT_IN_MEMCPY:
7126 /* If not optimizing, call the library function. */
7127 if (!optimize)
7128 break;
7129
7130 if (arglist == 0
7131 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7132 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7133 || TREE_CHAIN (arglist) == 0
7134 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7135 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7136 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7137 return const0_rtx;
7138 else
7139 {
7140 tree dest = TREE_VALUE (arglist);
7141 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7142 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7143
7144 int src_align
7145 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7146 int dest_align
7147 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7148 rtx dest_rtx, dest_mem, src_mem;
7149
7150 /* If either SRC or DEST is not a pointer type, don't do
7151 this operation in-line. */
7152 if (src_align == 0 || dest_align == 0)
7153 {
7154 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7155 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7156 break;
7157 }
7158
7159 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7160 dest_mem = gen_rtx (MEM, BLKmode,
7161 memory_address (BLKmode, dest_rtx));
7162 src_mem = gen_rtx (MEM, BLKmode,
7163 memory_address (BLKmode,
7164 expand_expr (src, NULL_RTX,
7165 Pmode,
7166 EXPAND_NORMAL)));
7167
7168 /* Copy word part most expediently. */
7169 emit_block_move (dest_mem, src_mem,
7170 expand_expr (len, NULL_RTX, VOIDmode, 0),
7171 MIN (src_align, dest_align));
7172 return dest_rtx;
7173 }
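/* An illustrative instance of the strcpy-to-memcpy fall-through
   above: for

       char buf[8];
       strcpy (buf, "hi");

   the source length is known, so the argument list is extended with
   strlen ("hi") + 1 == 3 and the copy is expanded just as

       memcpy (buf, "hi", 3);

   would be, through emit_block_move using the smaller of the two
   pointer alignments. */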
7174
7175/* These comparison functions need an instruction that returns an actual
7176 index. An ordinary compare that just sets the condition codes
7177 is not enough. */
7178#ifdef HAVE_cmpstrsi
7179 case BUILT_IN_STRCMP:
7180 /* If not optimizing, call the library function. */
7181 if (!optimize)
7182 break;
7183
7184 if (arglist == 0
7185 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7186 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7187 || TREE_CHAIN (arglist) == 0
7188 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7189 return const0_rtx;
7190 else if (!HAVE_cmpstrsi)
7191 break;
7192 {
7193 tree arg1 = TREE_VALUE (arglist);
7194 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7195 tree offset;
7196 tree len, len2;
7197
7198 len = c_strlen (arg1);
7199 if (len)
7200 len = size_binop (PLUS_EXPR, integer_one_node, len);
7201 len2 = c_strlen (arg2);
7202 if (len2)
7203 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7204
7205 /* If we don't have a constant length for the first, use the length
7206 of the second, if we know it. We don't require a constant for
7207 this case; some cost analysis could be done if both are available
7208 but neither is constant. For now, assume they're equally cheap.
7209
7210 If both strings have constant lengths, use the smaller. This
7211 could arise if optimization results in strcmp being called with
7212 two fixed strings, or if the code was machine-generated. We should
7213 add some code to the `memcmp' handler below to deal with such
7214 situations, someday. */
7215 if (!len || TREE_CODE (len) != INTEGER_CST)
7216 {
7217 if (len2)
7218 len = len2;
7219 else if (len == 0)
7220 break;
7221 }
7222 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7223 {
7224 if (tree_int_cst_lt (len2, len))
7225 len = len2;
7226 }
7227
7228 chainon (arglist, build_tree_list (NULL_TREE, len));
7229 }
7230
7231 /* Drops in. */
7232 case BUILT_IN_MEMCMP:
7233 /* If not optimizing, call the library function. */
7234 if (!optimize)
7235 break;
7236
7237 if (arglist == 0
7238 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7239 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7240 || TREE_CHAIN (arglist) == 0
7241 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7242 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7243 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7244 return const0_rtx;
7245 else if (!HAVE_cmpstrsi)
7246 break;
7247 {
7248 tree arg1 = TREE_VALUE (arglist);
7249 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7250 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7251 rtx result;
7252
7253 int arg1_align
7254 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7255 int arg2_align
7256 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7257 enum machine_mode insn_mode
7258 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7259
7260 /* If we don't have POINTER_TYPE, call the function. */
7261 if (arg1_align == 0 || arg2_align == 0)
7262 {
7263 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7264 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7265 break;
7266 }
7267
7268 /* Make a place to write the result of the instruction. */
7269 result = target;
7270 if (! (result != 0
7271 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7272 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7273 result = gen_reg_rtx (insn_mode);
7274
7275 emit_insn (gen_cmpstrsi (result,
7276 gen_rtx (MEM, BLKmode,
7277 expand_expr (arg1, NULL_RTX, Pmode,
7278 EXPAND_NORMAL)),
7279 gen_rtx (MEM, BLKmode,
7280 expand_expr (arg2, NULL_RTX, Pmode,
7281 EXPAND_NORMAL)),
7282 expand_expr (len, NULL_RTX, VOIDmode, 0),
7283 GEN_INT (MIN (arg1_align, arg2_align))));
7284
7285 /* Return the value in the proper mode for this function. */
7286 mode = TYPE_MODE (TREE_TYPE (exp));
7287 if (GET_MODE (result) == mode)
7288 return result;
7289 else if (target != 0)
7290 {
7291 convert_move (target, result, 0);
7292 return target;
7293 }
7294 else
7295 return convert_to_mode (mode, result, 0);
7296 }
7297#else
7298 case BUILT_IN_STRCMP:
7299 case BUILT_IN_MEMCMP:
7300 break;
7301#endif
7302
7303 default: /* just do library call, if unknown builtin */
7304 error ("built-in function `%s' not currently supported",
7305 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7306 }
7307
7308 /* The switch statement above can drop through to cause the function
7309 to be called normally. */
7310
7311 return expand_call (exp, target, ignore);
7312}
7313\f
7314/* Built-in functions to perform an untyped call and return. */
7315
7316/* For each register that may be used for calling a function, this
7317 gives a mode used to copy the register's value. VOIDmode indicates
7318 the register is not used for calling a function. If the machine
7319 has register windows, this gives only the outbound registers.
7320 INCOMING_REGNO gives the corresponding inbound register. */
7321static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7322
7323/* For each register that may be used for returning values, this gives
7324 a mode used to copy the register's value. VOIDmode indicates the
7325 register is not used for returning values. If the machine has
7326 register windows, this gives only the outbound registers.
7327 INCOMING_REGNO gives the corresponding inbound register. */
7328static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7329
7330/* For each register that may be used for calling a function, this
7331 gives the offset of that register into the block returned by
7332 __builtin_apply_args. 0 indicates that the register is not
7333 used for calling a function. */
7334static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7335
7336/* Return the offset of register REGNO into the block returned by
7337 __builtin_apply_args. This is not declared static, since it is
7338 needed in objc-act.c. */
7339
7340int
7341apply_args_register_offset (regno)
7342 int regno;
7343{
7344 apply_args_size ();
7345
7346 /* Arguments are always put in outgoing registers (in the argument
7347 block) when that makes sense. */
7348#ifdef OUTGOING_REGNO
7349 regno = OUTGOING_REGNO(regno);
7350#endif
7351 return apply_args_reg_offset[regno];
7352}
7353
7354/* Return the size required for the block returned by __builtin_apply_args,
7355 and initialize apply_args_mode. */
7356
7357static int
7358apply_args_size ()
7359{
7360 static int size = -1;
7361 int align, regno;
7362 enum machine_mode mode;
7363
7364 /* The values computed by this function never change. */
7365 if (size < 0)
7366 {
7367 /* The first value is the incoming arg-pointer. */
7368 size = GET_MODE_SIZE (Pmode);
7369
7370 /* The second value is the structure value address unless this is
7371 passed as an "invisible" first argument. */
7372 if (struct_value_rtx)
7373 size += GET_MODE_SIZE (Pmode);
7374
7375 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7376 if (FUNCTION_ARG_REGNO_P (regno))
7377 {
7378 /* Search for the proper mode for copying this register's
7379 value. I'm not sure this is right, but it works so far. */
7380 enum machine_mode best_mode = VOIDmode;
7381
7382 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7383 mode != VOIDmode;
7384 mode = GET_MODE_WIDER_MODE (mode))
7385 if (HARD_REGNO_MODE_OK (regno, mode)
7386 && HARD_REGNO_NREGS (regno, mode) == 1)
7387 best_mode = mode;
7388
7389 if (best_mode == VOIDmode)
7390 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7391 mode != VOIDmode;
7392 mode = GET_MODE_WIDER_MODE (mode))
7393 if (HARD_REGNO_MODE_OK (regno, mode)
7394 && (mov_optab->handlers[(int) mode].insn_code
7395 != CODE_FOR_nothing))
7396 best_mode = mode;
7397
7398 mode = best_mode;
7399 if (mode == VOIDmode)
7400 abort ();
7401
7402 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7403 if (size % align != 0)
7404 size = CEIL (size, align) * align;
7405 apply_args_reg_offset[regno] = size;
7406 size += GET_MODE_SIZE (mode);
7407 apply_args_mode[regno] = mode;
7408 }
7409 else
7410 {
7411 apply_args_mode[regno] = VOIDmode;
7412 apply_args_reg_offset[regno] = 0;
7413 }
7414 }
7415 return size;
7416}
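/* A worked example of the round-up used above: with

       if (size % align != 0)
         size = CEIL (size, align) * align;

   a running SIZE of 4 and an 8-byte aligned mode give CEIL (4, 8) == 1,
   so the register is placed at offset 8 and SIZE then advances by
   GET_MODE_SIZE (mode). */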
7417
7418/* Return the size required for the block returned by __builtin_apply,
7419 and initialize apply_result_mode. */
7420
7421static int
7422apply_result_size ()
7423{
7424 static int size = -1;
7425 int align, regno;
7426 enum machine_mode mode;
7427
7428 /* The values computed by this function never change. */
7429 if (size < 0)
7430 {
7431 size = 0;
7432
7433 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7434 if (FUNCTION_VALUE_REGNO_P (regno))
7435 {
7436 /* Search for the proper mode for copying this register's
7437 value. I'm not sure this is right, but it works so far. */
7438 enum machine_mode best_mode = VOIDmode;
7439
7440 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7441 mode != TImode;
7442 mode = GET_MODE_WIDER_MODE (mode))
7443 if (HARD_REGNO_MODE_OK (regno, mode))
7444 best_mode = mode;
7445
7446 if (best_mode == VOIDmode)
7447 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7448 mode != VOIDmode;
7449 mode = GET_MODE_WIDER_MODE (mode))
7450 if (HARD_REGNO_MODE_OK (regno, mode)
7451 && (mov_optab->handlers[(int) mode].insn_code
7452 != CODE_FOR_nothing))
7453 best_mode = mode;
7454
7455 mode = best_mode;
7456 if (mode == VOIDmode)
7457 abort ();
7458
7459 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7460 if (size % align != 0)
7461 size = CEIL (size, align) * align;
7462 size += GET_MODE_SIZE (mode);
7463 apply_result_mode[regno] = mode;
7464 }
7465 else
7466 apply_result_mode[regno] = VOIDmode;
7467
7468 /* Allow targets that use untyped_call and untyped_return to override
7469 the size so that machine-specific information can be stored here. */
7470#ifdef APPLY_RESULT_SIZE
7471 size = APPLY_RESULT_SIZE;
7472#endif
7473 }
7474 return size;
7475}
7476
7477#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7478/* Create a vector describing the result block RESULT. If SAVEP is true,
7479 the result block is used to save the values; otherwise it is used to
7480 restore the values. */
7481
7482static rtx
7483result_vector (savep, result)
7484 int savep;
7485 rtx result;
7486{
7487 int regno, size, align, nelts;
7488 enum machine_mode mode;
7489 rtx reg, mem;
7490 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7491
7492 size = nelts = 0;
7493 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7494 if ((mode = apply_result_mode[regno]) != VOIDmode)
7495 {
7496 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7497 if (size % align != 0)
7498 size = CEIL (size, align) * align;
7499 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7500 mem = change_address (result, mode,
7501 plus_constant (XEXP (result, 0), size));
7502 savevec[nelts++] = (savep
7503 ? gen_rtx (SET, VOIDmode, mem, reg)
7504 : gen_rtx (SET, VOIDmode, reg, mem));
7505 size += GET_MODE_SIZE (mode);
7506 }
7507 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7508}
7509#endif /* HAVE_untyped_call or HAVE_untyped_return */
7510
7511/* Save the state required to perform an untyped call with the same
7512 arguments as were passed to the current function. */
7513
7514static rtx
7515expand_builtin_apply_args ()
7516{
7517 rtx registers;
7518 int size, align, regno;
7519 enum machine_mode mode;
7520
7521 /* Create a block where the arg-pointer, structure value address,
7522 and argument registers can be saved. */
7523 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7524
7525 /* Walk past the arg-pointer and structure value address. */
7526 size = GET_MODE_SIZE (Pmode);
7527 if (struct_value_rtx)
7528 size += GET_MODE_SIZE (Pmode);
7529
7530 /* Save each register used in calling a function to the block. */
7531 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7532 if ((mode = apply_args_mode[regno]) != VOIDmode)
7533 {
7534 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7535 if (size % align != 0)
7536 size = CEIL (size, align) * align;
7537 emit_move_insn (change_address (registers, mode,
7538 plus_constant (XEXP (registers, 0),
7539 size)),
7540 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7541 size += GET_MODE_SIZE (mode);
7542 }
7543
7544 /* Save the arg pointer to the block. */
7545 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7546 copy_to_reg (virtual_incoming_args_rtx));
7547 size = GET_MODE_SIZE (Pmode);
7548
7549 /* Save the structure value address unless this is passed as an
7550 "invisible" first argument. */
7551 if (struct_value_incoming_rtx)
7552 {
7553 emit_move_insn (change_address (registers, Pmode,
7554 plus_constant (XEXP (registers, 0),
7555 size)),
7556 copy_to_reg (struct_value_incoming_rtx));
7557 size += GET_MODE_SIZE (Pmode);
7558 }
7559
7560 /* Return the address of the block. */
7561 return copy_addr_to_reg (XEXP (registers, 0));
7562}
7563
7564/* Perform an untyped call and save the state required to perform an
7565 untyped return of whatever value was returned by the given function. */
7566
7567static rtx
7568expand_builtin_apply (function, arguments, argsize)
7569 rtx function, arguments, argsize;
7570{
7571 int size, align, regno;
7572 enum machine_mode mode;
7573 rtx incoming_args, result, reg, dest, call_insn;
7574 rtx old_stack_level = 0;
7575 rtx use_insns = 0;
7576
7577 /* Create a block where the return registers can be saved. */
7578 result = assign_stack_local (BLKmode, apply_result_size (), -1);
7579
7580 /* ??? The argsize value should be adjusted here. */
7581
7582 /* Fetch the arg pointer from the ARGUMENTS block. */
7583 incoming_args = gen_reg_rtx (Pmode);
7584 emit_move_insn (incoming_args,
7585 gen_rtx (MEM, Pmode, arguments));
7586#ifndef STACK_GROWS_DOWNWARD
7587 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7588 incoming_args, 0, OPTAB_LIB_WIDEN);
7589#endif
7590
7591 /* Perform postincrements before actually calling the function. */
7592 emit_queue ();
7593
7594 /* Push a new argument block and copy the arguments. */
7595 do_pending_stack_adjust ();
7596 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
7597
7598 /* Push a block of memory onto the stack to store the memory arguments.
7599 Save the address in a register, and copy the memory arguments. ??? I
7600 haven't figured out how the calling convention macros affect this,
7601 but it's likely that the source and/or destination addresses in
7602 the block copy will need updating in machine specific ways. */
7603 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7604 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7605 gen_rtx (MEM, BLKmode, incoming_args),
7606 argsize,
7607 PARM_BOUNDARY / BITS_PER_UNIT);
7608
7609 /* Refer to the argument block. */
7610 apply_args_size ();
7611 arguments = gen_rtx (MEM, BLKmode, arguments);
7612
7613 /* Walk past the arg-pointer and structure value address. */
7614 size = GET_MODE_SIZE (Pmode);
7615 if (struct_value_rtx)
7616 size += GET_MODE_SIZE (Pmode);
7617
7618 /* Restore each of the registers previously saved. Make USE insns
7619 for each of these registers for use in making the call. */
7620 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7621 if ((mode = apply_args_mode[regno]) != VOIDmode)
7622 {
7623 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7624 if (size % align != 0)
7625 size = CEIL (size, align) * align;
7626 reg = gen_rtx (REG, mode, regno);
7627 emit_move_insn (reg,
7628 change_address (arguments, mode,
7629 plus_constant (XEXP (arguments, 0),
7630 size)));
7631
7632 push_to_sequence (use_insns);
7633 emit_insn (gen_rtx (USE, VOIDmode, reg));
7634 use_insns = get_insns ();
7635 end_sequence ();
7636 size += GET_MODE_SIZE (mode);
7637 }
7638
7639 /* Restore the structure value address unless this is passed as an
7640 "invisible" first argument. */
7641 size = GET_MODE_SIZE (Pmode);
7642 if (struct_value_rtx)
7643 {
7644 rtx value = gen_reg_rtx (Pmode);
7645 emit_move_insn (value,
7646 change_address (arguments, Pmode,
7647 plus_constant (XEXP (arguments, 0),
7648 size)));
7649 emit_move_insn (struct_value_rtx, value);
7650 if (GET_CODE (struct_value_rtx) == REG)
7651 {
7652 push_to_sequence (use_insns);
7653 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
7654 use_insns = get_insns ();
7655 end_sequence ();
7656 }
7657 size += GET_MODE_SIZE (Pmode);
7658 }
7659
7660 /* All arguments and registers used for the call are set up by now! */
7661 function = prepare_call_address (function, NULL_TREE, &use_insns);
7662
7663 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
7664 need be done, and we don't want to load it into a register as an
7665 optimization, because prepare_call_address already did that when needed. */
7666 if (GET_CODE (function) != SYMBOL_REF)
7667 function = memory_address (FUNCTION_MODE, function);
7668
7669 /* Generate the actual call instruction and save the return value. */
7670#ifdef HAVE_untyped_call
7671 if (HAVE_untyped_call)
7672 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7673 result, result_vector (1, result)));
7674 else
7675#endif
7676#ifdef HAVE_call_value
7677 if (HAVE_call_value)
7678 {
7679 rtx valreg = 0;
7680
7681 /* Locate the unique return register. It is not possible to
7682 express a call that sets more than one return register using
7683 call_value; use untyped_call for that. In fact, untyped_call
7684 only needs to save the return registers in the given block. */
7685 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7686 if ((mode = apply_result_mode[regno]) != VOIDmode)
7687 {
7688 if (valreg)
7689 abort (); /* HAVE_untyped_call required. */
7690 valreg = gen_rtx (REG, mode, regno);
7691 }
7692
7693 emit_call_insn (gen_call_value (valreg,
7694 gen_rtx (MEM, FUNCTION_MODE, function),
7695 const0_rtx, NULL_RTX, const0_rtx));
7696
7697 emit_move_insn (change_address (result, GET_MODE (valreg),
7698 XEXP (result, 0)),
7699 valreg);
7700 }
7701 else
7702#endif
7703 abort ();
7704
7705 /* Find the CALL insn we just emitted and write the USE insns before it. */
7706 for (call_insn = get_last_insn ();
7707 call_insn && GET_CODE (call_insn) != CALL_INSN;
7708 call_insn = PREV_INSN (call_insn))
7709 ;
7710
7711 if (! call_insn)
7712 abort ();
7713
7714 /* Put the USE insns before the CALL. */
7715 emit_insns_before (use_insns, call_insn);
7716
7717 /* Restore the stack. */
7718 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
7719
7720 /* Return the address of the result block. */
7721 return copy_addr_to_reg (XEXP (result, 0));
7722}
7723
7724/* Perform an untyped return. */
7725
7726static void
7727expand_builtin_return (result)
7728 rtx result;
7729{
7730 int size, align, regno;
7731 enum machine_mode mode;
7732 rtx reg;
7733 rtx use_insns = 0;
7734
7735 apply_result_size ();
7736 result = gen_rtx (MEM, BLKmode, result);
7737
7738#ifdef HAVE_untyped_return
7739 if (HAVE_untyped_return)
7740 {
7741 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
7742 emit_barrier ();
7743 return;
7744 }
7745#endif
e7c33f54 7746
ca695ac9
JB
7747 /* Restore the return value and note that each value is used. */
7748 size = 0;
7749 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7750 if ((mode = apply_result_mode[regno]) != VOIDmode)
7751 {
7752 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7753 if (size % align != 0)
7754 size = CEIL (size, align) * align;
7755 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
7756 emit_move_insn (reg,
7757 change_address (result, mode,
7758 plus_constant (XEXP (result, 0),
7759 size)));
7760
7761 push_to_sequence (use_insns);
7762 emit_insn (gen_rtx (USE, VOIDmode, reg));
7763 use_insns = get_insns ();
7764 end_sequence ();
7765 size += GET_MODE_SIZE (mode);
7766 }
7767
7768 /* Put the USE insns before the return. */
7769 emit_insns (use_insns);
7770
7771 /* Return whatever values were restored by jumping directly to the end
7772 of the function. */
7773 expand_null_return ();
7774}
7775\f
7776/* Expand code for a post- or pre-increment or decrement
7777 and return the RTX for the result.
7778 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
7779
7780static rtx
7781expand_increment (exp, post)
7782 register tree exp;
7783 int post;
7784{
7785 register rtx op0, op1;
7786 register rtx temp, value;
7787 register tree incremented = TREE_OPERAND (exp, 0);
7788 optab this_optab = add_optab;
7789 int icode;
7790 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7791 int op0_is_copy = 0;
7792 int single_insn = 0;
7793 /* 1 means we can't store into OP0 directly,
7794 because it is a subreg narrower than a word,
7795 and we don't dare clobber the rest of the word. */
7796 int bad_subreg = 0;
7797
7798 if (output_bytecode)
7799 {
7800 bc_expand_expr (exp);
7801 return NULL_RTX;
7802 }
7803
7804 /* Stabilize any component ref that might need to be
7805 evaluated more than once below. */
7806 if (!post
7807 || TREE_CODE (incremented) == BIT_FIELD_REF
7808 || (TREE_CODE (incremented) == COMPONENT_REF
7809 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
7810 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
7811 incremented = stabilize_reference (incremented);
7812 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
7813 ones into save exprs so that they don't accidentally get evaluated
7814 more than once by the code below. */
7815 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
7816 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
7817 incremented = save_expr (incremented);
7818
7819 /* Compute the operands as RTX.
7820 Note whether OP0 is the actual lvalue or a copy of it:
7821 I believe it is a copy iff it is a register or subreg
7822 and insns were generated in computing it. */
7823
7824 temp = get_last_insn ();
7825 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
7826
7827 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
7828 in place but instead must do sign- or zero-extension during assignment,
7829 so we copy it into a new register and let the code below use it as
7830 a copy.
7831
7832 Note that we can safely modify this SUBREG since it is known not to be
7833 shared (it was made by the expand_expr call above). */
7834
7835 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
7836 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
7837 else if (GET_CODE (op0) == SUBREG
7838 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
7839 bad_subreg = 1;
7840
7841 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
7842 && temp != get_last_insn ());
7843 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7844
7845 /* Decide whether incrementing or decrementing. */
7846 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
7847 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7848 this_optab = sub_optab;
7849
7850 /* Convert decrement by a constant into a negative increment. */
7851 if (this_optab == sub_optab
7852 && GET_CODE (op1) == CONST_INT)
7853 {
7854 op1 = GEN_INT (- INTVAL (op1));
7855 this_optab = add_optab;
7856 }
7857
7858 /* For a preincrement, see if we can do this with a single instruction. */
7859 if (!post)
7860 {
7861 icode = (int) this_optab->handlers[(int) mode].insn_code;
7862 if (icode != (int) CODE_FOR_nothing
7863 /* Make sure that OP0 is valid for operands 0 and 1
7864 of the insn we want to queue. */
7865 && (*insn_operand_predicate[icode][0]) (op0, mode)
7866 && (*insn_operand_predicate[icode][1]) (op0, mode)
7867 && (*insn_operand_predicate[icode][2]) (op1, mode))
7868 single_insn = 1;
7869 }
7870
7871 /* If OP0 is not the actual lvalue, but rather a copy in a register,
7872 then we cannot just increment OP0. We must therefore contrive to
7873 increment the original value. Then, for postincrement, we can return
7874 OP0 since it is a copy of the old value. For preincrement, expand here
7875 unless we can do it with a single insn.
7876
7877 Likewise if storing directly into OP0 would clobber high bits
7878 we need to preserve (bad_subreg). */
7879 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
7880 {
7881 /* This is the easiest way to increment the value wherever it is.
7882 Problems with multiple evaluation of INCREMENTED are prevented
7883 because either (1) it is a component_ref or preincrement,
7884 in which case it was stabilized above, or (2) it is an array_ref
7885 with constant index in an array in a register, which is
7886 safe to reevaluate. */
7887 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
7888 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7889 ? MINUS_EXPR : PLUS_EXPR),
7890 TREE_TYPE (exp),
7891 incremented,
7892 TREE_OPERAND (exp, 1));
7893 temp = expand_assignment (incremented, newexp, ! post, 0);
7894 return post ? op0 : temp;
7895 }
7896
7897 if (post)
7898 {
7899 /* We have a true reference to the value in OP0.
7900 If there is an insn to add or subtract in this mode, queue it.
7901 Queueing the increment insn avoids the register shuffling
7902 that often results if we must increment now and first save
7903 the old value for subsequent use. */
7904
7905#if 0 /* Turned off to avoid making extra insn for indexed memref. */
7906 op0 = stabilize (op0);
7907#endif
7908
7909 icode = (int) this_optab->handlers[(int) mode].insn_code;
7910 if (icode != (int) CODE_FOR_nothing
7911 /* Make sure that OP0 is valid for operands 0 and 1
7912 of the insn we want to queue. */
7913 && (*insn_operand_predicate[icode][0]) (op0, mode)
7914 && (*insn_operand_predicate[icode][1]) (op0, mode))
7915 {
7916 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
7917 op1 = force_reg (mode, op1);
7918
7919 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
7920 }
7921 }
7922
7923 /* Preincrement, or we can't increment with one simple insn. */
7924 if (post)
7925 /* Save a copy of the value before inc or dec, to return it later. */
7926 temp = value = copy_to_reg (op0);
7927 else
7928 /* Arrange to return the incremented value. */
7929 /* Copy the rtx because expand_binop will protect from the queue,
7930 and the results of that would be invalid for us to return
7931 if our caller does emit_queue before using our result. */
7932 temp = copy_rtx (value = op0);
7933
7934 /* Increment however we can. */
7935 op1 = expand_binop (mode, this_optab, value, op1, op0,
7936 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
7937 /* Make sure the value is stored into OP0. */
7938 if (op1 != op0)
7939 emit_move_insn (op0, op1);
7940
7941 return temp;
7942}
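/* A source-level sketch of the distinction preserved above. Given

       int i = 5;
       int a = i++;      (post: TEMP keeps the old 5; i becomes 6)
       int b = ++i;      (pre: i is incremented first; b gets 7)

   the postincrement path copies OP0 (or queues the add) so the old
   value survives, while the preincrement path returns the value
   produced by the addition itself. */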
7943\f
7944/* Expand all function calls contained within EXP, innermost ones first.
7945 But don't look within expressions that have sequence points.
7946 For each CALL_EXPR, record the rtx for its value
7947 in the CALL_EXPR_RTL field. */
7948
7949static void
7950preexpand_calls (exp)
7951 tree exp;
7952{
7953 register int nops, i;
7954 int type = TREE_CODE_CLASS (TREE_CODE (exp));
7955
7956 if (! do_preexpand_calls)
7957 return;
7958
7959 /* Only expressions and references can contain calls. */
7960
7961 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
7962 return;
7963
7964 switch (TREE_CODE (exp))
7965 {
7966 case CALL_EXPR:
7967 /* Do nothing if already expanded. */
7968 if (CALL_EXPR_RTL (exp) != 0)
7969 return;
7970
7971 /* Do nothing to built-in functions. */
7972 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
7973 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
7974 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7975 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
7976 return;
7977
7978 case COMPOUND_EXPR:
7979 case COND_EXPR:
7980 case TRUTH_ANDIF_EXPR:
7981 case TRUTH_ORIF_EXPR:
7982 /* If we find one of these, then we can be sure
7983 the adjust will be done for it (since it makes jumps).
7984 Do it now, so that if this is inside an argument
7985 of a function, we don't get the stack adjustment
7986 after some other args have already been pushed. */
7987 do_pending_stack_adjust ();
7988 return;
7989
7990 case BLOCK:
7991 case RTL_EXPR:
7992 case WITH_CLEANUP_EXPR:
7993 return;
7994
7995 case SAVE_EXPR:
7996 if (SAVE_EXPR_RTL (exp) != 0)
7997 return;
7998 }
7999
8000 nops = tree_code_length[(int) TREE_CODE (exp)];
8001 for (i = 0; i < nops; i++)
8002 if (TREE_OPERAND (exp, i) != 0)
8003 {
8004 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8005 if (type == 'e' || type == '<' || type == '1' || type == '2'
8006 || type == 'r')
8007 preexpand_calls (TREE_OPERAND (exp, i));
8008 }
8009}
8010\f
8011/* At the start of a function, record that we have no previously-pushed
8012 arguments waiting to be popped. */
8013
8014void
8015init_pending_stack_adjust ()
8016{
8017 pending_stack_adjust = 0;
8018}
8019
8020/* When exiting from a function, if safe, clear out any pending stack adjust
8021 so the adjustment won't get done. */
8022
8023void
8024clear_pending_stack_adjust ()
8025{
8026#ifdef EXIT_IGNORE_STACK
8027 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8028 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8029 && ! flag_inline_functions)
8030 pending_stack_adjust = 0;
8031#endif
8032}
8033
8034/* Pop any previously-pushed arguments that have not been popped yet. */
8035
8036void
8037do_pending_stack_adjust ()
8038{
8039 if (inhibit_defer_pop == 0)
8040 {
8041 if (pending_stack_adjust != 0)
8042 adjust_stack (GEN_INT (pending_stack_adjust));
8043 pending_stack_adjust = 0;
8044 }
8045}
8046
8047/* Expand all cleanups up to OLD_CLEANUPS.
8048 Needed here, and also for language-dependent calls. */
8049
8050void
8051expand_cleanups_to (old_cleanups)
8052 tree old_cleanups;
8053{
8054 while (cleanups_this_call != old_cleanups)
8055 {
8056 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
8057 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8058 }
8059}
8060\f
8061/* Expand conditional expressions. */
8062
8063/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8064 LABEL is an rtx of code CODE_LABEL, in this function and all the
8065 functions here. */
8066
8067void
8068jumpifnot (exp, label)
8069 tree exp;
8070 rtx label;
8071{
8072 do_jump (exp, label, NULL_RTX);
8073}
8074
8075/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8076
8077void
8078jumpif (exp, label)
8079 tree exp;
8080 rtx label;
8081{
8082 do_jump (exp, NULL_RTX, label);
8083}
8084
8085/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8086 the result is zero, or IF_TRUE_LABEL if the result is one.
8087 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8088 meaning fall through in that case.
8089
8090 do_jump always does any pending stack adjust except when it does not
8091 actually perform a jump. An example where there is no jump
8092 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8093
8094 This function is responsible for optimizing cases such as
8095 &&, || and comparison operators in EXP. */
8096
8097void
8098do_jump (exp, if_false_label, if_true_label)
8099 tree exp;
8100 rtx if_false_label, if_true_label;
8101{
8102 register enum tree_code code = TREE_CODE (exp);
8103 /* Some cases need to create a label to jump to
8104 in order to properly fall through.
8105 These cases set DROP_THROUGH_LABEL nonzero. */
8106 rtx drop_through_label = 0;
8107 rtx temp;
8108 rtx comparison = 0;
8109 int i;
8110 tree type;
8111
8112 emit_queue ();
8113
8114 switch (code)
8115 {
8116 case ERROR_MARK:
8117 break;
8118
8119 case INTEGER_CST:
8120 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8121 if (temp)
8122 emit_jump (temp);
8123 break;
8124
8125#if 0
8126 /* This is not true with #pragma weak */
8127 case ADDR_EXPR:
8128 /* The address of something can never be zero. */
8129 if (if_true_label)
8130 emit_jump (if_true_label);
8131 break;
8132#endif
8133
8134 case NOP_EXPR:
8135 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8136 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8137 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8138 goto normal;
8139 case CONVERT_EXPR:
8140 /* If we are narrowing the operand, we have to do the compare in the
8141 narrower mode. */
8142 if ((TYPE_PRECISION (TREE_TYPE (exp))
8143 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8144 goto normal;
8145 case NON_LVALUE_EXPR:
8146 case REFERENCE_EXPR:
8147 case ABS_EXPR:
8148 case NEGATE_EXPR:
8149 case LROTATE_EXPR:
8150 case RROTATE_EXPR:
8151 /* These cannot change zero->non-zero or vice versa. */
8152 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8153 break;
8154
8155#if 0
8156 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
8157 a test and can be longer if the test is eliminated. */
8158 case PLUS_EXPR:
8159 /* Reduce to minus. */
8160 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8161 TREE_OPERAND (exp, 0),
8162 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8163 TREE_OPERAND (exp, 1))));
8164 /* Process as MINUS. */
8165#endif
8166
8167 case MINUS_EXPR:
8168 /* Non-zero iff operands of minus differ. */
8169 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8170 TREE_OPERAND (exp, 0),
8171 TREE_OPERAND (exp, 1)),
8172 NE, NE);
8173 break;
8174
8175 case BIT_AND_EXPR:
8176 /* If we are AND'ing with a small constant, do this comparison in the
8177 smallest type that fits. If the machine doesn't have comparisons
8178 that small, it will be converted back to the wider comparison.
8179 This helps if we are testing the sign bit of a narrower object.
8180 combine can't do this for us because it can't know whether a
8181 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8182
8183 if (! SLOW_BYTE_ACCESS
8184 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8185 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8186 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8187 && (type = type_for_size (i + 1, 1)) != 0
8188 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8189 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8190 != CODE_FOR_nothing))
8191 {
8192 do_jump (convert (type, exp), if_false_label, if_true_label);
8193 break;
8194 }
8195 goto normal;
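/* An illustrative case for the narrowing above: in

       unsigned char c;
       ... if (c & 0x80) ...

   floor_log2 (0x80) is 7, so type_for_size yields an 8-bit type and
   the test can be emitted as a QImode compare rather than a full-word
   one, provided the machine has a QImode compare insn. */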
8196
8197 case TRUTH_NOT_EXPR:
8198 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8199 break;
8200
8201 case TRUTH_ANDIF_EXPR:
8202 if (if_false_label == 0)
8203 if_false_label = drop_through_label = gen_label_rtx ();
8204 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8205 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8206 break;
8207
8208 case TRUTH_ORIF_EXPR:
8209 if (if_true_label == 0)
8210 if_true_label = drop_through_label = gen_label_rtx ();
8211 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8212 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8213 break;
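/* A sketch of the short-circuit expansion above: for a condition like

       if (a && b) ...

   do_jump on the TRUTH_ANDIF_EXPR first jumps to the false label when
   `a' is zero, then falls through to test `b' the same way, so `b' is
   never evaluated once `a' has failed; TRUTH_ORIF_EXPR mirrors this
   with the true label. */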
8214
8215 case COMPOUND_EXPR:
8216 push_temp_slots ();
8217 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8218 free_temp_slots ();
8219 pop_temp_slots ();
8220 emit_queue ();
8221 do_pending_stack_adjust ();
8222 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8223 break;
8224
8225 case COMPONENT_REF:
8226 case BIT_FIELD_REF:
8227 case ARRAY_REF:
8228 {
8229 int bitsize, bitpos, unsignedp;
8230 enum machine_mode mode;
8231 tree type;
8232 tree offset;
8233 int volatilep = 0;
8234
8235 /* Get description of this reference. We don't actually care
8236 about the underlying object here. */
8237 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8238 &mode, &unsignedp, &volatilep);
8239
8240 type = type_for_size (bitsize, unsignedp);
8241 if (! SLOW_BYTE_ACCESS
8242 && type != 0 && bitsize >= 0
8243 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8244 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8245 != CODE_FOR_nothing))
8246 {
8247 do_jump (convert (type, exp), if_false_label, if_true_label);
8248 break;
8249 }
8250 goto normal;
8251 }
8252
8253 case COND_EXPR:
8254 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8255 if (integer_onep (TREE_OPERAND (exp, 1))
8256 && integer_zerop (TREE_OPERAND (exp, 2)))
8257 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8258
8259 else if (integer_zerop (TREE_OPERAND (exp, 1))
8260 && integer_onep (TREE_OPERAND (exp, 2)))
8261 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8262
8263 else
8264 {
8265 register rtx label1 = gen_label_rtx ();
8266 drop_through_label = gen_label_rtx ();
8267 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8268 /* Now the THEN-expression. */
8269 do_jump (TREE_OPERAND (exp, 1),
8270 if_false_label ? if_false_label : drop_through_label,
8271 if_true_label ? if_true_label : drop_through_label);
8272 /* In case the do_jump just above never jumps. */
8273 do_pending_stack_adjust ();
8274 emit_label (label1);
8275 /* Now the ELSE-expression. */
8276 do_jump (TREE_OPERAND (exp, 2),
8277 if_false_label ? if_false_label : drop_through_label,
8278 if_true_label ? if_true_label : drop_through_label);
8279 }
8280 break;
8281
8282 case EQ_EXPR:
8283 if (integer_zerop (TREE_OPERAND (exp, 1)))
8284 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8285 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8286 == MODE_INT)
8287 &&
8288 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8289 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8290 else
8291 comparison = compare (exp, EQ, EQ);
8292 break;
8293
8294 case NE_EXPR:
8295 if (integer_zerop (TREE_OPERAND (exp, 1)))
8296 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8297 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8298 == MODE_INT)
8299 &&
8300 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8301 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8302 else
8303 comparison = compare (exp, NE, NE);
8304 break;
8305
8306 case LT_EXPR:
8307 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8308 == MODE_INT)
8309 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8310 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8311 else
8312 comparison = compare (exp, LT, LTU);
8313 break;
8314
8315 case LE_EXPR:
8316 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8317 == MODE_INT)
8318 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8319 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8320 else
8321 comparison = compare (exp, LE, LEU);
8322 break;
8323
8324 case GT_EXPR:
8325 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8326 == MODE_INT)
8327 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8328 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8329 else
8330 comparison = compare (exp, GT, GTU);
8331 break;
0006469d 8332
ca695ac9
JB
8333 case GE_EXPR:
8334 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8335 == MODE_INT)
8336 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8337 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8338 else
8339 comparison = compare (exp, GE, GEU);
8340 break;
0006469d 8341
ca695ac9
JB
8342 default:
8343 normal:
8344 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8345#if 0
8346 /* This is not needed any more and causes poor code since it causes
8347 comparisons and tests from non-SI objects to have different code
8348 sequences. */
8349 /* Copy to register to avoid generating bad insns by cse
8350 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8351 if (!cse_not_expected && GET_CODE (temp) == MEM)
8352 temp = copy_to_reg (temp);
8353#endif
8354 do_pending_stack_adjust ();
8355 if (GET_CODE (temp) == CONST_INT)
8356 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8357 else if (GET_CODE (temp) == LABEL_REF)
8358 comparison = const_true_rtx;
8359 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8360 && !can_compare_p (GET_MODE (temp)))
8361 /* Note swapping the labels gives us not-equal. */
8362 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8363 else if (GET_MODE (temp) != VOIDmode)
8364 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8365 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8366 GET_MODE (temp), NULL_RTX, 0);
8367 else
8368 abort ();
8369 }
0006469d 8370
ca695ac9
JB
8371 /* Do any postincrements in the expression that was tested. */
8372 emit_queue ();
0006469d 8373
ca695ac9
JB
8374 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8375 straight into a conditional jump instruction as the jump condition.
8376 Otherwise, all the work has been done already. */
0006469d 8377
ca695ac9 8378 if (comparison == const_true_rtx)
0006469d 8379 {
ca695ac9
JB
8380 if (if_true_label)
8381 emit_jump (if_true_label);
0006469d 8382 }
ca695ac9
JB
8383 else if (comparison == const0_rtx)
8384 {
8385 if (if_false_label)
8386 emit_jump (if_false_label);
8387 }
8388 else if (comparison)
8389 do_jump_for_compare (comparison, if_false_label, if_true_label);
0006469d 8390
ca695ac9 8391 if (drop_through_label)
0006469d 8392 {
ca695ac9
JB
8393 /* If do_jump produces code that might be jumped around,
8394 do any stack adjusts from that code, before the place
8395 where control merges in. */
8396 do_pending_stack_adjust ();
8397 emit_label (drop_through_label);
8398 }
8399}
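
/* Editorial note (added for this annotated listing, not part of GCC):
   the general COND_EXPR case above amounts to the following source-level
   rewrite.  A minimal sketch in plain C, assuming A, B and C are
   side-effect-free int expressions; do_jump (A, label1, NULL_RTX) means
   "jump to label1 when A is false, fall through when true".  */
#if 0
void
cond_expr_lowering_sketch (int a, int b, int c)
{
  if (!a)
    goto label1;		/* do_jump on the condition */
  /* THEN-expression.  */
  if (b) goto if_true; else goto if_false;
label1:
  /* ELSE-expression.  */
  if (c) goto if_true; else goto if_false;
if_true:
  /* ... code reached when (a ? b : c) is nonzero ... */
  return;
if_false:
  /* ... code reached when (a ? b : c) is zero ... */
  return;
}
#endif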
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
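
/* Editorial note (added for this annotated listing, not part of GCC):
   the word-by-word loop above implements the same algorithm as this
   standalone sketch, which compares two NWORDS-word numbers stored
   most-significant word first.  The sketch uses unsigned words
   throughout; in the compiler, only the high-order word uses a signed
   compare when the operands are signed, mirroring the
   (unsignedp || i > 0) ? GTU : GT selection above.  */
#if 0
static int
wide_greater_than (unsigned long *op0, unsigned long *op1, int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)	/* high-order word first */
    {
      if (op0[i] > op1[i])
	return 1;		/* branch to if_true_label */
      if (op0[i] != op1[i])
	return 0;		/* branch to if_false_label */
      /* Words equal: consider the next lower word.  */
    }
  return 0;			/* all words equal, so not greater */
}
#endif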

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
				   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
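
/* Editorial note (added for this annotated listing, not part of GCC):
   both equality routines above reduce to this word loop -- branch to
   the false label as soon as any pair of words differs, and reach the
   true label only when every word matched.  A sketch assuming unsigned
   words; comparing against zero, as the _rtx variant does, is the same
   loop with op1 all zeros.  */
#if 0
static int
wide_equal (unsigned long *op0, unsigned long *op1, int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    if (op0[i] != op1[i])
      return 0;			/* branch to if_false_label */
  return 1;			/* fall through to if_true_label */
}
#endif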

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      if (prev != 0)
	prev = PREV_INSN (prev);

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();

      /* Here we get the insn before what was just emitted.
	 On some machines, emitting the branch can discard
	 the previous compare insn and emit a replacement.  */
      if (prev == 0)
	/* If there's only one preceding insn...  */
	insn = get_insns ();
      else
	insn = NEXT_INSN (prev);

      for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
\f
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
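
/* Editorial note (added for this annotated listing, not part of GCC):
   moving the constant to the second position, as compare_from_rtx does
   above, requires replacing the comparison code with its "swapped"
   counterpart (5 < x becomes x > 5), not its inverse.  A sketch of
   what swap_condition computes, with illustrative enum names; the real
   function lives elsewhere in the compiler (jump.c).  */
#if 0
enum cond_sketch { EQ_, NE_, GT_, GE_, LT_, LE_, GTU_, GEU_, LTU_, LEU_ };

static enum cond_sketch
swap_condition_sketch (enum cond_sketch code)
{
  switch (code)
    {
    case GT_:  return LT_;
    case GE_:  return LE_;
    case LT_:  return GT_;
    case LE_:  return GE_;
    case GTU_: return LTU_;
    case GEU_: return LEU_;
    case LTU_: return GTU_;
    case LEU_: return GEU_;
    default:   return code;	/* EQ and NE are symmetric */
    }
}
#endif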
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
						    NULL_RTX, VOIDmode, 0)));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), target, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, target);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything at all, it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
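
/* Editorial note (added for this annotated listing, not part of GCC):
   the single-bit special case in do_store_flag corresponds to this
   source-level identity, sketched for a 32-bit unsigned int.  Testing
   (x & (1 << n)) != 0 becomes a shift and a mask, and the EQ form just
   xors the result with 1; when n is the top bit, the final mask can be
   omitted, as the bitnum == TYPE_PRECISION - 1 test above does.  */
#if 0
static unsigned int
single_bit_ne (unsigned int x, int bitnum)
{
  /* (x & (1 << bitnum)) != 0 */
  return (x >> bitnum) & 1;
}

static unsigned int
single_bit_eq (unsigned int x, int bitnum)
{
  /* (x & (1 << bitnum)) == 0 */
  return ((x >> bitnum) & 1) ^ 1;
}
#endif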
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
		   gen_rtx (MULT, Pmode, index,
			    GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
		   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */
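
/* Editorial note (added for this annotated listing, not part of GCC):
   do_tablejump above emits the moral equivalent of this C fragment,
   where INDEX already has the lowest case value subtracted.  Sketch
   assumes a table of code addresses; the real table holds label
   references in CASE_VECTOR_MODE.  */
#if 0
typedef void (*case_label) (void);

static void
tablejump_sketch (unsigned long index, unsigned long range,
		  case_label *table, case_label default_label)
{
  /* One unsigned compare checks both bounds at once: an index below
     the original minimum wraps around to a huge unsigned value.  */
  if (index > range)
    default_label ();
  else
    table[index] ();
}
#endif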

/* Emit a suitable bytecode to load a value from memory, assuming a pointer
   to that value is on the top of the stack.  The resulting type is TYPE, and
   the source declaration is DECL.  */

void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  /* Bit fields are special.  We only know about signed and
     unsigned ints, and enums.  The latter are treated as
     signed integers.  */

  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
	|| TREE_CODE (type) == INTEGER_TYPE)
      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
    else
      abort ();
  else
    /* See corresponding comment in bc_store_memory().  */
    if (TYPE_MODE (type) == BLKmode
	|| TYPE_MODE (type) == VOIDmode)
      return;
    else
      opcode = mode_to_load_map [(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Store the contents of the second stack slot to the address in the
   top stack slot.  DECL is the declaration of the destination and is used
   to determine whether we're dealing with a bitfield.  */

void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  if (DECL_BIT_FIELD (decl))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE
	  || TREE_CODE (type) == INTEGER_TYPE)
	opcode = sstoreBI;
      else
	abort ();
    }
  else
    if (TYPE_MODE (type) == BLKmode)
      {
	/* Copy structure.  This expands to a block copy instruction, storeBLK.
	   In addition to the arguments expected by the other store instructions,
	   it also expects a type size (SImode) on top of the stack, which is the
	   structure size in size units (usually bytes).  The first two arguments
	   are already on the stack, so we just put the size on level 1.  For some
	   other languages the size may be variable, which is why we don't encode
	   it as a storeBLK literal, but rather treat it as a full-fledged
	   expression.  */

	bc_expand_expr (TYPE_SIZE (type));
	opcode = storeBLK;
      }
    else
      opcode = mode_to_store_map [(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Allocate local stack space sufficient to hold a value of the given
   SIZE at alignment boundary ALIGNMENT bits.  ALIGNMENT must be an
   integral power of 2.  A special case is locals of type VOID, which
   have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
   remapped into the corresponding attribute of SI.  */

rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;

  if (size < 0)
    abort ();

  /* Normalize size and alignment.  */
  if (!size)
    size = UNITS_PER_WORD;

  if (alignment < BITS_PER_UNIT)
    byte_alignment = 1 << (INT_ALIGN - 1);
  else
    /* Align */
    byte_alignment = alignment / BITS_PER_UNIT;

  if (local_vars_size & (byte_alignment - 1))
    local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}
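
/* Editorial note (added for this annotated listing, not part of GCC):
   the masked adjustment of local_vars_size above is the usual
   round-up-to-alignment idiom.  For a power-of-two ALIGN it is
   equivalent to this sketch.  */
#if 0
static int
round_up_sketch (int offset, int align)	/* align must be a power of 2 */
{
  if (offset & (align - 1))
    offset += align - (offset & (align - 1));
  /* Same result as: offset = (offset + align - 1) & ~(align - 1);  */
  return offset;
}
#endif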

/* Allocate variable-sized local array.  Variable-sized arrays are
   actually pointers to the address in memory where they are stored.  */

rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));

  /* Align pointer.  */
  if (local_vars_size & ptralign)
    local_vars_size += ptralign - (local_vars_size & ptralign);

  /* Note down local space needed: pointer to block; also return
     dummy rtx.  */

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
  return retval;
}

/* Push the machine address for the given external variable offset.  */
void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
			 BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

static char *
bc_strdup (s)
     char *s;
{
  char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
  strcpy (new, s);
  return new;
}

/* Like bc_load_externaddr above, but expects an IDENTIFIER.  */
void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Push the machine address for the given local variable offset.  */
void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}

/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */
void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
			      / BITS_PER_UNIT));
}

/* Convert a[i] into *(a + i).  */
tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
  tree type = TREE_TYPE (exp);
  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
			   TREE_OPERAND (exp, 0));
  tree index = TREE_OPERAND (exp, 1);

  /* Convert the integer argument to a type the same size as a pointer
     so the multiply won't overflow spuriously.  */

  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
    index = convert (type_for_size (POINTER_SIZE, 0), index);

  /* The array address isn't volatile even if the array is.
     (Of course this isn't terribly relevant since the bytecode
     translator treats nearly everything as volatile anyway.)  */
  TREE_THIS_VOLATILE (array_adr) = 0;

  return build1 (INDIRECT_REF, type,
		 fold (build (PLUS_EXPR,
			      TYPE_POINTER_TO (type),
			      array_adr,
			      fold (build (MULT_EXPR,
					   TYPE_POINTER_TO (type),
					   index,
					   size_in_bytes (type))))));
}
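
/* Editorial note (added for this annotated listing, not part of GCC):
   the tree built above encodes the usual pointer arithmetic for array
   indexing.  In source terms, for an element type of ELT_SIZE bytes,
   a[i] is rewritten to the explicit form below; the compiler does this
   on trees, folding the multiply when the index is constant.  */
#if 0
static char *
array_ref_sketch (char *a, unsigned long i, unsigned long elt_size)
{
  /* a[i]  ==>  *(a + i * elt_size), with i widened to pointer size
     first so the multiply cannot overflow spuriously.  */
  return a + i * elt_size;
}
#endif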

/* Load the address of the component referenced by the given
   COMPONENT_REF expression.

   Returns innermost lvalue.  */

tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem, chain;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;

  tem = TREE_OPERAND (exp, 1);
  mode = DECL_MODE (tem);

  /* Compute cumulative bit offset for nested component refs
     and array refs, and find the ultimate containing object.  */

  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
	bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else
	if (TREE_CODE (tem) == ARRAY_REF
	    && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	    && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)

	  bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
		     * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
		     /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
	else
	  break;
    }

  bc_expand_expr (tem);

  /* For bitfields also push their offset and size.  */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
  else
    if ((SIval = bitpos / BITS_PER_UNIT))
      bc_emit_instruction (addconstPSI, SIval);

  return (TREE_OPERAND (exp, 1));
}

/* Emit code to push two SI constants.  */
void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}

/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);

  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:

      return (bc_expand_component_address (exp));

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
	 TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
	 also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
	  && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
				 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:

      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			     BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer.  */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case RESULT_DECL:

      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

#if 0
      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
#endif

      if (DECL_EXTERNAL (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer.  */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case STRING_CST:
      {
	rtx r;

	bc_emit_bytecode (constP);
	r = output_constant_def (exp);
	bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
	fputc ('\n', stderr);
#endif
      }
      break;

    default:

      abort ();
      break;
    }

  /* Most lvalues don't have components.  */
  return (exp);
}

/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   plus the minimal alignment shifted left 8 bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:

      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    case ERROR_MARK:

      val = 0;
      break;

    default:

      abort ();
    }
  return build_int_2 (val, 0);
}
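
/* Editorial note (added for this annotated listing, not part of GCC):
   the type code above packs two small integers into one word -- the
   machine mode number in the low 8 bits and the alignment above it.
   A sketch of the encoding and its inverse, assuming the mode number
   fits in 8 bits.  */
#if 0
static int
pack_type_code (int mode, int align)
{
  return mode | align << 8;
}

static void
unpack_type_code (int val, int *mode, int *align)
{
  *mode = val & 0xff;
  *align = val >> 8;
}
#endif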

/* Generate constructor label.  */
char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}

/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;
  rtx constr_rtx;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* Constructor type is structure.  */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
	  != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
	{
	  bc_emit_instruction (dup);
	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}

      /* Store each element of the constructor into the corresponding
	 field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
	  mode = DECL_MODE (field);
	  unsignedp = TREE_UNSIGNED (field);

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

	  bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires.  */
			  VOIDmode, 0,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
  else

    /* Constructor type is array.  */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
	register tree elt;
	register int i;
	tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
	int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
	int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
	tree elttype = TREE_TYPE (TREE_TYPE (constr));

	/* If the constructor has fewer elements than the array,
	   clear the whole array first.  */

	if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
	  {
	    bc_emit_instruction (dup);
	    bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	    bc_emit_instruction (clearBLK);
	  }

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */

	for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
	     elt;
	     elt = TREE_CHAIN (elt), i++)
	  {
	    register enum machine_mode mode;
	    int bitsize;
	    int bitpos;
	    int unsignedp;

	    mode = TYPE_MODE (elttype);
	    bitsize = GET_MODE_BITSIZE (mode);
	    unsignedp = TREE_UNSIGNED (elttype);

	    bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
		      /* * TYPE_SIZE_UNIT (elttype) */ );

	    bc_store_field (elt, bitsize, bitpos, mode,
			    TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			    /* The alignment of TARGET is
			       at least what its type requires.  */
			    VOIDmode, 0,
			    TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (constr)));
	  }
      }
}
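
/* Editorial note (added for this annotated listing, not part of GCC):
   the clear-then-store strategy above corresponds to this sketch.
   When an initializer does not cover every field or element, the whole
   object is zeroed first (the clearBLK step), so unmentioned parts end
   up zero, and then each given element is stored at its offset.  The
   struct and offsets here are illustrative only.  */
#if 0
#include <string.h>

struct sketch_elt
{
  unsigned long offset;		/* byte offset within the object */
  unsigned long size;		/* byte size of this member */
  const void *value;		/* bytes to store */
};

static void
constructor_sketch (void *object, unsigned long object_size,
		    struct sketch_elt *elts, int n_elts, int covers_all)
{
  int i;

  if (! covers_all)
    memset (object, 0, object_size);	/* the clearBLK step */

  for (i = 0; i < n_elts; i++)		/* the per-member stores */
    memcpy ((char *) object + elts[i].offset, elts[i].value, elts[i].size);
}
#endif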

/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
		value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer.  */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    {
      bc_store_bit_field (bitpos, bitsize, unsignedp);
      return;
    }
  else
    /* Not bit field */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member.  */
      if (offset)
	bc_emit_instruction (addconstPSI, offset);

      /* Store */
      bc_store_memory (type, field);
    }
}

/* Store SI/SU in bitfield.  */
void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Store */
  bc_emit_instruction (sstoreBI);
}

/* Load SI/SU from bitfield.  */
void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend.  */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
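
/* Editorial note (added for this annotated listing, not part of GCC):
   what the interpreter's zxloadBI/sxloadBI operations must compute for
   a field of SIZE bits starting at bit OFFSET within a word.  A sketch
   assuming 32-bit words, 0 < size <= 32 and offset + size <= 32; the
   sign-extending variant relies on >> of a negative long being an
   arithmetic shift, which is implementation-defined but universal in
   practice.  */
#if 0
static unsigned long
zero_extend_field (unsigned long word, int offset, int size)
{
  unsigned long mask = (size == 32) ? ~0UL : (1UL << size) - 1;
  return (word >> offset) & mask;
}

static long
sign_extend_field (unsigned long word, int offset, int size)
{
  /* Left-justify the field, then arithmetic-shift it back down.  */
  return (long) (word << (32 - offset - size)) >> (32 - size);
}
#endif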

/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);

      /* Fall through to drop the second level.  */
    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}