]> gcc.gnu.org Git - gcc.git/blame - gcc/expr.c
(expand_complex_abs): Replace some erroneous uses of MODE with
[gcc.git] / gcc / expr.c
CommitLineData
bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
4be204f0 2 Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.
bbf6f052
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21#include "config.h"
ca695ac9 22#include "machmode.h"
bbf6f052
RK
23#include "rtl.h"
24#include "tree.h"
ca695ac9 25#include "obstack.h"
bbf6f052
RK
26#include "flags.h"
27#include "function.h"
28#include "insn-flags.h"
29#include "insn-codes.h"
30#include "expr.h"
31#include "insn-config.h"
32#include "recog.h"
33#include "output.h"
bbf6f052
RK
34#include "typeclass.h"
35
ca695ac9
JB
36#include "bytecode.h"
37#include "bc-opcode.h"
38#include "bc-typecd.h"
39#include "bc-optab.h"
40#include "bc-emit.h"
41
42
bbf6f052
RK
43#define CEIL(x,y) (((x) + (y) - 1) / (y))
44
45/* Decide whether a function's arguments should be processed
bbc8a071
RK
46 from first to last or from last to first.
47
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
bbf6f052 50
bbf6f052 51#ifdef PUSH_ROUNDING
bbc8a071 52
3319a347 53#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
bbf6f052
RK
54#define PUSH_ARGS_REVERSED /* If it's last to first */
55#endif
bbc8a071 56
bbf6f052
RK
57#endif
58
59#ifndef STACK_PUSH_CODE
60#ifdef STACK_GROWS_DOWNWARD
61#define STACK_PUSH_CODE PRE_DEC
62#else
63#define STACK_PUSH_CODE PRE_INC
64#endif
65#endif
66
67/* Like STACK_BOUNDARY but in units of bytes, not bits. */
68#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
69
70/* If this is nonzero, we do not bother generating VOLATILE
71 around volatile memory references, and we are willing to
72 output indirect addresses. If cse is to follow, we reject
73 indirect addresses so a useful potential cse is generated;
74 if it is used only once, instruction combination will produce
75 the same indirect address eventually. */
76int cse_not_expected;
77
78/* Nonzero to generate code for all the subroutines within an
79 expression before generating the upper levels of the expression.
80 Nowadays this is never zero. */
81int do_preexpand_calls = 1;
82
83/* Number of units that we should eventually pop off the stack.
84 These are the arguments to function calls that have already returned. */
85int pending_stack_adjust;
86
87/* Nonzero means stack pops must not be deferred, and deferred stack
88 pops must not be output. It is nonzero inside a function call,
89 inside a conditional expression, inside a statement expression,
90 and in other cases as well. */
91int inhibit_defer_pop;
92
93/* A list of all cleanups which belong to the arguments of
94 function calls being expanded by expand_call. */
95tree cleanups_this_call;
96
97/* Nonzero means __builtin_saveregs has already been done in this function.
98 The value is the pseudoreg containing the value __builtin_saveregs
99 returned. */
100static rtx saveregs_value;
101
dcf76fff
TW
102/* Similarly for __builtin_apply_args. */
103static rtx apply_args_value;
104
4969d05d
RK
105/* This structure is used by move_by_pieces to describe the move to
106 be performed. */
107
108struct move_by_pieces
109{
110 rtx to;
111 rtx to_addr;
112 int autinc_to;
113 int explicit_inc_to;
114 rtx from;
115 rtx from_addr;
116 int autinc_from;
117 int explicit_inc_from;
118 int len;
119 int offset;
120 int reverse;
121};
122
c02bd5d9
JB
123/* Used to generate bytecodes: keep track of size of local variables,
124 as well as depth of arithmetic stack. (Notice that variables are
125 stored on the machine's stack, not the arithmetic stack.) */
126
127int local_vars_size;
128extern int stack_depth;
129extern int max_stack_depth;
292b1216 130extern struct obstack permanent_obstack;
c02bd5d9
JB
131
132
4969d05d
RK
133static rtx enqueue_insn PROTO((rtx, rtx));
134static int queued_subexp_p PROTO((rtx));
135static void init_queue PROTO((void));
136static void move_by_pieces PROTO((rtx, rtx, int, int));
137static int move_by_pieces_ninsns PROTO((unsigned int, int));
138static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
139 struct move_by_pieces *));
140static void group_insns PROTO((rtx));
141static void store_constructor PROTO((tree, rtx));
142static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
143 enum machine_mode, int, int, int));
144static tree save_noncopied_parts PROTO((tree, tree));
145static tree init_noncopied_parts PROTO((tree, tree));
146static int safe_from_p PROTO((rtx, tree));
147static int fixed_type_p PROTO((tree));
148static int get_pointer_alignment PROTO((tree, unsigned));
149static tree string_constant PROTO((tree, tree *));
150static tree c_strlen PROTO((tree));
151static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
0006469d
TW
152static int apply_args_size PROTO((void));
153static int apply_result_size PROTO((void));
154static rtx result_vector PROTO((int, rtx));
155static rtx expand_builtin_apply_args PROTO((void));
156static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
157static void expand_builtin_return PROTO((rtx));
4969d05d 158static rtx expand_increment PROTO((tree, int));
ca695ac9
JB
159rtx bc_expand_increment PROTO((struct increment_operator *, tree));
160tree bc_runtime_type_code PROTO((tree));
161rtx bc_allocate_local PROTO((int, int));
162void bc_store_memory PROTO((tree, tree));
163tree bc_expand_component_address PROTO((tree));
164tree bc_expand_address PROTO((tree));
165void bc_expand_constructor PROTO((tree));
166void bc_adjust_stack PROTO((int));
167tree bc_canonicalize_array_ref PROTO((tree));
168void bc_load_memory PROTO((tree, tree));
169void bc_load_externaddr PROTO((rtx));
170void bc_load_externaddr_id PROTO((tree, int));
171void bc_load_localaddr PROTO((rtx));
172void bc_load_parmaddr PROTO((rtx));
4969d05d
RK
173static void preexpand_calls PROTO((tree));
174static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
f81497d9 175static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
4969d05d
RK
176static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
177static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
178static void do_jump_for_compare PROTO((rtx, rtx, rtx));
179static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
180static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
bbf6f052 181
4fa52007
RK
182/* Record for each mode whether we can move a register directly to or
183 from an object of that mode in memory. If we can't, we won't try
184 to use that mode directly when accessing a field of that mode. */
185
186static char direct_load[NUM_MACHINE_MODES];
187static char direct_store[NUM_MACHINE_MODES];
188
bbf6f052
RK
189/* MOVE_RATIO is the number of move instructions that is better than
190 a block move. */
191
192#ifndef MOVE_RATIO
266007a7 193#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
bbf6f052
RK
194#define MOVE_RATIO 2
195#else
196/* A value of around 6 would minimize code size; infinity would minimize
197 execution time. */
198#define MOVE_RATIO 15
199#endif
200#endif
e87b4f3f 201
266007a7 202/* This array records the insn_code of insns to perform block moves. */
e6677db3 203enum insn_code movstr_optab[NUM_MACHINE_MODES];
266007a7 204
e87b4f3f
RS
205/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
206
207#ifndef SLOW_UNALIGNED_ACCESS
208#define SLOW_UNALIGNED_ACCESS 0
209#endif
0006469d
TW
210
211/* Register mappings for target machines without register windows. */
212#ifndef INCOMING_REGNO
213#define INCOMING_REGNO(OUT) (OUT)
214#endif
215#ifndef OUTGOING_REGNO
216#define OUTGOING_REGNO(IN) (IN)
217#endif
bbf6f052 218\f
ca695ac9
JB
219/* Maps used to convert modes to const, load, and store bytecodes. */
220enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
221enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
222enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
223
224/* Initialize maps used to convert modes to const, load, and store
225 bytecodes. */
226void
227bc_init_mode_to_opcode_maps ()
228{
229 int mode;
230
6bd6178d 231 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
ca695ac9
JB
232 mode_to_const_map[mode] =
233 mode_to_load_map[mode] =
234 mode_to_store_map[mode] = neverneverland;
235
236#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
6bd6178d
RK
237 mode_to_const_map[(int) SYM] = CONST; \
238 mode_to_load_map[(int) SYM] = LOAD; \
239 mode_to_store_map[(int) SYM] = STORE;
ca695ac9
JB
240
241#include "modemap.def"
242#undef DEF_MODEMAP
243}
244\f
4fa52007 245/* This is run once per compilation to set up which modes can be used
266007a7 246 directly in memory and to initialize the block move optab. */
4fa52007
RK
247
248void
249init_expr_once ()
250{
251 rtx insn, pat;
252 enum machine_mode mode;
e2549997
RS
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
4fa52007 256 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
e2549997 257 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
4fa52007
RK
258
259 start_sequence ();
260 insn = emit_insn (gen_rtx (SET, 0, 0));
261 pat = PATTERN (insn);
262
263 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
264 mode = (enum machine_mode) ((int) mode + 1))
265 {
266 int regno;
267 rtx reg;
268 int num_clobbers;
269
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
e2549997 272 PUT_MODE (mem1, mode);
4fa52007 273
e6fe56a4
RK
274 /* See if there is some register that can be used in this mode and
275 directly loaded or stored from memory. */
276
7308a047
RS
277 if (mode != VOIDmode && mode != BLKmode)
278 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
279 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
280 regno++)
281 {
282 if (! HARD_REGNO_MODE_OK (regno, mode))
283 continue;
e6fe56a4 284
7308a047 285 reg = gen_rtx (REG, mode, regno);
e6fe56a4 286
7308a047
RS
287 SET_SRC (pat) = mem;
288 SET_DEST (pat) = reg;
289 if (recog (pat, insn, &num_clobbers) >= 0)
290 direct_load[(int) mode] = 1;
e6fe56a4 291
e2549997
RS
292 SET_SRC (pat) = mem1;
293 SET_DEST (pat) = reg;
294 if (recog (pat, insn, &num_clobbers) >= 0)
295 direct_load[(int) mode] = 1;
296
7308a047
RS
297 SET_SRC (pat) = reg;
298 SET_DEST (pat) = mem;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_store[(int) mode] = 1;
e2549997
RS
301
302 SET_SRC (pat) = reg;
303 SET_DEST (pat) = mem1;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_store[(int) mode] = 1;
7308a047 306 }
4fa52007
RK
307 }
308
309 end_sequence ();
310}
311
bbf6f052
RK
312/* This is run at the start of compiling a function. */
313
314void
315init_expr ()
316{
317 init_queue ();
318
319 pending_stack_adjust = 0;
320 inhibit_defer_pop = 0;
321 cleanups_this_call = 0;
322 saveregs_value = 0;
0006469d 323 apply_args_value = 0;
e87b4f3f 324 forced_labels = 0;
bbf6f052
RK
325}
326
327/* Save all variables describing the current status into the structure *P.
328 This is used before starting a nested function. */
329
330void
331save_expr_status (p)
332 struct function *p;
333{
334 /* Instead of saving the postincrement queue, empty it. */
335 emit_queue ();
336
337 p->pending_stack_adjust = pending_stack_adjust;
338 p->inhibit_defer_pop = inhibit_defer_pop;
339 p->cleanups_this_call = cleanups_this_call;
340 p->saveregs_value = saveregs_value;
0006469d 341 p->apply_args_value = apply_args_value;
e87b4f3f 342 p->forced_labels = forced_labels;
bbf6f052
RK
343
344 pending_stack_adjust = 0;
345 inhibit_defer_pop = 0;
346 cleanups_this_call = 0;
347 saveregs_value = 0;
0006469d 348 apply_args_value = 0;
e87b4f3f 349 forced_labels = 0;
bbf6f052
RK
350}
351
352/* Restore all variables describing the current status from the structure *P.
353 This is used after a nested function. */
354
355void
356restore_expr_status (p)
357 struct function *p;
358{
359 pending_stack_adjust = p->pending_stack_adjust;
360 inhibit_defer_pop = p->inhibit_defer_pop;
361 cleanups_this_call = p->cleanups_this_call;
362 saveregs_value = p->saveregs_value;
0006469d 363 apply_args_value = p->apply_args_value;
e87b4f3f 364 forced_labels = p->forced_labels;
bbf6f052
RK
365}
366\f
367/* Manage the queue of increment instructions to be output
368 for POSTINCREMENT_EXPR expressions, etc. */
369
370static rtx pending_chain;
371
372/* Queue up to increment (or change) VAR later. BODY says how:
373 BODY should be the same thing you would pass to emit_insn
374 to increment right away. It will go to emit_insn later on.
375
376 The value is a QUEUED expression to be used in place of VAR
377 where you want to guarantee the pre-incrementation value of VAR. */
378
379static rtx
380enqueue_insn (var, body)
381 rtx var, body;
382{
383 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
906c4e36 384 var, NULL_RTX, NULL_RTX, body, pending_chain);
bbf6f052
RK
385 return pending_chain;
386}
387
388/* Use protect_from_queue to convert a QUEUED expression
389 into something that you can put immediately into an instruction.
390 If the queued incrementation has not happened yet,
391 protect_from_queue returns the variable itself.
392 If the incrementation has happened, protect_from_queue returns a temp
393 that contains a copy of the old value of the variable.
394
395 Any time an rtx which might possibly be a QUEUED is to be put
396 into an instruction, it must be passed through protect_from_queue first.
397 QUEUED expressions are not meaningful in instructions.
398
399 Do not pass a value through protect_from_queue and then hold
400 on to it for a while before putting it in an instruction!
401 If the queue is flushed in between, incorrect code will result. */
402
403rtx
404protect_from_queue (x, modify)
405 register rtx x;
406 int modify;
407{
408 register RTX_CODE code = GET_CODE (x);
409
410#if 0 /* A QUEUED can hang around after the queue is forced out. */
411 /* Shortcut for most common case. */
412 if (pending_chain == 0)
413 return x;
414#endif
415
416 if (code != QUEUED)
417 {
418 /* A special hack for read access to (MEM (QUEUED ...))
419 to facilitate use of autoincrement.
420 Make a copy of the contents of the memory location
421 rather than a copy of the address, but not
422 if the value is of mode BLKmode. */
423 if (code == MEM && GET_MODE (x) != BLKmode
424 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
425 {
426 register rtx y = XEXP (x, 0);
427 XEXP (x, 0) = QUEUED_VAR (y);
428 if (QUEUED_INSN (y))
429 {
430 register rtx temp = gen_reg_rtx (GET_MODE (x));
431 emit_insn_before (gen_move_insn (temp, x),
432 QUEUED_INSN (y));
433 return temp;
434 }
435 return x;
436 }
437 /* Otherwise, recursively protect the subexpressions of all
438 the kinds of rtx's that can contain a QUEUED. */
439 if (code == MEM)
3f15938e
RS
440 {
441 rtx tem = protect_from_queue (XEXP (x, 0), 0);
442 if (tem != XEXP (x, 0))
443 {
444 x = copy_rtx (x);
445 XEXP (x, 0) = tem;
446 }
447 }
bbf6f052
RK
448 else if (code == PLUS || code == MULT)
449 {
3f15938e
RS
450 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
451 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
452 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
453 {
454 x = copy_rtx (x);
455 XEXP (x, 0) = new0;
456 XEXP (x, 1) = new1;
457 }
bbf6f052
RK
458 }
459 return x;
460 }
461 /* If the increment has not happened, use the variable itself. */
462 if (QUEUED_INSN (x) == 0)
463 return QUEUED_VAR (x);
464 /* If the increment has happened and a pre-increment copy exists,
465 use that copy. */
466 if (QUEUED_COPY (x) != 0)
467 return QUEUED_COPY (x);
468 /* The increment has happened but we haven't set up a pre-increment copy.
469 Set one up now, and use it. */
470 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
471 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
472 QUEUED_INSN (x));
473 return QUEUED_COPY (x);
474}
475
476/* Return nonzero if X contains a QUEUED expression:
477 if it contains anything that will be altered by a queued increment.
478 We handle only combinations of MEM, PLUS, MINUS and MULT operators
479 since memory addresses generally contain only those. */
480
481static int
482queued_subexp_p (x)
483 rtx x;
484{
485 register enum rtx_code code = GET_CODE (x);
486 switch (code)
487 {
488 case QUEUED:
489 return 1;
490 case MEM:
491 return queued_subexp_p (XEXP (x, 0));
492 case MULT:
493 case PLUS:
494 case MINUS:
495 return queued_subexp_p (XEXP (x, 0))
496 || queued_subexp_p (XEXP (x, 1));
497 }
498 return 0;
499}
500
501/* Perform all the pending incrementations. */
502
503void
504emit_queue ()
505{
506 register rtx p;
507 while (p = pending_chain)
508 {
509 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
510 pending_chain = QUEUED_NEXT (p);
511 }
512}
513
514static void
515init_queue ()
516{
517 if (pending_chain)
518 abort ();
519}
520\f
521/* Copy data from FROM to TO, where the machine modes are not the same.
522 Both modes may be integer, or both may be floating.
523 UNSIGNEDP should be nonzero if FROM is an unsigned type.
524 This causes zero-extension instead of sign-extension. */
525
526void
527convert_move (to, from, unsignedp)
528 register rtx to, from;
529 int unsignedp;
530{
531 enum machine_mode to_mode = GET_MODE (to);
532 enum machine_mode from_mode = GET_MODE (from);
533 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
534 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
535 enum insn_code code;
536 rtx libcall;
537
538 /* rtx code for making an equivalent value. */
539 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
540
541 to = protect_from_queue (to, 1);
542 from = protect_from_queue (from, 0);
543
544 if (to_real != from_real)
545 abort ();
546
1499e0a8
RK
547 /* If FROM is a SUBREG that indicates that we have already done at least
548 the required extension, strip it. We don't handle such SUBREGs as
549 TO here. */
550
551 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
552 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
553 >= GET_MODE_SIZE (to_mode))
554 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
555 from = gen_lowpart (to_mode, from), from_mode = to_mode;
556
557 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
558 abort ();
559
bbf6f052
RK
560 if (to_mode == from_mode
561 || (from_mode == VOIDmode && CONSTANT_P (from)))
562 {
563 emit_move_insn (to, from);
564 return;
565 }
566
567 if (to_real)
568 {
81d79e2c
RS
569 rtx value;
570
b424402e
RS
571#ifdef HAVE_extendqfhf2
572 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
573 {
574 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
575 return;
576 }
577#endif
578#ifdef HAVE_extendqfsf2
579 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
580 {
581 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
582 return;
583 }
584#endif
585#ifdef HAVE_extendqfdf2
586 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
587 {
588 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
589 return;
590 }
591#endif
592#ifdef HAVE_extendqfxf2
593 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
594 {
595 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
596 return;
597 }
598#endif
599#ifdef HAVE_extendqftf2
600 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
601 {
602 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
603 return;
604 }
605#endif
606
607#ifdef HAVE_extendhfsf2
608 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
609 {
610 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
611 return;
612 }
613#endif
614#ifdef HAVE_extendhfdf2
615 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
616 {
617 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
618 return;
619 }
620#endif
621#ifdef HAVE_extendhfxf2
622 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
623 {
624 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
625 return;
626 }
627#endif
628#ifdef HAVE_extendhftf2
629 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
630 {
631 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
632 return;
633 }
634#endif
635
bbf6f052
RK
636#ifdef HAVE_extendsfdf2
637 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
638 {
639 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
640 return;
641 }
642#endif
b092b471
JW
643#ifdef HAVE_extendsfxf2
644 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
645 {
646 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
647 return;
648 }
649#endif
bbf6f052
RK
650#ifdef HAVE_extendsftf2
651 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
652 {
653 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
654 return;
655 }
656#endif
b092b471
JW
657#ifdef HAVE_extenddfxf2
658 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
659 {
660 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
661 return;
662 }
663#endif
bbf6f052
RK
664#ifdef HAVE_extenddftf2
665 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
666 {
667 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
668 return;
669 }
670#endif
b424402e
RS
671
672#ifdef HAVE_trunchfqf2
673 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
674 {
675 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
676 return;
677 }
678#endif
679#ifdef HAVE_truncsfqf2
680 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
681 {
682 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
683 return;
684 }
685#endif
686#ifdef HAVE_truncdfqf2
687 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
688 {
689 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
690 return;
691 }
692#endif
693#ifdef HAVE_truncxfqf2
694 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
695 {
696 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
697 return;
698 }
699#endif
700#ifdef HAVE_trunctfqf2
701 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
702 {
703 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
704 return;
705 }
706#endif
707#ifdef HAVE_truncsfhf2
708 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
709 {
710 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
711 return;
712 }
713#endif
714#ifdef HAVE_truncdfhf2
715 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
716 {
717 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
718 return;
719 }
720#endif
721#ifdef HAVE_truncxfhf2
722 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
723 {
724 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
725 return;
726 }
727#endif
728#ifdef HAVE_trunctfhf2
729 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
730 {
731 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
732 return;
733 }
734#endif
bbf6f052
RK
735#ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
737 {
738 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
739 return;
740 }
741#endif
b092b471
JW
742#ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
744 {
745 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
746 return;
747 }
748#endif
bbf6f052
RK
749#ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
751 {
752 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
753 return;
754 }
755#endif
b092b471
JW
756#ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
758 {
759 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
760 return;
761 }
762#endif
bbf6f052
RK
763#ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
765 {
766 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
767 return;
768 }
769#endif
770
b092b471
JW
771 libcall = (rtx) 0;
772 switch (from_mode)
773 {
774 case SFmode:
775 switch (to_mode)
776 {
777 case DFmode:
778 libcall = extendsfdf2_libfunc;
779 break;
780
781 case XFmode:
782 libcall = extendsfxf2_libfunc;
783 break;
784
785 case TFmode:
786 libcall = extendsftf2_libfunc;
787 break;
788 }
789 break;
790
791 case DFmode:
792 switch (to_mode)
793 {
794 case SFmode:
795 libcall = truncdfsf2_libfunc;
796 break;
797
798 case XFmode:
799 libcall = extenddfxf2_libfunc;
800 break;
801
802 case TFmode:
803 libcall = extenddftf2_libfunc;
804 break;
805 }
806 break;
807
808 case XFmode:
809 switch (to_mode)
810 {
811 case SFmode:
812 libcall = truncxfsf2_libfunc;
813 break;
814
815 case DFmode:
816 libcall = truncxfdf2_libfunc;
817 break;
818 }
819 break;
820
821 case TFmode:
822 switch (to_mode)
823 {
824 case SFmode:
825 libcall = trunctfsf2_libfunc;
826 break;
827
828 case DFmode:
829 libcall = trunctfdf2_libfunc;
830 break;
831 }
832 break;
833 }
834
835 if (libcall == (rtx) 0)
836 /* This conversion is not implemented yet. */
bbf6f052
RK
837 abort ();
838
81d79e2c
RS
839 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
840 1, from, from_mode);
841 emit_move_insn (to, value);
bbf6f052
RK
842 return;
843 }
844
845 /* Now both modes are integers. */
846
847 /* Handle expanding beyond a word. */
848 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
849 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
850 {
851 rtx insns;
852 rtx lowpart;
853 rtx fill_value;
854 rtx lowfrom;
855 int i;
856 enum machine_mode lowpart_mode;
857 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
858
859 /* Try converting directly if the insn is supported. */
860 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
861 != CODE_FOR_nothing)
862 {
cd1b4b44
RK
863 /* If FROM is a SUBREG, put it into a register. Do this
864 so that we always generate the same set of insns for
865 better cse'ing; if an intermediate assignment occurred,
866 we won't be doing the operation directly on the SUBREG. */
867 if (optimize > 0 && GET_CODE (from) == SUBREG)
868 from = force_reg (from_mode, from);
bbf6f052
RK
869 emit_unop_insn (code, to, from, equiv_code);
870 return;
871 }
872 /* Next, try converting via full word. */
873 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
874 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
875 != CODE_FOR_nothing))
876 {
a81fee56
RS
877 if (GET_CODE (to) == REG)
878 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
bbf6f052
RK
879 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
880 emit_unop_insn (code, to,
881 gen_lowpart (word_mode, to), equiv_code);
882 return;
883 }
884
885 /* No special multiword conversion insn; do it by hand. */
886 start_sequence ();
887
888 /* Get a copy of FROM widened to a word, if necessary. */
889 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
890 lowpart_mode = word_mode;
891 else
892 lowpart_mode = from_mode;
893
894 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
895
896 lowpart = gen_lowpart (lowpart_mode, to);
897 emit_move_insn (lowpart, lowfrom);
898
899 /* Compute the value to put in each remaining word. */
900 if (unsignedp)
901 fill_value = const0_rtx;
902 else
903 {
904#ifdef HAVE_slt
905 if (HAVE_slt
906 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
907 && STORE_FLAG_VALUE == -1)
908 {
906c4e36
RK
909 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
910 lowpart_mode, 0, 0);
bbf6f052
RK
911 fill_value = gen_reg_rtx (word_mode);
912 emit_insn (gen_slt (fill_value));
913 }
914 else
915#endif
916 {
917 fill_value
918 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
919 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 920 NULL_RTX, 0);
bbf6f052
RK
921 fill_value = convert_to_mode (word_mode, fill_value, 1);
922 }
923 }
924
925 /* Fill the remaining words. */
926 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
927 {
928 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
929 rtx subword = operand_subword (to, index, 1, to_mode);
930
931 if (subword == 0)
932 abort ();
933
934 if (fill_value != subword)
935 emit_move_insn (subword, fill_value);
936 }
937
938 insns = get_insns ();
939 end_sequence ();
940
906c4e36 941 emit_no_conflict_block (insns, to, from, NULL_RTX,
2abec1b7 942 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
943 return;
944 }
945
d3c64ee3
RS
946 /* Truncating multi-word to a word or less. */
947 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
948 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 949 {
431a6eca
JW
950 if (!((GET_CODE (from) == MEM
951 && ! MEM_VOLATILE_P (from)
952 && direct_load[(int) to_mode]
953 && ! mode_dependent_address_p (XEXP (from, 0)))
954 || GET_CODE (from) == REG
955 || GET_CODE (from) == SUBREG))
956 from = force_reg (from_mode, from);
bbf6f052
RK
957 convert_move (to, gen_lowpart (word_mode, from), 0);
958 return;
959 }
960
961 /* Handle pointer conversion */ /* SPEE 900220 */
962 if (to_mode == PSImode)
963 {
964 if (from_mode != SImode)
965 from = convert_to_mode (SImode, from, unsignedp);
966
967#ifdef HAVE_truncsipsi
968 if (HAVE_truncsipsi)
969 {
970 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
971 return;
972 }
973#endif /* HAVE_truncsipsi */
974 abort ();
975 }
976
977 if (from_mode == PSImode)
978 {
979 if (to_mode != SImode)
980 {
981 from = convert_to_mode (SImode, from, unsignedp);
982 from_mode = SImode;
983 }
984 else
985 {
986#ifdef HAVE_extendpsisi
987 if (HAVE_extendpsisi)
988 {
989 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
990 return;
991 }
992#endif /* HAVE_extendpsisi */
993 abort ();
994 }
995 }
996
997 /* Now follow all the conversions between integers
998 no more than a word long. */
999
1000 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1001 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1002 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 1003 GET_MODE_BITSIZE (from_mode)))
bbf6f052 1004 {
d3c64ee3
RS
1005 if (!((GET_CODE (from) == MEM
1006 && ! MEM_VOLATILE_P (from)
1007 && direct_load[(int) to_mode]
1008 && ! mode_dependent_address_p (XEXP (from, 0)))
1009 || GET_CODE (from) == REG
1010 || GET_CODE (from) == SUBREG))
1011 from = force_reg (from_mode, from);
bbf6f052
RK
1012 emit_move_insn (to, gen_lowpart (to_mode, from));
1013 return;
1014 }
1015
d3c64ee3 1016 /* Handle extension. */
bbf6f052
RK
1017 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1018 {
1019 /* Convert directly if that works. */
1020 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1021 != CODE_FOR_nothing)
1022 {
3dc4195c
RK
1023 /* If FROM is a SUBREG, put it into a register. Do this
1024 so that we always generate the same set of insns for
1025 better cse'ing; if an intermediate assignment occurred,
1026 we won't be doing the operation directly on the SUBREG. */
1027 if (optimize > 0 && GET_CODE (from) == SUBREG)
1028 from = force_reg (from_mode, from);
bbf6f052
RK
1029 emit_unop_insn (code, to, from, equiv_code);
1030 return;
1031 }
1032 else
1033 {
1034 enum machine_mode intermediate;
1035
1036 /* Search for a mode to convert via. */
1037 for (intermediate = from_mode; intermediate != VOIDmode;
1038 intermediate = GET_MODE_WIDER_MODE (intermediate))
1039 if ((can_extend_p (to_mode, intermediate, unsignedp)
1040 != CODE_FOR_nothing)
1041 && (can_extend_p (intermediate, from_mode, unsignedp)
1042 != CODE_FOR_nothing))
1043 {
1044 convert_move (to, convert_to_mode (intermediate, from,
1045 unsignedp), unsignedp);
1046 return;
1047 }
1048
1049 /* No suitable intermediate mode. */
1050 abort ();
1051 }
1052 }
1053
1054 /* Support special truncate insns for certain modes. */
1055
1056 if (from_mode == DImode && to_mode == SImode)
1057 {
1058#ifdef HAVE_truncdisi2
1059 if (HAVE_truncdisi2)
1060 {
1061 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1062 return;
1063 }
1064#endif
1065 convert_move (to, force_reg (from_mode, from), unsignedp);
1066 return;
1067 }
1068
1069 if (from_mode == DImode && to_mode == HImode)
1070 {
1071#ifdef HAVE_truncdihi2
1072 if (HAVE_truncdihi2)
1073 {
1074 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1075 return;
1076 }
1077#endif
1078 convert_move (to, force_reg (from_mode, from), unsignedp);
1079 return;
1080 }
1081
1082 if (from_mode == DImode && to_mode == QImode)
1083 {
1084#ifdef HAVE_truncdiqi2
1085 if (HAVE_truncdiqi2)
1086 {
1087 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1088 return;
1089 }
1090#endif
1091 convert_move (to, force_reg (from_mode, from), unsignedp);
1092 return;
1093 }
1094
1095 if (from_mode == SImode && to_mode == HImode)
1096 {
1097#ifdef HAVE_truncsihi2
1098 if (HAVE_truncsihi2)
1099 {
1100 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1101 return;
1102 }
1103#endif
1104 convert_move (to, force_reg (from_mode, from), unsignedp);
1105 return;
1106 }
1107
1108 if (from_mode == SImode && to_mode == QImode)
1109 {
1110#ifdef HAVE_truncsiqi2
1111 if (HAVE_truncsiqi2)
1112 {
1113 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1114 return;
1115 }
1116#endif
1117 convert_move (to, force_reg (from_mode, from), unsignedp);
1118 return;
1119 }
1120
1121 if (from_mode == HImode && to_mode == QImode)
1122 {
1123#ifdef HAVE_trunchiqi2
1124 if (HAVE_trunchiqi2)
1125 {
1126 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1127 return;
1128 }
1129#endif
1130 convert_move (to, force_reg (from_mode, from), unsignedp);
1131 return;
1132 }
1133
1134 /* Handle truncation of volatile memrefs, and so on;
1135 the things that couldn't be truncated directly,
1136 and for which there was no special instruction. */
1137 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1138 {
1139 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1140 emit_move_insn (to, temp);
1141 return;
1142 }
1143
1144 /* Mode combination is not recognized. */
1145 abort ();
1146}
1147
1148/* Return an rtx for a value that would result
1149 from converting X to mode MODE.
1150 Both X and MODE may be floating, or both integer.
1151 UNSIGNEDP is nonzero if X is an unsigned value.
1152 This can be done by referring to a part of X in place
5d901c31
RS
1153 or by copying to a new temporary with conversion.
1154
1155 This function *must not* call protect_from_queue
1156 except when putting X into an insn (in which case convert_move does it). */
bbf6f052
RK
1157
1158rtx
1159convert_to_mode (mode, x, unsignedp)
1160 enum machine_mode mode;
1161 rtx x;
1162 int unsignedp;
5ffe63ed
RS
1163{
1164 return convert_modes (mode, VOIDmode, x, unsignedp);
1165}
1166
1167/* Return an rtx for a value that would result
1168 from converting X from mode OLDMODE to mode MODE.
1169 Both modes may be floating, or both integer.
1170 UNSIGNEDP is nonzero if X is an unsigned value.
1171
1172 This can be done by referring to a part of X in place
1173 or by copying to a new temporary with conversion.
1174
1175 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1176
1177 This function *must not* call protect_from_queue
1178 except when putting X into an insn (in which case convert_move does it). */
1179
1180rtx
1181convert_modes (mode, oldmode, x, unsignedp)
1182 enum machine_mode mode, oldmode;
1183 rtx x;
1184 int unsignedp;
bbf6f052
RK
1185{
1186 register rtx temp;
5ffe63ed 1187
1499e0a8
RK
1188 /* If FROM is a SUBREG that indicates that we have already done at least
1189 the required extension, strip it. */
1190
1191 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1192 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1193 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1194 x = gen_lowpart (mode, x);
bbf6f052 1195
64791b18
RK
1196 if (GET_MODE (x) != VOIDmode)
1197 oldmode = GET_MODE (x);
1198
5ffe63ed 1199 if (mode == oldmode)
bbf6f052
RK
1200 return x;
1201
1202 /* There is one case that we must handle specially: If we are converting
906c4e36 1203 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
1204 we are to interpret the constant as unsigned, gen_lowpart will do
1205 the wrong if the constant appears negative. What we want to do is
1206 make the high-order word of the constant zero, not all ones. */
1207
1208 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 1209 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 1210 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
906c4e36 1211 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
bbf6f052
RK
1212
1213 /* We can do this with a gen_lowpart if both desired and current modes
1214 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
1215 non-volatile MEM. Except for the constant case where MODE is no
1216 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 1217
ba2e110c
RK
1218 if ((GET_CODE (x) == CONST_INT
1219 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 1220 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 1221 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 1222 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 1223 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
d57c66da
JW
1224 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1225 && direct_load[(int) mode])
bbf6f052 1226 || GET_CODE (x) == REG)))))
ba2e110c
RK
1227 {
1228 /* ?? If we don't know OLDMODE, we have to assume here that
1229 X does not need sign- or zero-extension. This may not be
1230 the case, but it's the best we can do. */
1231 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1232 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1233 {
1234 HOST_WIDE_INT val = INTVAL (x);
1235 int width = GET_MODE_BITSIZE (oldmode);
1236
1237 /* We must sign or zero-extend in this case. Start by
1238 zero-extending, then sign extend if we need to. */
1239 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1240 if (! unsignedp
1241 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1242 val |= (HOST_WIDE_INT) (-1) << width;
1243
1244 return GEN_INT (val);
1245 }
1246
1247 return gen_lowpart (mode, x);
1248 }
bbf6f052
RK
1249
1250 temp = gen_reg_rtx (mode);
1251 convert_move (temp, x, unsignedp);
1252 return temp;
1253}
1254\f
1255/* Generate several move instructions to copy LEN bytes
1256 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1257 The caller must pass FROM and TO
1258 through protect_from_queue before calling.
1259 ALIGN (in bytes) is maximum alignment we can assume. */
1260
bbf6f052
RK
1261static void
1262move_by_pieces (to, from, len, align)
1263 rtx to, from;
1264 int len, align;
1265{
1266 struct move_by_pieces data;
1267 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
e87b4f3f 1268 int max_size = MOVE_MAX + 1;
bbf6f052
RK
1269
1270 data.offset = 0;
1271 data.to_addr = to_addr;
1272 data.from_addr = from_addr;
1273 data.to = to;
1274 data.from = from;
1275 data.autinc_to
1276 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1277 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1278 data.autinc_from
1279 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1280 || GET_CODE (from_addr) == POST_INC
1281 || GET_CODE (from_addr) == POST_DEC);
1282
1283 data.explicit_inc_from = 0;
1284 data.explicit_inc_to = 0;
1285 data.reverse
1286 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1287 if (data.reverse) data.offset = len;
1288 data.len = len;
1289
1290 /* If copying requires more than two move insns,
1291 copy addresses to registers (to make displacements shorter)
1292 and use post-increment if available. */
1293 if (!(data.autinc_from && data.autinc_to)
1294 && move_by_pieces_ninsns (len, align) > 2)
1295 {
1296#ifdef HAVE_PRE_DECREMENT
1297 if (data.reverse && ! data.autinc_from)
1298 {
1299 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1300 data.autinc_from = 1;
1301 data.explicit_inc_from = -1;
1302 }
1303#endif
1304#ifdef HAVE_POST_INCREMENT
1305 if (! data.autinc_from)
1306 {
1307 data.from_addr = copy_addr_to_reg (from_addr);
1308 data.autinc_from = 1;
1309 data.explicit_inc_from = 1;
1310 }
1311#endif
1312 if (!data.autinc_from && CONSTANT_P (from_addr))
1313 data.from_addr = copy_addr_to_reg (from_addr);
1314#ifdef HAVE_PRE_DECREMENT
1315 if (data.reverse && ! data.autinc_to)
1316 {
1317 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1318 data.autinc_to = 1;
1319 data.explicit_inc_to = -1;
1320 }
1321#endif
1322#ifdef HAVE_POST_INCREMENT
1323 if (! data.reverse && ! data.autinc_to)
1324 {
1325 data.to_addr = copy_addr_to_reg (to_addr);
1326 data.autinc_to = 1;
1327 data.explicit_inc_to = 1;
1328 }
1329#endif
1330 if (!data.autinc_to && CONSTANT_P (to_addr))
1331 data.to_addr = copy_addr_to_reg (to_addr);
1332 }
1333
e87b4f3f
RS
1334 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1335 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1336 align = MOVE_MAX;
bbf6f052
RK
1337
1338 /* First move what we can in the largest integer mode, then go to
1339 successively smaller modes. */
1340
1341 while (max_size > 1)
1342 {
1343 enum machine_mode mode = VOIDmode, tmode;
1344 enum insn_code icode;
1345
e7c33f54
RK
1346 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1347 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1348 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1349 mode = tmode;
1350
1351 if (mode == VOIDmode)
1352 break;
1353
1354 icode = mov_optab->handlers[(int) mode].insn_code;
1355 if (icode != CODE_FOR_nothing
1356 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1357 GET_MODE_SIZE (mode)))
1358 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1359
1360 max_size = GET_MODE_SIZE (mode);
1361 }
1362
1363 /* The code above should have handled everything. */
1364 if (data.len != 0)
1365 abort ();
1366}
1367
1368/* Return number of insns required to move L bytes by pieces.
1369 ALIGN (in bytes) is maximum alignment we can assume. */
1370
1371static int
1372move_by_pieces_ninsns (l, align)
1373 unsigned int l;
1374 int align;
1375{
1376 register int n_insns = 0;
e87b4f3f 1377 int max_size = MOVE_MAX + 1;
bbf6f052 1378
e87b4f3f
RS
1379 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1380 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1381 align = MOVE_MAX;
bbf6f052
RK
1382
1383 while (max_size > 1)
1384 {
1385 enum machine_mode mode = VOIDmode, tmode;
1386 enum insn_code icode;
1387
e7c33f54
RK
1388 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1389 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1390 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1391 mode = tmode;
1392
1393 if (mode == VOIDmode)
1394 break;
1395
1396 icode = mov_optab->handlers[(int) mode].insn_code;
1397 if (icode != CODE_FOR_nothing
1398 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1399 GET_MODE_SIZE (mode)))
1400 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1401
1402 max_size = GET_MODE_SIZE (mode);
1403 }
1404
1405 return n_insns;
1406}
1407
1408/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1409 with move instructions for mode MODE. GENFUN is the gen_... function
1410 to make a move insn for that mode. DATA has all the other info. */
1411
1412static void
1413move_by_pieces_1 (genfun, mode, data)
1414 rtx (*genfun) ();
1415 enum machine_mode mode;
1416 struct move_by_pieces *data;
1417{
1418 register int size = GET_MODE_SIZE (mode);
1419 register rtx to1, from1;
1420
1421 while (data->len >= size)
1422 {
1423 if (data->reverse) data->offset -= size;
1424
1425 to1 = (data->autinc_to
1426 ? gen_rtx (MEM, mode, data->to_addr)
1427 : change_address (data->to, mode,
1428 plus_constant (data->to_addr, data->offset)));
1429 from1 =
1430 (data->autinc_from
1431 ? gen_rtx (MEM, mode, data->from_addr)
1432 : change_address (data->from, mode,
1433 plus_constant (data->from_addr, data->offset)));
1434
1435#ifdef HAVE_PRE_DECREMENT
1436 if (data->explicit_inc_to < 0)
906c4e36 1437 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
bbf6f052 1438 if (data->explicit_inc_from < 0)
906c4e36 1439 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
bbf6f052
RK
1440#endif
1441
1442 emit_insn ((*genfun) (to1, from1));
1443#ifdef HAVE_POST_INCREMENT
1444 if (data->explicit_inc_to > 0)
906c4e36 1445 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
bbf6f052 1446 if (data->explicit_inc_from > 0)
906c4e36 1447 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052
RK
1448#endif
1449
1450 if (! data->reverse) data->offset += size;
1451
1452 data->len -= size;
1453 }
1454}
1455\f
1456/* Emit code to move a block Y to a block X.
1457 This may be done with string-move instructions,
1458 with multiple scalar move instructions, or with a library call.
1459
1460 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1461 with mode BLKmode.
1462 SIZE is an rtx that says how long they are.
1463 ALIGN is the maximum alignment we can assume they have,
1464 measured in bytes. */
1465
1466void
1467emit_block_move (x, y, size, align)
1468 rtx x, y;
1469 rtx size;
1470 int align;
1471{
1472 if (GET_MODE (x) != BLKmode)
1473 abort ();
1474
1475 if (GET_MODE (y) != BLKmode)
1476 abort ();
1477
1478 x = protect_from_queue (x, 1);
1479 y = protect_from_queue (y, 0);
5d901c31 1480 size = protect_from_queue (size, 0);
bbf6f052
RK
1481
1482 if (GET_CODE (x) != MEM)
1483 abort ();
1484 if (GET_CODE (y) != MEM)
1485 abort ();
1486 if (size == 0)
1487 abort ();
1488
1489 if (GET_CODE (size) == CONST_INT
906c4e36 1490 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
bbf6f052
RK
1491 move_by_pieces (x, y, INTVAL (size), align);
1492 else
1493 {
1494 /* Try the most limited insn first, because there's no point
1495 including more than one in the machine description unless
1496 the more limited one has some advantage. */
266007a7 1497
0bba3f6f 1498 rtx opalign = GEN_INT (align);
266007a7
RK
1499 enum machine_mode mode;
1500
1501 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1502 mode = GET_MODE_WIDER_MODE (mode))
bbf6f052 1503 {
266007a7 1504 enum insn_code code = movstr_optab[(int) mode];
266007a7
RK
1505
1506 if (code != CODE_FOR_nothing
803090c4
RK
1507 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1508 here because if SIZE is less than the mode mask, as it is
8008b228 1509 returned by the macro, it will definitely be less than the
803090c4 1510 actual mode mask. */
f85b95d1 1511 && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
0bba3f6f
RK
1512 && (insn_operand_predicate[(int) code][0] == 0
1513 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1514 && (insn_operand_predicate[(int) code][1] == 0
1515 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1516 && (insn_operand_predicate[(int) code][3] == 0
1517 || (*insn_operand_predicate[(int) code][3]) (opalign,
1518 VOIDmode)))
bbf6f052 1519 {
1ba1e2a8 1520 rtx op2;
266007a7
RK
1521 rtx last = get_last_insn ();
1522 rtx pat;
1523
1ba1e2a8 1524 op2 = convert_to_mode (mode, size, 1);
0bba3f6f
RK
1525 if (insn_operand_predicate[(int) code][2] != 0
1526 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
266007a7
RK
1527 op2 = copy_to_mode_reg (mode, op2);
1528
1529 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1530 if (pat)
1531 {
1532 emit_insn (pat);
1533 return;
1534 }
1535 else
1536 delete_insns_since (last);
bbf6f052
RK
1537 }
1538 }
bbf6f052
RK
1539
1540#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1541 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
1542 VOIDmode, 3, XEXP (x, 0), Pmode,
1543 XEXP (y, 0), Pmode,
0fa83258
RK
1544 convert_to_mode (TYPE_MODE (sizetype), size,
1545 TREE_UNSIGNED (sizetype)),
1546 TYPE_MODE (sizetype));
bbf6f052 1547#else
d562e42e 1548 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
1549 VOIDmode, 3, XEXP (y, 0), Pmode,
1550 XEXP (x, 0), Pmode,
0fa83258
RK
1551 convert_to_mode (TYPE_MODE (sizetype), size,
1552 TREE_UNSIGNED (sizetype)),
1553 TYPE_MODE (sizetype));
bbf6f052
RK
1554#endif
1555 }
1556}
1557\f
1558/* Copy all or part of a value X into registers starting at REGNO.
1559 The number of registers to be filled is NREGS. */
1560
1561void
1562move_block_to_reg (regno, x, nregs, mode)
1563 int regno;
1564 rtx x;
1565 int nregs;
1566 enum machine_mode mode;
1567{
1568 int i;
1569 rtx pat, last;
1570
1571 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1572 x = validize_mem (force_const_mem (mode, x));
1573
1574 /* See if the machine can do this with a load multiple insn. */
1575#ifdef HAVE_load_multiple
1576 last = get_last_insn ();
1577 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
906c4e36 1578 GEN_INT (nregs));
bbf6f052
RK
1579 if (pat)
1580 {
1581 emit_insn (pat);
1582 return;
1583 }
1584 else
1585 delete_insns_since (last);
1586#endif
1587
1588 for (i = 0; i < nregs; i++)
1589 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1590 operand_subword_force (x, i, mode));
1591}
1592
1593/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1594 The number of registers to be filled is NREGS. SIZE indicates the number
1595 of bytes in the object X. */
1596
bbf6f052
RK
1597
1598void
0040593d 1599move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1600 int regno;
1601 rtx x;
1602 int nregs;
0040593d 1603 int size;
bbf6f052
RK
1604{
1605 int i;
1606 rtx pat, last;
1607
0040593d
JW
1608 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1609 to the left before storing to memory. */
1610 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1611 {
1612 rtx tem = operand_subword (x, 0, 1, BLKmode);
1613 rtx shift;
1614
1615 if (tem == 0)
1616 abort ();
1617
1618 shift = expand_shift (LSHIFT_EXPR, word_mode,
1619 gen_rtx (REG, word_mode, regno),
1620 build_int_2 ((UNITS_PER_WORD - size)
1621 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1622 emit_move_insn (tem, shift);
1623 return;
1624 }
1625
bbf6f052
RK
1626 /* See if the machine can do this with a store multiple insn. */
1627#ifdef HAVE_store_multiple
1628 last = get_last_insn ();
1629 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
906c4e36 1630 GEN_INT (nregs));
bbf6f052
RK
1631 if (pat)
1632 {
1633 emit_insn (pat);
1634 return;
1635 }
1636 else
1637 delete_insns_since (last);
1638#endif
1639
1640 for (i = 0; i < nregs; i++)
1641 {
1642 rtx tem = operand_subword (x, i, 1, BLKmode);
1643
1644 if (tem == 0)
1645 abort ();
1646
1647 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1648 }
1649}
1650
1651/* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1652
1653void
1654use_regs (regno, nregs)
1655 int regno;
1656 int nregs;
1657{
1658 int i;
1659
1660 for (i = 0; i < nregs; i++)
1661 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1662}
7308a047
RS
1663
1664/* Mark the instructions since PREV as a libcall block.
1665 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1666
f76a70d5 1667static void
7308a047
RS
1668group_insns (prev)
1669 rtx prev;
1670{
1671 rtx insn_first;
1672 rtx insn_last;
1673
1674 /* Find the instructions to mark */
1675 if (prev)
1676 insn_first = NEXT_INSN (prev);
1677 else
1678 insn_first = get_insns ();
1679
1680 insn_last = get_last_insn ();
1681
1682 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1683 REG_NOTES (insn_last));
1684
1685 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1686 REG_NOTES (insn_first));
1687}
bbf6f052
RK
1688\f
1689/* Write zeros through the storage of OBJECT.
1690 If OBJECT has BLKmode, SIZE is its length in bytes. */
1691
1692void
1693clear_storage (object, size)
1694 rtx object;
1695 int size;
1696{
1697 if (GET_MODE (object) == BLKmode)
1698 {
1699#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1700 emit_library_call (memset_libfunc, 0,
bbf6f052
RK
1701 VOIDmode, 3,
1702 XEXP (object, 0), Pmode, const0_rtx, Pmode,
906c4e36 1703 GEN_INT (size), Pmode);
bbf6f052 1704#else
d562e42e 1705 emit_library_call (bzero_libfunc, 0,
bbf6f052
RK
1706 VOIDmode, 2,
1707 XEXP (object, 0), Pmode,
906c4e36 1708 GEN_INT (size), Pmode);
bbf6f052
RK
1709#endif
1710 }
1711 else
1712 emit_move_insn (object, const0_rtx);
1713}
1714
1715/* Generate code to copy Y into X.
1716 Both Y and X must have the same mode, except that
1717 Y can be a constant with VOIDmode.
1718 This mode cannot be BLKmode; use emit_block_move for that.
1719
1720 Return the last instruction emitted. */
1721
1722rtx
1723emit_move_insn (x, y)
1724 rtx x, y;
1725{
1726 enum machine_mode mode = GET_MODE (x);
7308a047
RS
1727 enum machine_mode submode;
1728 enum mode_class class = GET_MODE_CLASS (mode);
bbf6f052
RK
1729 int i;
1730
1731 x = protect_from_queue (x, 1);
1732 y = protect_from_queue (y, 0);
1733
1734 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1735 abort ();
1736
1737 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1738 y = force_const_mem (mode, y);
1739
1740 /* If X or Y are memory references, verify that their addresses are valid
1741 for the machine. */
1742 if (GET_CODE (x) == MEM
1743 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1744 && ! push_operand (x, GET_MODE (x)))
1745 || (flag_force_addr
1746 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1747 x = change_address (x, VOIDmode, XEXP (x, 0));
1748
1749 if (GET_CODE (y) == MEM
1750 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1751 || (flag_force_addr
1752 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1753 y = change_address (y, VOIDmode, XEXP (y, 0));
1754
1755 if (mode == BLKmode)
1756 abort ();
1757
261c4230
RS
1758 return emit_move_insn_1 (x, y);
1759}
1760
1761/* Low level part of emit_move_insn.
1762 Called just like emit_move_insn, but assumes X and Y
1763 are basically valid. */
1764
1765rtx
1766emit_move_insn_1 (x, y)
1767 rtx x, y;
1768{
1769 enum machine_mode mode = GET_MODE (x);
1770 enum machine_mode submode;
1771 enum mode_class class = GET_MODE_CLASS (mode);
1772 int i;
1773
7308a047
RS
1774 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1775 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1776 (class == MODE_COMPLEX_INT
1777 ? MODE_INT : MODE_FLOAT),
1778 0);
1779
bbf6f052
RK
1780 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1781 return
1782 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1783
89742723 1784 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047
RS
1785 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1786 && submode != BLKmode
1787 && (mov_optab->handlers[(int) submode].insn_code
1788 != CODE_FOR_nothing))
1789 {
1790 /* Don't split destination if it is a stack push. */
1791 int stack = push_operand (x, GET_MODE (x));
1792 rtx prev = get_last_insn ();
1793
1794 /* Tell flow that the whole of the destination is being set. */
1795 if (GET_CODE (x) == REG)
1796 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1797
1798 /* If this is a stack, push the highpart first, so it
1799 will be in the argument order.
1800
1801 In that case, change_address is used only to convert
1802 the mode, not to change the address. */
c937357e
RS
1803 if (stack)
1804 {
e33c0d66
RS
1805 /* Note that the real part always precedes the imag part in memory
1806 regardless of machine's endianness. */
c937357e
RS
1807#ifdef STACK_GROWS_DOWNWARD
1808 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1809 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1810 gen_imagpart (submode, y)));
c937357e
RS
1811 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1812 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1813 gen_realpart (submode, y)));
c937357e
RS
1814#else
1815 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1816 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1817 gen_realpart (submode, y)));
c937357e
RS
1818 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1819 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1820 gen_imagpart (submode, y)));
c937357e
RS
1821#endif
1822 }
1823 else
1824 {
1825 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1826 (gen_highpart (submode, x), gen_highpart (submode, y)));
1827 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1828 (gen_lowpart (submode, x), gen_lowpart (submode, y)));
1829 }
7308a047 1830
6d6e61ce
RS
1831 if (GET_CODE (x) != CONCAT)
1832 /* If X is a CONCAT, we got insns like RD = RS, ID = IS,
1833 each with a separate pseudo as destination.
1834 It's not correct for flow to treat them as a unit. */
1835 group_insns (prev);
7a1ab50a
RS
1836
1837 return get_last_insn ();
7308a047
RS
1838 }
1839
bbf6f052
RK
1840 /* This will handle any multi-word mode that lacks a move_insn pattern.
1841 However, you will get better code if you define such patterns,
1842 even if they must turn into multiple assembler instructions. */
a4320483 1843 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
1844 {
1845 rtx last_insn = 0;
7308a047 1846 rtx prev_insn = get_last_insn ();
bbf6f052
RK
1847
1848 for (i = 0;
1849 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1850 i++)
1851 {
1852 rtx xpart = operand_subword (x, i, 1, mode);
1853 rtx ypart = operand_subword (y, i, 1, mode);
1854
1855 /* If we can't get a part of Y, put Y into memory if it is a
1856 constant. Otherwise, force it into a register. If we still
1857 can't get a part of Y, abort. */
1858 if (ypart == 0 && CONSTANT_P (y))
1859 {
1860 y = force_const_mem (mode, y);
1861 ypart = operand_subword (y, i, 1, mode);
1862 }
1863 else if (ypart == 0)
1864 ypart = operand_subword_force (y, i, mode);
1865
1866 if (xpart == 0 || ypart == 0)
1867 abort ();
1868
1869 last_insn = emit_move_insn (xpart, ypart);
1870 }
7308a047
RS
1871 /* Mark these insns as a libcall block. */
1872 group_insns (prev_insn);
1873
bbf6f052
RK
1874 return last_insn;
1875 }
1876 else
1877 abort ();
1878}
1879\f
1880/* Pushing data onto the stack. */
1881
1882/* Push a block of length SIZE (perhaps variable)
1883 and return an rtx to address the beginning of the block.
1884 Note that it is not possible for the value returned to be a QUEUED.
1885 The value may be virtual_outgoing_args_rtx.
1886
1887 EXTRA is the number of bytes of padding to push in addition to SIZE.
1888 BELOW nonzero means this padding comes at low addresses;
1889 otherwise, the padding comes at high addresses. */
1890
1891rtx
1892push_block (size, extra, below)
1893 rtx size;
1894 int extra, below;
1895{
1896 register rtx temp;
1897 if (CONSTANT_P (size))
1898 anti_adjust_stack (plus_constant (size, extra));
1899 else if (GET_CODE (size) == REG && extra == 0)
1900 anti_adjust_stack (size);
1901 else
1902 {
1903 rtx temp = copy_to_mode_reg (Pmode, size);
1904 if (extra != 0)
906c4e36 1905 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
1906 temp, 0, OPTAB_LIB_WIDEN);
1907 anti_adjust_stack (temp);
1908 }
1909
1910#ifdef STACK_GROWS_DOWNWARD
1911 temp = virtual_outgoing_args_rtx;
1912 if (extra != 0 && below)
1913 temp = plus_constant (temp, extra);
1914#else
1915 if (GET_CODE (size) == CONST_INT)
1916 temp = plus_constant (virtual_outgoing_args_rtx,
1917 - INTVAL (size) - (below ? 0 : extra));
1918 else if (extra != 0 && !below)
1919 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1920 negate_rtx (Pmode, plus_constant (size, extra)));
1921 else
1922 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1923 negate_rtx (Pmode, size));
1924#endif
1925
1926 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1927}
1928
87e38d84 1929rtx
bbf6f052
RK
1930gen_push_operand ()
1931{
1932 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1933}
1934
1935/* Generate code to push X onto the stack, assuming it has mode MODE and
1936 type TYPE.
1937 MODE is redundant except when X is a CONST_INT (since they don't
1938 carry mode info).
1939 SIZE is an rtx for the size of data to be copied (in bytes),
1940 needed only if X is BLKmode.
1941
1942 ALIGN (in bytes) is maximum alignment we can assume.
1943
cd048831
RK
1944 If PARTIAL and REG are both nonzero, then copy that many of the first
1945 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
1946 The amount of space pushed is decreased by PARTIAL words,
1947 rounded *down* to a multiple of PARM_BOUNDARY.
1948 REG must be a hard register in this case.
cd048831
RK
1949 If REG is zero but PARTIAL is not, take any all others actions for an
1950 argument partially in registers, but do not actually load any
1951 registers.
bbf6f052
RK
1952
1953 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 1954 This is ignored if an argument block has already been allocated.
bbf6f052
RK
1955
1956 On a machine that lacks real push insns, ARGS_ADDR is the address of
1957 the bottom of the argument block for this call. We use indexing off there
1958 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1959 argument block has not been preallocated.
1960
1961 ARGS_SO_FAR is the size of args previously pushed for this call. */
1962
1963void
1964emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1965 args_addr, args_so_far)
1966 register rtx x;
1967 enum machine_mode mode;
1968 tree type;
1969 rtx size;
1970 int align;
1971 int partial;
1972 rtx reg;
1973 int extra;
1974 rtx args_addr;
1975 rtx args_so_far;
1976{
1977 rtx xinner;
1978 enum direction stack_direction
1979#ifdef STACK_GROWS_DOWNWARD
1980 = downward;
1981#else
1982 = upward;
1983#endif
1984
1985 /* Decide where to pad the argument: `downward' for below,
1986 `upward' for above, or `none' for don't pad it.
1987 Default is below for small data on big-endian machines; else above. */
1988 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1989
1990 /* Invert direction if stack is post-update. */
1991 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1992 if (where_pad != none)
1993 where_pad = (where_pad == downward ? upward : downward);
1994
1995 xinner = x = protect_from_queue (x, 0);
1996
1997 if (mode == BLKmode)
1998 {
1999 /* Copy a block into the stack, entirely or partially. */
2000
2001 register rtx temp;
2002 int used = partial * UNITS_PER_WORD;
2003 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2004 int skip;
2005
2006 if (size == 0)
2007 abort ();
2008
2009 used -= offset;
2010
2011 /* USED is now the # of bytes we need not copy to the stack
2012 because registers will take care of them. */
2013
2014 if (partial != 0)
2015 xinner = change_address (xinner, BLKmode,
2016 plus_constant (XEXP (xinner, 0), used));
2017
2018 /* If the partial register-part of the arg counts in its stack size,
2019 skip the part of stack space corresponding to the registers.
2020 Otherwise, start copying to the beginning of the stack space,
2021 by setting SKIP to 0. */
2022#ifndef REG_PARM_STACK_SPACE
2023 skip = 0;
2024#else
2025 skip = used;
2026#endif
2027
2028#ifdef PUSH_ROUNDING
2029 /* Do it with several push insns if that doesn't take lots of insns
2030 and if there is no difficulty with push insns that skip bytes
2031 on the stack for alignment purposes. */
2032 if (args_addr == 0
2033 && GET_CODE (size) == CONST_INT
2034 && skip == 0
2035 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2036 < MOVE_RATIO)
bbf6f052
RK
2037 /* Here we avoid the case of a structure whose weak alignment
2038 forces many pushes of a small amount of data,
2039 and such small pushes do rounding that causes trouble. */
e87b4f3f
RS
2040 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
2041 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2042 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
2043 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2044 {
2045 /* Push padding now if padding above and stack grows down,
2046 or if padding below and stack grows up.
2047 But if space already allocated, this has already been done. */
2048 if (extra && args_addr == 0
2049 && where_pad != none && where_pad != stack_direction)
906c4e36 2050 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2051
2052 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2053 INTVAL (size) - used, align);
2054 }
2055 else
2056#endif /* PUSH_ROUNDING */
2057 {
2058 /* Otherwise make space on the stack and copy the data
2059 to the address of that space. */
2060
2061 /* Deduct words put into registers from the size we must copy. */
2062 if (partial != 0)
2063 {
2064 if (GET_CODE (size) == CONST_INT)
906c4e36 2065 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
2066 else
2067 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
2068 GEN_INT (used), NULL_RTX, 0,
2069 OPTAB_LIB_WIDEN);
bbf6f052
RK
2070 }
2071
2072 /* Get the address of the stack space.
2073 In this case, we do not deal with EXTRA separately.
2074 A single stack adjust will do. */
2075 if (! args_addr)
2076 {
2077 temp = push_block (size, extra, where_pad == downward);
2078 extra = 0;
2079 }
2080 else if (GET_CODE (args_so_far) == CONST_INT)
2081 temp = memory_address (BLKmode,
2082 plus_constant (args_addr,
2083 skip + INTVAL (args_so_far)));
2084 else
2085 temp = memory_address (BLKmode,
2086 plus_constant (gen_rtx (PLUS, Pmode,
2087 args_addr, args_so_far),
2088 skip));
2089
2090 /* TEMP is the address of the block. Copy the data there. */
2091 if (GET_CODE (size) == CONST_INT
2092 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2093 < MOVE_RATIO))
2094 {
2095 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2096 INTVAL (size), align);
2097 goto ret;
2098 }
2099 /* Try the most limited insn first, because there's no point
2100 including more than one in the machine description unless
2101 the more limited one has some advantage. */
2102#ifdef HAVE_movstrqi
2103 if (HAVE_movstrqi
2104 && GET_CODE (size) == CONST_INT
2105 && ((unsigned) INTVAL (size)
2106 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2107 {
c841050e
RS
2108 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2109 xinner, size, GEN_INT (align));
2110 if (pat != 0)
2111 {
2112 emit_insn (pat);
2113 goto ret;
2114 }
bbf6f052
RK
2115 }
2116#endif
2117#ifdef HAVE_movstrhi
2118 if (HAVE_movstrhi
2119 && GET_CODE (size) == CONST_INT
2120 && ((unsigned) INTVAL (size)
2121 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2122 {
c841050e
RS
2123 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2124 xinner, size, GEN_INT (align));
2125 if (pat != 0)
2126 {
2127 emit_insn (pat);
2128 goto ret;
2129 }
bbf6f052
RK
2130 }
2131#endif
2132#ifdef HAVE_movstrsi
2133 if (HAVE_movstrsi)
2134 {
c841050e
RS
2135 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2136 xinner, size, GEN_INT (align));
2137 if (pat != 0)
2138 {
2139 emit_insn (pat);
2140 goto ret;
2141 }
bbf6f052
RK
2142 }
2143#endif
2144#ifdef HAVE_movstrdi
2145 if (HAVE_movstrdi)
2146 {
c841050e
RS
2147 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2148 xinner, size, GEN_INT (align));
2149 if (pat != 0)
2150 {
2151 emit_insn (pat);
2152 goto ret;
2153 }
bbf6f052
RK
2154 }
2155#endif
2156
2157#ifndef ACCUMULATE_OUTGOING_ARGS
2158 /* If the source is referenced relative to the stack pointer,
2159 copy it to another register to stabilize it. We do not need
2160 to do this if we know that we won't be changing sp. */
2161
2162 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2163 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2164 temp = copy_to_reg (temp);
2165#endif
2166
2167 /* Make inhibit_defer_pop nonzero around the library call
2168 to force it to pop the bcopy-arguments right away. */
2169 NO_DEFER_POP;
2170#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2171 emit_library_call (memcpy_libfunc, 0,
bbf6f052 2172 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
2173 convert_to_mode (TYPE_MODE (sizetype),
2174 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2175 TYPE_MODE (sizetype));
bbf6f052 2176#else
d562e42e 2177 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2178 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
0fa83258
RK
2179 convert_to_mode (TYPE_MODE (sizetype),
2180 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2181 TYPE_MODE (sizetype));
bbf6f052
RK
2182#endif
2183 OK_DEFER_POP;
2184 }
2185 }
2186 else if (partial > 0)
2187 {
2188 /* Scalar partly in registers. */
2189
2190 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2191 int i;
2192 int not_stack;
2193 /* # words of start of argument
2194 that we must make space for but need not store. */
2195 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2196 int args_offset = INTVAL (args_so_far);
2197 int skip;
2198
2199 /* Push padding now if padding above and stack grows down,
2200 or if padding below and stack grows up.
2201 But if space already allocated, this has already been done. */
2202 if (extra && args_addr == 0
2203 && where_pad != none && where_pad != stack_direction)
906c4e36 2204 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2205
2206 /* If we make space by pushing it, we might as well push
2207 the real data. Otherwise, we can leave OFFSET nonzero
2208 and leave the space uninitialized. */
2209 if (args_addr == 0)
2210 offset = 0;
2211
2212 /* Now NOT_STACK gets the number of words that we don't need to
2213 allocate on the stack. */
2214 not_stack = partial - offset;
2215
2216 /* If the partial register-part of the arg counts in its stack size,
2217 skip the part of stack space corresponding to the registers.
2218 Otherwise, start copying to the beginning of the stack space,
2219 by setting SKIP to 0. */
2220#ifndef REG_PARM_STACK_SPACE
2221 skip = 0;
2222#else
2223 skip = not_stack;
2224#endif
2225
2226 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2227 x = validize_mem (force_const_mem (mode, x));
2228
2229 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2230 SUBREGs of such registers are not allowed. */
2231 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2232 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2233 x = copy_to_reg (x);
2234
2235 /* Loop over all the words allocated on the stack for this arg. */
2236 /* We can do it by words, because any scalar bigger than a word
2237 has a size a multiple of a word. */
2238#ifndef PUSH_ARGS_REVERSED
2239 for (i = not_stack; i < size; i++)
2240#else
2241 for (i = size - 1; i >= not_stack; i--)
2242#endif
2243 if (i >= not_stack + offset)
2244 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
2245 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2246 0, args_addr,
2247 GEN_INT (args_offset + ((i - not_stack + skip)
bbf6f052
RK
2248 * UNITS_PER_WORD)));
2249 }
2250 else
2251 {
2252 rtx addr;
2253
2254 /* Push padding now if padding above and stack grows down,
2255 or if padding below and stack grows up.
2256 But if space already allocated, this has already been done. */
2257 if (extra && args_addr == 0
2258 && where_pad != none && where_pad != stack_direction)
906c4e36 2259 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2260
2261#ifdef PUSH_ROUNDING
2262 if (args_addr == 0)
2263 addr = gen_push_operand ();
2264 else
2265#endif
2266 if (GET_CODE (args_so_far) == CONST_INT)
2267 addr
2268 = memory_address (mode,
2269 plus_constant (args_addr, INTVAL (args_so_far)));
2270 else
2271 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2272 args_so_far));
2273
2274 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2275 }
2276
2277 ret:
2278 /* If part should go in registers, copy that part
2279 into the appropriate registers. Do this now, at the end,
2280 since mem-to-mem copies above may do function calls. */
cd048831 2281 if (partial > 0 && reg != 0)
bbf6f052
RK
2282 move_block_to_reg (REGNO (reg), x, partial, mode);
2283
2284 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 2285 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2286}
2287\f
bbf6f052
RK
2288/* Expand an assignment that stores the value of FROM into TO.
2289 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
2290 (This may contain a QUEUED rtx;
2291 if the value is constant, this rtx is a constant.)
2292 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
2293
2294 SUGGEST_REG is no longer actually used.
2295 It used to mean, copy the value through a register
2296 and return that register, if that is possible.
709f5be1 2297 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
2298
2299rtx
2300expand_assignment (to, from, want_value, suggest_reg)
2301 tree to, from;
2302 int want_value;
2303 int suggest_reg;
2304{
2305 register rtx to_rtx = 0;
2306 rtx result;
2307
2308 /* Don't crash if the lhs of the assignment was erroneous. */
2309
2310 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
2311 {
2312 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2313 return want_value ? result : NULL_RTX;
2314 }
bbf6f052 2315
ca695ac9
JB
2316 if (output_bytecode)
2317 {
2318 tree dest_innermost;
2319
2320 bc_expand_expr (from);
6d6e61ce 2321 bc_emit_instruction (duplicate);
ca695ac9
JB
2322
2323 dest_innermost = bc_expand_address (to);
2324
2325 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2326 take care of it here. */
2327
2328 bc_store_memory (TREE_TYPE (to), dest_innermost);
2329 return NULL;
2330 }
2331
bbf6f052
RK
2332 /* Assignment of a structure component needs special treatment
2333 if the structure component's rtx is not simply a MEM.
2334 Assignment of an array element at a constant index
2335 has the same problem. */
2336
2337 if (TREE_CODE (to) == COMPONENT_REF
2338 || TREE_CODE (to) == BIT_FIELD_REF
2339 || (TREE_CODE (to) == ARRAY_REF
2340 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2341 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2342 {
2343 enum machine_mode mode1;
2344 int bitsize;
2345 int bitpos;
7bb0943f 2346 tree offset;
bbf6f052
RK
2347 int unsignedp;
2348 int volatilep = 0;
0088fcb1 2349 tree tem;
d78d243c 2350 int alignment;
0088fcb1
RK
2351
2352 push_temp_slots ();
2353 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
bbf6f052
RK
2354 &mode1, &unsignedp, &volatilep);
2355
2356 /* If we are going to use store_bit_field and extract_bit_field,
2357 make sure to_rtx will be safe for multiple use. */
2358
2359 if (mode1 == VOIDmode && want_value)
2360 tem = stabilize_reference (tem);
2361
d78d243c 2362 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
906c4e36 2363 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2364 if (offset != 0)
2365 {
906c4e36 2366 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2367
2368 if (GET_CODE (to_rtx) != MEM)
2369 abort ();
2370 to_rtx = change_address (to_rtx, VOIDmode,
2371 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2372 force_reg (Pmode, offset_rtx)));
d78d243c
RS
2373 /* If we have a variable offset, the known alignment
2374 is only that of the innermost structure containing the field.
2375 (Actually, we could sometimes do better by using the
2376 align of an element of the innermost array, but no need.) */
2377 if (TREE_CODE (to) == COMPONENT_REF
2378 || TREE_CODE (to) == BIT_FIELD_REF)
2379 alignment
2380 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
7bb0943f 2381 }
bbf6f052
RK
2382 if (volatilep)
2383 {
2384 if (GET_CODE (to_rtx) == MEM)
2385 MEM_VOLATILE_P (to_rtx) = 1;
2386#if 0 /* This was turned off because, when a field is volatile
2387 in an object which is not volatile, the object may be in a register,
2388 and then we would abort over here. */
2389 else
2390 abort ();
2391#endif
2392 }
2393
2394 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2395 (want_value
2396 /* Spurious cast makes HPUX compiler happy. */
2397 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2398 : VOIDmode),
2399 unsignedp,
2400 /* Required alignment of containing datum. */
d78d243c 2401 alignment,
bbf6f052
RK
2402 int_size_in_bytes (TREE_TYPE (tem)));
2403 preserve_temp_slots (result);
2404 free_temp_slots ();
0088fcb1 2405 pop_temp_slots ();
bbf6f052 2406
709f5be1
RS
2407 /* If the value is meaningful, convert RESULT to the proper mode.
2408 Otherwise, return nothing. */
5ffe63ed
RS
2409 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2410 TYPE_MODE (TREE_TYPE (from)),
2411 result,
2412 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 2413 : NULL_RTX);
bbf6f052
RK
2414 }
2415
cd1db108
RS
2416 /* If the rhs is a function call and its value is not an aggregate,
2417 call the function before we start to compute the lhs.
2418 This is needed for correct code for cases such as
2419 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
2420 requires loading up part of an address in a separate insn.
2421
2422 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2423 a promoted variable where the zero- or sign- extension needs to be done.
2424 Handling this in the normal way is safe because no computation is done
2425 before the call. */
2426 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2427 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 2428 {
0088fcb1
RK
2429 rtx value;
2430
2431 push_temp_slots ();
2432 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108
RS
2433 if (to_rtx == 0)
2434 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2435 emit_move_insn (to_rtx, value);
2436 preserve_temp_slots (to_rtx);
2437 free_temp_slots ();
0088fcb1 2438 pop_temp_slots ();
709f5be1 2439 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
2440 }
2441
bbf6f052
RK
2442 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2443 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2444
2445 if (to_rtx == 0)
906c4e36 2446 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
bbf6f052 2447
86d38d25
RS
2448 /* Don't move directly into a return register. */
2449 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2450 {
0088fcb1
RK
2451 rtx temp;
2452
2453 push_temp_slots ();
2454 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
2455 emit_move_insn (to_rtx, temp);
2456 preserve_temp_slots (to_rtx);
2457 free_temp_slots ();
0088fcb1 2458 pop_temp_slots ();
709f5be1 2459 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
2460 }
2461
bbf6f052
RK
2462 /* In case we are returning the contents of an object which overlaps
2463 the place the value is being stored, use a safe function when copying
2464 a value through a pointer into a structure value return block. */
2465 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2466 && current_function_returns_struct
2467 && !current_function_returns_pcc_struct)
2468 {
0088fcb1
RK
2469 rtx from_rtx, size;
2470
2471 push_temp_slots ();
33a20d10
RK
2472 size = expr_size (from);
2473 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
2474
2475#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2476 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
2477 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2478 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
2479 convert_to_mode (TYPE_MODE (sizetype),
2480 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2481 TYPE_MODE (sizetype));
bbf6f052 2482#else
d562e42e 2483 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
2484 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2485 XEXP (to_rtx, 0), Pmode,
0fa83258
RK
2486 convert_to_mode (TYPE_MODE (sizetype),
2487 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2488 TYPE_MODE (sizetype));
bbf6f052
RK
2489#endif
2490
2491 preserve_temp_slots (to_rtx);
2492 free_temp_slots ();
0088fcb1 2493 pop_temp_slots ();
709f5be1 2494 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
2495 }
2496
2497 /* Compute FROM and store the value in the rtx we got. */
2498
0088fcb1 2499 push_temp_slots ();
bbf6f052
RK
2500 result = store_expr (from, to_rtx, want_value);
2501 preserve_temp_slots (result);
2502 free_temp_slots ();
0088fcb1 2503 pop_temp_slots ();
709f5be1 2504 return want_value ? result : NULL_RTX;
bbf6f052
RK
2505}
2506
2507/* Generate code for computing expression EXP,
2508 and storing the value into TARGET.
bbf6f052
RK
2509 TARGET may contain a QUEUED rtx.
2510
709f5be1
RS
2511 If WANT_VALUE is nonzero, return a copy of the value
2512 not in TARGET, so that we can be sure to use the proper
2513 value in a containing expression even if TARGET has something
2514 else stored in it. If possible, we copy the value through a pseudo
2515 and return that pseudo. Or, if the value is constant, we try to
2516 return the constant. In some cases, we return a pseudo
2517 copied *from* TARGET.
2518
2519 If the mode is BLKmode then we may return TARGET itself.
2520 It turns out that in BLKmode it doesn't cause a problem.
2521 because C has no operators that could combine two different
2522 assignments into the same BLKmode object with different values
2523 with no sequence point. Will other languages need this to
2524 be more thorough?
2525
2526 If WANT_VALUE is 0, we return NULL, to make sure
2527 to catch quickly any cases where the caller uses the value
2528 and fails to set WANT_VALUE. */
bbf6f052
RK
2529
2530rtx
709f5be1 2531store_expr (exp, target, want_value)
bbf6f052
RK
2532 register tree exp;
2533 register rtx target;
709f5be1 2534 int want_value;
bbf6f052
RK
2535{
2536 register rtx temp;
2537 int dont_return_target = 0;
2538
2539 if (TREE_CODE (exp) == COMPOUND_EXPR)
2540 {
2541 /* Perform first part of compound expression, then assign from second
2542 part. */
2543 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2544 emit_queue ();
709f5be1 2545 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
2546 }
2547 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2548 {
2549 /* For conditional expression, get safe form of the target. Then
2550 test the condition, doing the appropriate assignment on either
2551 side. This avoids the creation of unnecessary temporaries.
2552 For non-BLKmode, it is more efficient not to do this. */
2553
2554 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2555
2556 emit_queue ();
2557 target = protect_from_queue (target, 1);
2558
2559 NO_DEFER_POP;
2560 jumpifnot (TREE_OPERAND (exp, 0), lab1);
709f5be1 2561 store_expr (TREE_OPERAND (exp, 1), target, 0);
bbf6f052
RK
2562 emit_queue ();
2563 emit_jump_insn (gen_jump (lab2));
2564 emit_barrier ();
2565 emit_label (lab1);
709f5be1 2566 store_expr (TREE_OPERAND (exp, 2), target, 0);
bbf6f052
RK
2567 emit_queue ();
2568 emit_label (lab2);
2569 OK_DEFER_POP;
709f5be1 2570 return want_value ? target : NULL_RTX;
bbf6f052 2571 }
709f5be1 2572 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
bbf6f052
RK
2573 && GET_MODE (target) != BLKmode)
2574 /* If target is in memory and caller wants value in a register instead,
2575 arrange that. Pass TARGET as target for expand_expr so that,
709f5be1 2576 if EXP is another assignment, WANT_VALUE will be nonzero for it.
c2e6aff6
RS
2577 We know expand_expr will not use the target in that case.
2578 Don't do this if TARGET is volatile because we are supposed
2579 to write it and then read it. */
bbf6f052 2580 {
906c4e36 2581 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
2582 GET_MODE (target), 0);
2583 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2584 temp = copy_to_reg (temp);
2585 dont_return_target = 1;
2586 }
2587 else if (queued_subexp_p (target))
709f5be1
RS
2588 /* If target contains a postincrement, let's not risk
2589 using it as the place to generate the rhs. */
bbf6f052
RK
2590 {
2591 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2592 {
2593 /* Expand EXP into a new pseudo. */
2594 temp = gen_reg_rtx (GET_MODE (target));
2595 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2596 }
2597 else
906c4e36 2598 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
2599
2600 /* If target is volatile, ANSI requires accessing the value
2601 *from* the target, if it is accessed. So make that happen.
2602 In no case return the target itself. */
2603 if (! MEM_VOLATILE_P (target) && want_value)
2604 dont_return_target = 1;
bbf6f052 2605 }
1499e0a8
RK
2606 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2607 /* If this is an scalar in a register that is stored in a wider mode
2608 than the declared mode, compute the result into its declared mode
2609 and then convert to the wider mode. Our value is the computed
2610 expression. */
2611 {
2612 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c
RS
2613
2614 /* If TEMP is a VOIDmode constant, use convert_modes to make
2615 sure that we properly convert it. */
2616 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2617 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2618 TYPE_MODE (TREE_TYPE (exp)), temp,
2619 SUBREG_PROMOTED_UNSIGNED_P (target));
2620
1499e0a8
RK
2621 convert_move (SUBREG_REG (target), temp,
2622 SUBREG_PROMOTED_UNSIGNED_P (target));
709f5be1 2623 return want_value ? temp : NULL_RTX;
1499e0a8 2624 }
bbf6f052
RK
2625 else
2626 {
2627 temp = expand_expr (exp, target, GET_MODE (target), 0);
2628 /* DO return TARGET if it's a specified hardware register.
c2e6aff6 2629 expand_return relies on this.
709f5be1
RS
2630 If TARGET is a volatile mem ref, either return TARGET
2631 or return a reg copied *from* TARGET; ANSI requires this.
2632
2633 Otherwise, if TEMP is not TARGET, return TEMP
2634 if it is constant (for efficiency),
2635 or if we really want the correct value. */
bbf6f052
RK
2636 if (!(target && GET_CODE (target) == REG
2637 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1
RS
2638 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2639 && temp != target
2640 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
2641 dont_return_target = 1;
2642 }
2643
b258707c
RS
2644 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2645 the same as that of TARGET, adjust the constant. This is needed, for
2646 example, in case it is a CONST_DOUBLE and we want only a word-sized
2647 value. */
2648 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2649 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2650 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2651 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2652
bbf6f052
RK
2653 /* If value was not generated in the target, store it there.
2654 Convert the value to TARGET's type first if nec. */
2655
2656 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2657 {
2658 target = protect_from_queue (target, 1);
2659 if (GET_MODE (temp) != GET_MODE (target)
2660 && GET_MODE (temp) != VOIDmode)
2661 {
2662 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2663 if (dont_return_target)
2664 {
2665 /* In this case, we will return TEMP,
2666 so make sure it has the proper mode.
2667 But don't forget to store the value into TARGET. */
2668 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2669 emit_move_insn (target, temp);
2670 }
2671 else
2672 convert_move (target, temp, unsignedp);
2673 }
2674
2675 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2676 {
2677 /* Handle copying a string constant into an array.
2678 The string constant may be shorter than the array.
2679 So copy just the string's actual length, and clear the rest. */
2680 rtx size;
2681
e87b4f3f
RS
2682 /* Get the size of the data type of the string,
2683 which is actually the size of the target. */
2684 size = expr_size (exp);
2685 if (GET_CODE (size) == CONST_INT
2686 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2687 emit_block_move (target, temp, size,
2688 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2689 else
bbf6f052 2690 {
e87b4f3f
RS
2691 /* Compute the size of the data to copy from the string. */
2692 tree copy_size
c03b7665 2693 = size_binop (MIN_EXPR,
b50d17a1 2694 make_tree (sizetype, size),
c03b7665
RK
2695 convert (sizetype,
2696 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
2697 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2698 VOIDmode, 0);
e87b4f3f
RS
2699 rtx label = 0;
2700
2701 /* Copy that much. */
2702 emit_block_move (target, temp, copy_size_rtx,
2703 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2704
2705 /* Figure out how much is left in TARGET
2706 that we have to clear. */
2707 if (GET_CODE (copy_size_rtx) == CONST_INT)
2708 {
2709 temp = plus_constant (XEXP (target, 0),
2710 TREE_STRING_LENGTH (exp));
2711 size = plus_constant (size,
2712 - TREE_STRING_LENGTH (exp));
2713 }
2714 else
2715 {
2716 enum machine_mode size_mode = Pmode;
2717
2718 temp = force_reg (Pmode, XEXP (target, 0));
2719 temp = expand_binop (size_mode, add_optab, temp,
906c4e36
RK
2720 copy_size_rtx, NULL_RTX, 0,
2721 OPTAB_LIB_WIDEN);
e87b4f3f
RS
2722
2723 size = expand_binop (size_mode, sub_optab, size,
906c4e36
RK
2724 copy_size_rtx, NULL_RTX, 0,
2725 OPTAB_LIB_WIDEN);
e87b4f3f 2726
906c4e36 2727 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
2728 GET_MODE (size), 0, 0);
2729 label = gen_label_rtx ();
2730 emit_jump_insn (gen_blt (label));
2731 }
2732
2733 if (size != const0_rtx)
2734 {
bbf6f052 2735#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2736 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
e87b4f3f 2737 temp, Pmode, const0_rtx, Pmode, size, Pmode);
bbf6f052 2738#else
d562e42e 2739 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
e87b4f3f 2740 temp, Pmode, size, Pmode);
bbf6f052 2741#endif
e87b4f3f
RS
2742 }
2743 if (label)
2744 emit_label (label);
bbf6f052
RK
2745 }
2746 }
2747 else if (GET_MODE (temp) == BLKmode)
2748 emit_block_move (target, temp, expr_size (exp),
2749 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2750 else
2751 emit_move_insn (target, temp);
2752 }
709f5be1 2753
7d26fec6 2754 if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 2755 return temp;
709f5be1
RS
2756 if (want_value && GET_MODE (target) != BLKmode)
2757 return copy_to_reg (target);
2758 if (want_value)
2759 return target;
2760 return NULL_RTX;
bbf6f052
RK
2761}
2762\f
2763/* Store the value of constructor EXP into the rtx TARGET.
2764 TARGET is either a REG or a MEM. */
2765
2766static void
2767store_constructor (exp, target)
2768 tree exp;
2769 rtx target;
2770{
4af3895e
JVA
2771 tree type = TREE_TYPE (exp);
2772
bbf6f052
RK
2773 /* We know our target cannot conflict, since safe_from_p has been called. */
2774#if 0
2775 /* Don't try copying piece by piece into a hard register
2776 since that is vulnerable to being clobbered by EXP.
2777 Instead, construct in a pseudo register and then copy it all. */
2778 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2779 {
2780 rtx temp = gen_reg_rtx (GET_MODE (target));
2781 store_constructor (exp, temp);
2782 emit_move_insn (target, temp);
2783 return;
2784 }
2785#endif
2786
e44842fe
RK
2787 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2788 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
2789 {
2790 register tree elt;
2791
4af3895e 2792 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
2793 if (TREE_CODE (type) == UNION_TYPE
2794 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 2795 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
4af3895e
JVA
2796
2797 /* If we are building a static constructor into a register,
2798 set the initial value as zero so we can fold the value into
2799 a constant. */
2800 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2801 emit_move_insn (target, const0_rtx);
2802
bbf6f052
RK
2803 /* If the constructor has fewer fields than the structure,
2804 clear the whole structure first. */
2805 else if (list_length (CONSTRUCTOR_ELTS (exp))
4af3895e
JVA
2806 != list_length (TYPE_FIELDS (type)))
2807 clear_storage (target, int_size_in_bytes (type));
bbf6f052
RK
2808 else
2809 /* Inform later passes that the old value is dead. */
2810 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2811
2812 /* Store each element of the constructor into
2813 the corresponding field of TARGET. */
2814
2815 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2816 {
2817 register tree field = TREE_PURPOSE (elt);
2818 register enum machine_mode mode;
2819 int bitsize;
b50d17a1 2820 int bitpos = 0;
bbf6f052 2821 int unsignedp;
b50d17a1
RK
2822 tree pos, constant = 0, offset = 0;
2823 rtx to_rtx = target;
bbf6f052 2824
f32fd778
RS
2825 /* Just ignore missing fields.
2826 We cleared the whole structure, above,
2827 if any fields are missing. */
2828 if (field == 0)
2829 continue;
2830
bbf6f052
RK
2831 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2832 unsignedp = TREE_UNSIGNED (field);
2833 mode = DECL_MODE (field);
2834 if (DECL_BIT_FIELD (field))
2835 mode = VOIDmode;
2836
b50d17a1
RK
2837 pos = DECL_FIELD_BITPOS (field);
2838 if (TREE_CODE (pos) == INTEGER_CST)
2839 constant = pos;
2840 else if (TREE_CODE (pos) == PLUS_EXPR
2841 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2842 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
2843 else
2844 offset = pos;
2845
2846 if (constant)
2847 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2848
2849 if (offset)
2850 {
2851 rtx offset_rtx;
2852
2853 if (contains_placeholder_p (offset))
2854 offset = build (WITH_RECORD_EXPR, sizetype,
2855 offset, exp);
bbf6f052 2856
b50d17a1
RK
2857 offset = size_binop (FLOOR_DIV_EXPR, offset,
2858 size_int (BITS_PER_UNIT));
bbf6f052 2859
b50d17a1
RK
2860 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2861 if (GET_CODE (to_rtx) != MEM)
2862 abort ();
2863
2864 to_rtx
2865 = change_address (to_rtx, VOIDmode,
2866 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2867 force_reg (Pmode, offset_rtx)));
2868 }
2869
2870 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
bbf6f052
RK
2871 /* The alignment of TARGET is
2872 at least what its type requires. */
2873 VOIDmode, 0,
4af3895e
JVA
2874 TYPE_ALIGN (type) / BITS_PER_UNIT,
2875 int_size_in_bytes (type));
bbf6f052
RK
2876 }
2877 }
4af3895e 2878 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
2879 {
2880 register tree elt;
2881 register int i;
4af3895e 2882 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
2883 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2884 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 2885 tree elttype = TREE_TYPE (type);
bbf6f052
RK
2886
2887 /* If the constructor has fewer fields than the structure,
4af3895e
JVA
2888 clear the whole structure first. Similarly if this this is
2889 static constructor of a non-BLKmode object. */
bbf6f052 2890
4af3895e
JVA
2891 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2892 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
597bb7f1 2893 clear_storage (target, int_size_in_bytes (type));
bbf6f052
RK
2894 else
2895 /* Inform later passes that the old value is dead. */
2896 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2897
2898 /* Store each element of the constructor into
2899 the corresponding element of TARGET, determined
2900 by counting the elements. */
2901 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2902 elt;
2903 elt = TREE_CHAIN (elt), i++)
2904 {
2905 register enum machine_mode mode;
2906 int bitsize;
2907 int bitpos;
2908 int unsignedp;
03dc44a6
RS
2909 tree index = TREE_PURPOSE (elt);
2910 rtx xtarget = target;
bbf6f052
RK
2911
2912 mode = TYPE_MODE (elttype);
2913 bitsize = GET_MODE_BITSIZE (mode);
2914 unsignedp = TREE_UNSIGNED (elttype);
2915
03dc44a6
RS
2916 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
2917 {
2918 /* We don't currently allow variable indices in a
2919 C initializer, but let's try here to support them. */
2920 rtx pos_rtx, addr, xtarget;
2921 tree position;
2922
2923 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
2924 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
2925 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
2926 xtarget = change_address (target, mode, addr);
2927 store_expr (TREE_VALUE (elt), xtarget, 0);
2928 }
2929 else
2930 {
2931 if (index != 0)
7c314719 2932 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
2933 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2934 else
2935 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2936
2937 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
2938 /* The alignment of TARGET is
2939 at least what its type requires. */
2940 VOIDmode, 0,
2941 TYPE_ALIGN (type) / BITS_PER_UNIT,
2942 int_size_in_bytes (type));
2943 }
bbf6f052
RK
2944 }
2945 }
2946
2947 else
2948 abort ();
2949}
2950
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  HOST_WIDE_INT width_mask = 0;

  /* Mask of the low BITSIZE bits of the stored value.  Left zero when
     BITSIZE fills (or exceeds) a host word, in which case the
     mask-or-shift shortcut below is not attempted.  */
  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
				      GET_MODE_SIZE (GET_MODE (target)), 0);
      /* BLK_OBJECT is the same stack slot viewed in BLKmode.  */
      rtx blk_object = copy_rtx (object);

      PUT_MODE (blk_object, BLKmode);

      /* Only pre-fill the temporary when the field does not cover all
	 of TARGET; otherwise the recursive store overwrites it anyway.  */
      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
		   align, total_size);

      /* Even though we aren't returning target, we need to
	 give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode])
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (STRICT_ALIGNMENT
	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.  */
	  /* If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      /* Unsigned value: masking TEMP down to BITSIZE bits gives
		 exactly what was stored.  */
	      if (unsignedp)
		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      /* Signed value: sign-extend the low BITSIZE bits by shifting
		 them to the top of TMODE and arithmetically back down.  */
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }
	  /* Fall back to re-reading the field from TARGET.  */
	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = change_address (target, mode,
			       plus_constant (addr, (bitpos / BITS_PER_UNIT)));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
3084\f
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  /* Variable part of the offset, accumulated in units (bytes).  */
  tree offset = integer_zero_node;

  /* First determine the size, mode and signedness of the outermost
     reference.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      /* A non-constant size means a variable-sized object; signal it
	 with BLKmode and a bitsize of -1 per the contract above.  */
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  if (TREE_CODE (pos) == PLUS_EXPR)
	    {
	      /* Mixed position: a constant bit part plus a variable part.
		 The constant goes into *PBITPOS, the variable part (assumed
		 a whole number of units) into OFFSET.  */
	      tree constant, var;
	      if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
		{
		  constant = TREE_OPERAND (pos, 0);
		  var = TREE_OPERAND (pos, 1);
		}
	      else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
		{
		  constant = TREE_OPERAND (pos, 1);
		  var = TREE_OPERAND (pos, 0);
		}
	      else
		abort ();

	      *pbitpos += TREE_INT_CST_LOW (constant);
	      offset = size_binop (PLUS_EXPR, offset,
				   size_binop (FLOOR_DIV_EXPR, var,
					       size_int (BITS_PER_UNIT)));
	    }
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    *pbitpos += TREE_INT_CST_LOW (pos);
	  else
	    {
	      /* Assume here that the offset is a multiple of a unit.
		 If not, there should be an explicitly added constant.  */
	      offset = size_binop (PLUS_EXPR, offset,
				   size_binop (FLOOR_DIV_EXPR, pos,
					       size_int (BITS_PER_UNIT)));
	    }
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);

	  /* Rebase the index so that the array origin is zero.  */
	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index, low_bound));

	  if (TYPE_PRECISION (index_type) != POINTER_SIZE)
	    {
	      index = convert (type_for_size (POINTER_SIZE, 0), index);
	      index_type = TREE_TYPE (index);
	    }

	  /* Scale the index by the element size in bits.  */
	  index = fold (build (MULT_EXPR, index_type, index,
			       TYPE_SIZE (TREE_TYPE (exp))));

	  if (TREE_CODE (index) == INTEGER_CST
	      && TREE_INT_CST_HIGH (index) == 0)
	    *pbitpos += TREE_INT_CST_LOW (index);
	  else
	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (FLOOR_DIV_EXPR, index,
					     size_int (BITS_PER_UNIT)));
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;
      exp = TREE_OPERAND (exp, 0);
    }

  /* If this was a bit-field, see if there is a mode that allows direct
     access in case EXP is in memory.  */
  if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
    {
      mode = mode_for_size (*pbitsize, MODE_INT, 0);
      if (mode == BLKmode)
	mode = VOIDmode;
    }

  /* A zero variable offset is reported as a null tree, per the contract.  */
  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  return exp;
}
3266\f
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      /* Don't let operand 0 be computed into SUBTARGET if that could
	 clobber OP2 before OP2 is used.  */
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      /* Same clobber precaution as in the MULT case above.  */
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      /* Turn subtraction of a constant into addition of its negation;
	 an add is the more widely available insn.  */
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }
  /* Not a PLUS, MINUS or MULT: VALUE already refers to a register,
     memory or constant, so return it unchanged.  */
  return value;
}
3343\f
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      /* A nested TREE_LIST names a group of parts; save them recursively
	 and splice the result onto PARTS.  */
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	/* Reference to the part within LHS whose value must be saved.  */
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	/* Stack temporary that will hold the saved value.  */
	rtx target = assign_stack_temp (TYPE_MODE (part_type),
					int_size_in_bytes (part_type), 0);
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	/* Pair the reference with an RTL_EXPR wrapping the temporary, so
	 the caller can later copy the temporary back.  */
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	/* Copy the current value of the part into the temporary now.  */
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}
3377
3378/* Subroutine of expand_expr:
3379 record the non-copied parts (LIST) of an expr (LHS), and return a list
3380 which specifies the initial values of these parts. */
3381
3382static tree
3383init_noncopied_parts (lhs, list)
3384 tree lhs;
3385 tree list;
3386{
3387 tree tail;
3388 tree parts = 0;
3389
3390 for (tail = list; tail; tail = TREE_CHAIN (tail))
3391 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3392 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3393 else
3394 {
3395 tree part = TREE_VALUE (tail);
3396 tree part_type = TREE_TYPE (part);
906c4e36 3397 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
3398 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3399 }
3400 return parts;
3401}
3402
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  */

static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  /* Set to any rtx found inside EXP; checked against X at the end.  */
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0)
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  /* Dispatch on the class of tree code.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      /* Constants cannot reference anything.  */
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp)))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp))));
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
	      && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0)));

	case INDIRECT_REF:
	  /* An indirection could alias any memory location.  */
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }

	  break;

	case RTL_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    /* We don't know what this can modify.  */
	    return 0;

	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  break;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1));

	case METHOD_CALL_EXPR:
	  /* This takes a rtx argument, but shouldn't appear here.  */
	  abort ();
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      /* Otherwise every operand must itself be safe.  */
      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i)))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
3549
3550/* Subroutine of expand_expr: return nonzero iff EXP is an
3551 expression whose type is statically determinable. */
3552
3553static int
3554fixed_type_p (exp)
3555 tree exp;
3556{
3557 if (TREE_CODE (exp) == PARM_DECL
3558 || TREE_CODE (exp) == VAR_DECL
3559 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3560 || TREE_CODE (exp) == COMPONENT_REF
3561 || TREE_CODE (exp) == ARRAY_REF)
3562 return 1;
3563 return 0;
3564}
3565\f
3566/* expand_expr: generate code for computing expression EXP.
3567 An rtx for the computed value is returned. The value is never null.
3568 In the case of a void EXP, const0_rtx is returned.
3569
3570 The value may be stored in TARGET if TARGET is nonzero.
3571 TARGET is just a suggestion; callers must assume that
3572 the rtx returned may not be the same as TARGET.
3573
3574 If TARGET is CONST0_RTX, it means that the value will be ignored.
3575
3576 If TMODE is not VOIDmode, it suggests generating the
3577 result in mode TMODE. But this is done only when convenient.
3578 Otherwise, TMODE is ignored and the value generated in its natural mode.
3579 TMODE is just a suggestion; callers must assume that
3580 the rtx returned may not have mode TMODE.
3581
3582 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3583 with a constant address even if that address is not normally legitimate.
3584 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3585
3586 If MODIFIER is EXPAND_SUM then when EXP is an addition
3587 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3588 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3589 products as above, or REG or MEM, or constant.
3590 Ordinarily in such cases we would output mul or add instructions
3591 and then return a pseudo reg containing the sum.
3592
3593 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3594 it also marks a label as absolutely required (it can't be dead).
26fcb35a 3595 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6dc42e49 3596 This is used for outputting expressions used in initializers. */
bbf6f052
RK
3597
3598rtx
3599expand_expr (exp, target, tmode, modifier)
3600 register tree exp;
3601 rtx target;
3602 enum machine_mode tmode;
3603 enum expand_modifier modifier;
3604{
b50d17a1
RK
3605 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
3606 This is static so it will be accessible to our recursive callees. */
3607 static tree placeholder_list = 0;
bbf6f052
RK
3608 register rtx op0, op1, temp;
3609 tree type = TREE_TYPE (exp);
3610 int unsignedp = TREE_UNSIGNED (type);
3611 register enum machine_mode mode = TYPE_MODE (type);
3612 register enum tree_code code = TREE_CODE (exp);
3613 optab this_optab;
3614 /* Use subtarget as the target for operand 0 of a binary operation. */
3615 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3616 rtx original_target = target;
ca695ac9 3617 /* Maybe defer this until sure not doing bytecode? */
dd27116b
RK
3618 int ignore = (target == const0_rtx
3619 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
3620 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3621 || code == COND_EXPR)
dd27116b 3622 && TREE_CODE (type) == VOID_TYPE));
bbf6f052
RK
3623 tree context;
3624
ca695ac9
JB
3625
3626 if (output_bytecode)
3627 {
3628 bc_expand_expr (exp);
3629 return NULL;
3630 }
3631
bbf6f052
RK
3632 /* Don't use hard regs as subtargets, because the combiner
3633 can only handle pseudo regs. */
3634 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3635 subtarget = 0;
3636 /* Avoid subtargets inside loops,
3637 since they hide some invariant expressions. */
3638 if (preserve_subexpressions_p ())
3639 subtarget = 0;
3640
dd27116b
RK
3641 /* If we are going to ignore this result, we need only do something
3642 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
3643 is, short-circuit the most common cases here. Note that we must
3644 not call expand_expr with anything but const0_rtx in case this
3645 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 3646
dd27116b
RK
3647 if (ignore)
3648 {
3649 if (! TREE_SIDE_EFFECTS (exp))
3650 return const0_rtx;
3651
3652 /* Ensure we reference a volatile object even if value is ignored. */
3653 if (TREE_THIS_VOLATILE (exp)
3654 && TREE_CODE (exp) != FUNCTION_DECL
3655 && mode != VOIDmode && mode != BLKmode)
3656 {
3657 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3658 if (GET_CODE (temp) == MEM)
3659 temp = copy_to_reg (temp);
3660 return const0_rtx;
3661 }
3662
3663 if (TREE_CODE_CLASS (code) == '1')
3664 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3665 VOIDmode, modifier);
3666 else if (TREE_CODE_CLASS (code) == '2'
3667 || TREE_CODE_CLASS (code) == '<')
3668 {
3669 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3670 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3671 return const0_rtx;
3672 }
3673 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3674 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3675 /* If the second operand has no side effects, just evaluate
3676 the first. */
3677 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3678 VOIDmode, modifier);
dd27116b 3679
90764a87 3680 target = 0;
dd27116b 3681 }
bbf6f052 3682
e44842fe
RK
3683 /* If will do cse, generate all results into pseudo registers
3684 since 1) that allows cse to find more things
3685 and 2) otherwise cse could produce an insn the machine
3686 cannot support. */
3687
bbf6f052
RK
3688 if (! cse_not_expected && mode != BLKmode && target
3689 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3690 target = subtarget;
3691
bbf6f052
RK
3692 switch (code)
3693 {
3694 case LABEL_DECL:
b552441b
RS
3695 {
3696 tree function = decl_function_context (exp);
3697 /* Handle using a label in a containing function. */
3698 if (function != current_function_decl && function != 0)
3699 {
3700 struct function *p = find_function_data (function);
3701 /* Allocate in the memory associated with the function
3702 that the label is in. */
3703 push_obstacks (p->function_obstack,
3704 p->function_maybepermanent_obstack);
3705
3706 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3707 label_rtx (exp), p->forced_labels);
3708 pop_obstacks ();
3709 }
3710 else if (modifier == EXPAND_INITIALIZER)
3711 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3712 label_rtx (exp), forced_labels);
26fcb35a 3713 temp = gen_rtx (MEM, FUNCTION_MODE,
b552441b 3714 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
26fcb35a
RS
3715 if (function != current_function_decl && function != 0)
3716 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3717 return temp;
b552441b 3718 }
bbf6f052
RK
3719
3720 case PARM_DECL:
3721 if (DECL_RTL (exp) == 0)
3722 {
3723 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 3724 return CONST0_RTX (mode);
bbf6f052
RK
3725 }
3726
bbf6f052 3727 case VAR_DECL:
2dca20cd
RS
3728 /* If a static var's type was incomplete when the decl was written,
3729 but the type is complete now, lay out the decl now. */
3730 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3731 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
3732 {
3733 push_obstacks_nochange ();
3734 end_temporary_allocation ();
3735 layout_decl (exp, 0);
3736 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
3737 pop_obstacks ();
3738 }
3739 case FUNCTION_DECL:
bbf6f052
RK
3740 case RESULT_DECL:
3741 if (DECL_RTL (exp) == 0)
3742 abort ();
e44842fe
RK
3743 /* Ensure variable marked as used even if it doesn't go through
3744 a parser. If it hasn't be used yet, write out an external
3745 definition. */
3746 if (! TREE_USED (exp))
3747 {
3748 assemble_external (exp);
3749 TREE_USED (exp) = 1;
3750 }
3751
bbf6f052
RK
3752 /* Handle variables inherited from containing functions. */
3753 context = decl_function_context (exp);
3754
3755 /* We treat inline_function_decl as an alias for the current function
3756 because that is the inline function whose vars, types, etc.
3757 are being merged into the current function.
3758 See expand_inline_function. */
3759 if (context != 0 && context != current_function_decl
3760 && context != inline_function_decl
3761 /* If var is static, we don't need a static chain to access it. */
3762 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3763 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3764 {
3765 rtx addr;
3766
3767 /* Mark as non-local and addressable. */
81feeecb 3768 DECL_NONLOCAL (exp) = 1;
bbf6f052
RK
3769 mark_addressable (exp);
3770 if (GET_CODE (DECL_RTL (exp)) != MEM)
3771 abort ();
3772 addr = XEXP (DECL_RTL (exp), 0);
3773 if (GET_CODE (addr) == MEM)
3774 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3775 else
3776 addr = fix_lexical_addr (addr, exp);
3777 return change_address (DECL_RTL (exp), mode, addr);
3778 }
4af3895e 3779
bbf6f052
RK
3780 /* This is the case of an array whose size is to be determined
3781 from its initializer, while the initializer is still being parsed.
3782 See expand_decl. */
3783 if (GET_CODE (DECL_RTL (exp)) == MEM
3784 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3785 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3786 XEXP (DECL_RTL (exp), 0));
3787 if (GET_CODE (DECL_RTL (exp)) == MEM
3788 && modifier != EXPAND_CONST_ADDRESS
3789 && modifier != EXPAND_SUM
3790 && modifier != EXPAND_INITIALIZER)
3791 {
3792 /* DECL_RTL probably contains a constant address.
3793 On RISC machines where a constant address isn't valid,
3794 make some insns to get that address into a register. */
3795 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3796 || (flag_force_addr
3797 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3798 return change_address (DECL_RTL (exp), VOIDmode,
3799 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3800 }
1499e0a8
RK
3801
3802 /* If the mode of DECL_RTL does not match that of the decl, it
3803 must be a promoted value. We return a SUBREG of the wanted mode,
3804 but mark it so that we know that it was already extended. */
3805
3806 if (GET_CODE (DECL_RTL (exp)) == REG
3807 && GET_MODE (DECL_RTL (exp)) != mode)
3808 {
3809 enum machine_mode decl_mode = DECL_MODE (exp);
3810
3811 /* Get the signedness used for this variable. Ensure we get the
3812 same mode we got when the variable was declared. */
3813
3814 PROMOTE_MODE (decl_mode, unsignedp, type);
3815
3816 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3817 abort ();
3818
3819 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3820 SUBREG_PROMOTED_VAR_P (temp) = 1;
3821 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3822 return temp;
3823 }
3824
bbf6f052
RK
3825 return DECL_RTL (exp);
3826
3827 case INTEGER_CST:
3828 return immed_double_const (TREE_INT_CST_LOW (exp),
3829 TREE_INT_CST_HIGH (exp),
3830 mode);
3831
3832 case CONST_DECL:
3833 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3834
3835 case REAL_CST:
3836 /* If optimized, generate immediate CONST_DOUBLE
3837 which will be turned into memory by reload if necessary.
3838
3839 We used to force a register so that loop.c could see it. But
3840 this does not allow gen_* patterns to perform optimizations with
3841 the constants. It also produces two insns in cases like "x = 1.0;".
3842 On most machines, floating-point constants are not permitted in
3843 many insns, so we'd end up copying it to a register in any case.
3844
3845 Now, we do the copying in expand_binop, if appropriate. */
3846 return immed_real_const (exp);
3847
3848 case COMPLEX_CST:
3849 case STRING_CST:
3850 if (! TREE_CST_RTL (exp))
3851 output_constant_def (exp);
3852
3853 /* TREE_CST_RTL probably contains a constant address.
3854 On RISC machines where a constant address isn't valid,
3855 make some insns to get that address into a register. */
3856 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3857 && modifier != EXPAND_CONST_ADDRESS
3858 && modifier != EXPAND_INITIALIZER
3859 && modifier != EXPAND_SUM
3860 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3861 return change_address (TREE_CST_RTL (exp), VOIDmode,
3862 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3863 return TREE_CST_RTL (exp);
3864
3865 case SAVE_EXPR:
3866 context = decl_function_context (exp);
3867 /* We treat inline_function_decl as an alias for the current function
3868 because that is the inline function whose vars, types, etc.
3869 are being merged into the current function.
3870 See expand_inline_function. */
3871 if (context == current_function_decl || context == inline_function_decl)
3872 context = 0;
3873
3874 /* If this is non-local, handle it. */
3875 if (context)
3876 {
3877 temp = SAVE_EXPR_RTL (exp);
3878 if (temp && GET_CODE (temp) == REG)
3879 {
3880 put_var_into_stack (exp);
3881 temp = SAVE_EXPR_RTL (exp);
3882 }
3883 if (temp == 0 || GET_CODE (temp) != MEM)
3884 abort ();
3885 return change_address (temp, mode,
3886 fix_lexical_addr (XEXP (temp, 0), exp));
3887 }
3888 if (SAVE_EXPR_RTL (exp) == 0)
3889 {
3890 if (mode == BLKmode)
34a25822
RK
3891 {
3892 temp
3893 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3894 MEM_IN_STRUCT_P (temp)
3895 = (TREE_CODE (type) == RECORD_TYPE
3896 || TREE_CODE (type) == UNION_TYPE
3897 || TREE_CODE (type) == QUAL_UNION_TYPE
3898 || TREE_CODE (type) == ARRAY_TYPE);
3899 }
bbf6f052 3900 else
1499e0a8
RK
3901 {
3902 enum machine_mode var_mode = mode;
3903
3904 if (TREE_CODE (type) == INTEGER_TYPE
3905 || TREE_CODE (type) == ENUMERAL_TYPE
3906 || TREE_CODE (type) == BOOLEAN_TYPE
3907 || TREE_CODE (type) == CHAR_TYPE
3908 || TREE_CODE (type) == REAL_TYPE
3909 || TREE_CODE (type) == POINTER_TYPE
3910 || TREE_CODE (type) == OFFSET_TYPE)
3911 {
3912 PROMOTE_MODE (var_mode, unsignedp, type);
3913 }
3914
3915 temp = gen_reg_rtx (var_mode);
3916 }
3917
bbf6f052 3918 SAVE_EXPR_RTL (exp) = temp;
bbf6f052
RK
3919 if (!optimize && GET_CODE (temp) == REG)
3920 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3921 save_expr_regs);
ff78f773
RK
3922
3923 /* If the mode of TEMP does not match that of the expression, it
3924 must be a promoted value. We pass store_expr a SUBREG of the
3925 wanted mode but mark it so that we know that it was already
3926 extended. Note that `unsignedp' was modified above in
3927 this case. */
3928
3929 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3930 {
3931 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3932 SUBREG_PROMOTED_VAR_P (temp) = 1;
3933 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3934 }
3935
3936 store_expr (TREE_OPERAND (exp, 0), temp, 0);
bbf6f052 3937 }
1499e0a8
RK
3938
3939 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3940 must be a promoted value. We return a SUBREG of the wanted mode,
3941 but mark it so that we know that it was already extended. Note
3942 that `unsignedp' was modified above in this case. */
3943
3944 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3945 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3946 {
3947 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3948 SUBREG_PROMOTED_VAR_P (temp) = 1;
3949 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3950 return temp;
3951 }
3952
bbf6f052
RK
3953 return SAVE_EXPR_RTL (exp);
3954
b50d17a1
RK
3955 case PLACEHOLDER_EXPR:
3956 /* If there is an object on the head of the placeholder list,
3957 see if some object in it's references is of type TYPE. For
3958 further information, see tree.def. */
3959 if (placeholder_list)
3960 {
3961 tree object;
3962
3963 for (object = TREE_PURPOSE (placeholder_list);
3964 TREE_TYPE (object) != type
3965 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4805bfa0
RK
3966 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
3967 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
3968 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
b50d17a1
RK
3969 object = TREE_OPERAND (object, 0))
3970 ;
3971
4805bfa0 3972 if (object && TREE_TYPE (object) == type)
b50d17a1
RK
3973 return expand_expr (object, original_target, tmode, modifier);
3974 }
3975
3976 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
3977 abort ();
3978
3979 case WITH_RECORD_EXPR:
3980 /* Put the object on the placeholder list, expand our first operand,
3981 and pop the list. */
3982 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
3983 placeholder_list);
3984 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
3985 tmode, modifier);
3986 placeholder_list = TREE_CHAIN (placeholder_list);
3987 return target;
3988
bbf6f052 3989 case EXIT_EXPR:
e44842fe
RK
3990 expand_exit_loop_if_false (NULL_PTR,
3991 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
3992 return const0_rtx;
3993
3994 case LOOP_EXPR:
0088fcb1 3995 push_temp_slots ();
bbf6f052
RK
3996 expand_start_loop (1);
3997 expand_expr_stmt (TREE_OPERAND (exp, 0));
3998 expand_end_loop ();
0088fcb1 3999 pop_temp_slots ();
bbf6f052
RK
4000
4001 return const0_rtx;
4002
4003 case BIND_EXPR:
4004 {
4005 tree vars = TREE_OPERAND (exp, 0);
4006 int vars_need_expansion = 0;
4007
4008 /* Need to open a binding contour here because
4009 if there are any cleanups they most be contained here. */
4010 expand_start_bindings (0);
4011
2df53c0b
RS
4012 /* Mark the corresponding BLOCK for output in its proper place. */
4013 if (TREE_OPERAND (exp, 2) != 0
4014 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4015 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
4016
4017 /* If VARS have not yet been expanded, expand them now. */
4018 while (vars)
4019 {
4020 if (DECL_RTL (vars) == 0)
4021 {
4022 vars_need_expansion = 1;
4023 expand_decl (vars);
4024 }
4025 expand_decl_init (vars);
4026 vars = TREE_CHAIN (vars);
4027 }
4028
4029 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4030
4031 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4032
4033 return temp;
4034 }
4035
4036 case RTL_EXPR:
4037 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4038 abort ();
4039 emit_insns (RTL_EXPR_SEQUENCE (exp));
4040 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4041 return RTL_EXPR_RTL (exp);
4042
4043 case CONSTRUCTOR:
dd27116b
RK
4044 /* If we don't need the result, just ensure we evaluate any
4045 subexpressions. */
4046 if (ignore)
4047 {
4048 tree elt;
4049 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4050 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4051 return const0_rtx;
4052 }
4af3895e
JVA
4053 /* All elts simple constants => refer to a constant in memory. But
4054 if this is a non-BLKmode mode, let it store a field at a time
4055 since that should make a CONST_INT or CONST_DOUBLE when we
dd27116b
RK
4056 fold. If we are making an initializer and all operands are
4057 constant, put it in memory as well. */
4058 else if ((TREE_STATIC (exp)
4059 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
4060 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
4061 {
4062 rtx constructor = output_constant_def (exp);
b552441b
RS
4063 if (modifier != EXPAND_CONST_ADDRESS
4064 && modifier != EXPAND_INITIALIZER
4065 && modifier != EXPAND_SUM
4066 && !memory_address_p (GET_MODE (constructor),
4067 XEXP (constructor, 0)))
bbf6f052
RK
4068 constructor = change_address (constructor, VOIDmode,
4069 XEXP (constructor, 0));
4070 return constructor;
4071 }
4072
bbf6f052
RK
4073 else
4074 {
4075 if (target == 0 || ! safe_from_p (target, exp))
4076 {
4077 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4078 target = gen_reg_rtx (mode);
4079 else
4080 {
3b94d087
RS
4081 enum tree_code c = TREE_CODE (type);
4082 target
4083 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
e7f3c83f
RK
4084 if (c == RECORD_TYPE || c == UNION_TYPE
4085 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3b94d087 4086 MEM_IN_STRUCT_P (target) = 1;
bbf6f052
RK
4087 }
4088 }
4089 store_constructor (exp, target);
4090 return target;
4091 }
4092
4093 case INDIRECT_REF:
4094 {
4095 tree exp1 = TREE_OPERAND (exp, 0);
4096 tree exp2;
4097
4098 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4099 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4100 This code has the same general effect as simply doing
4101 expand_expr on the save expr, except that the expression PTR
4102 is computed for use as a memory address. This means different
4103 code, suitable for indexing, may be generated. */
4104 if (TREE_CODE (exp1) == SAVE_EXPR
4105 && SAVE_EXPR_RTL (exp1) == 0
4106 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4107 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4108 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4109 {
906c4e36
RK
4110 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4111 VOIDmode, EXPAND_SUM);
bbf6f052
RK
4112 op0 = memory_address (mode, temp);
4113 op0 = copy_all_regs (op0);
4114 SAVE_EXPR_RTL (exp1) = op0;
4115 }
4116 else
4117 {
906c4e36 4118 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
4119 op0 = memory_address (mode, op0);
4120 }
8c8a8e34
JW
4121
4122 temp = gen_rtx (MEM, mode, op0);
4123 /* If address was computed by addition,
4124 mark this as an element of an aggregate. */
4125 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4126 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4127 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4128 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4129 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4130 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
e7f3c83f 4131 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
8c8a8e34
JW
4132 || (TREE_CODE (exp1) == ADDR_EXPR
4133 && (exp2 = TREE_OPERAND (exp1, 0))
4134 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4135 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
e7f3c83f
RK
4136 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
4137 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
8c8a8e34 4138 MEM_IN_STRUCT_P (temp) = 1;
2c4c436a 4139 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
89742723 4140#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
bbf6f052
RK
4141 a location is accessed through a pointer to const does not mean
4142 that the value there can never change. */
8c8a8e34 4143 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
bbf6f052 4144#endif
8c8a8e34
JW
4145 return temp;
4146 }
bbf6f052
RK
4147
4148 case ARRAY_REF:
742920c7
RK
4149 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4150 abort ();
bbf6f052 4151
bbf6f052 4152 {
742920c7
RK
4153 tree array = TREE_OPERAND (exp, 0);
4154 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4155 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4156 tree index = TREE_OPERAND (exp, 1);
4157 tree index_type = TREE_TYPE (index);
bbf6f052 4158 int i;
bbf6f052 4159
b50d17a1
RK
4160 if (TREE_CODE (low_bound) != INTEGER_CST
4161 && contains_placeholder_p (low_bound))
4162 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4163
d4c89139
PB
4164 /* Optimize the special-case of a zero lower bound.
4165
4166 We convert the low_bound to sizetype to avoid some problems
4167 with constant folding. (E.g. suppose the lower bound is 1,
4168 and its mode is QI. Without the conversion, (ARRAY
4169 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4170 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4171
4172 But sizetype isn't quite right either (especially if
4173 the lowbound is negative). FIXME */
4174
742920c7 4175 if (! integer_zerop (low_bound))
d4c89139
PB
4176 index = fold (build (MINUS_EXPR, index_type, index,
4177 convert (sizetype, low_bound)));
742920c7
RK
4178
4179 if (TREE_CODE (index) != INTEGER_CST
4180 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4181 {
4182 /* Nonconstant array index or nonconstant element size.
4183 Generate the tree for *(&array+index) and expand that,
4184 except do it in a language-independent way
4185 and don't complain about non-lvalue arrays.
4186 `mark_addressable' should already have been called
4187 for any array for which this case will be reached. */
4188
4189 /* Don't forget the const or volatile flag from the array
4190 element. */
4191 tree variant_type = build_type_variant (type,
4192 TREE_READONLY (exp),
4193 TREE_THIS_VOLATILE (exp));
4194 tree array_adr = build1 (ADDR_EXPR,
4195 build_pointer_type (variant_type), array);
4196 tree elt;
b50d17a1 4197 tree size = size_in_bytes (type);
742920c7
RK
4198
4199 /* Convert the integer argument to a type the same size as a
4200 pointer so the multiply won't overflow spuriously. */
4201 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4202 index = convert (type_for_size (POINTER_SIZE, 0), index);
4203
b50d17a1
RK
4204 if (TREE_CODE (size) != INTEGER_CST
4205 && contains_placeholder_p (size))
4206 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4207
742920c7
RK
4208 /* Don't think the address has side effects
4209 just because the array does.
4210 (In some cases the address might have side effects,
4211 and we fail to record that fact here. However, it should not
4212 matter, since expand_expr should not care.) */
4213 TREE_SIDE_EFFECTS (array_adr) = 0;
4214
4215 elt = build1 (INDIRECT_REF, type,
4216 fold (build (PLUS_EXPR,
4217 TYPE_POINTER_TO (variant_type),
4218 array_adr,
4219 fold (build (MULT_EXPR,
4220 TYPE_POINTER_TO (variant_type),
b50d17a1 4221 index, size)))));
742920c7
RK
4222
4223 /* Volatility, etc., of new expression is same as old
4224 expression. */
4225 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4226 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4227 TREE_READONLY (elt) = TREE_READONLY (exp);
4228
4229 return expand_expr (elt, target, tmode, modifier);
4230 }
4231
4232 /* Fold an expression like: "foo"[2].
4233 This is not done in fold so it won't happen inside &. */
4234
4235 if (TREE_CODE (array) == STRING_CST
4236 && TREE_CODE (index) == INTEGER_CST
4237 && !TREE_INT_CST_HIGH (index)
4238 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
bbf6f052 4239 {
742920c7 4240 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
bbf6f052 4241 {
742920c7 4242 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
bbf6f052
RK
4243 TREE_TYPE (exp) = integer_type_node;
4244 return expand_expr (exp, target, tmode, modifier);
4245 }
742920c7 4246 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
bbf6f052 4247 {
742920c7 4248 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
bbf6f052 4249 TREE_TYPE (exp) = integer_type_node;
742920c7
RK
4250 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
4251 exp),
4252 target, tmode, modifier);
bbf6f052
RK
4253 }
4254 }
bbf6f052 4255
742920c7
RK
4256 /* If this is a constant index into a constant array,
4257 just get the value from the array. Handle both the cases when
4258 we have an explicit constructor and when our operand is a variable
4259 that was declared const. */
4af3895e 4260
742920c7
RK
4261 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4262 {
4263 if (TREE_CODE (index) == INTEGER_CST
4264 && TREE_INT_CST_HIGH (index) == 0)
4265 {
4266 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4267
4268 i = TREE_INT_CST_LOW (index);
4269 while (elem && i--)
4270 elem = TREE_CHAIN (elem);
4271 if (elem)
4272 return expand_expr (fold (TREE_VALUE (elem)), target,
4273 tmode, modifier);
4274 }
4275 }
4af3895e 4276
742920c7
RK
4277 else if (optimize >= 1
4278 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4279 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4280 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4281 {
4282 if (TREE_CODE (index) == INTEGER_CST
4283 && TREE_INT_CST_HIGH (index) == 0)
4284 {
4285 tree init = DECL_INITIAL (array);
4286
4287 i = TREE_INT_CST_LOW (index);
4288 if (TREE_CODE (init) == CONSTRUCTOR)
4289 {
4290 tree elem = CONSTRUCTOR_ELTS (init);
4291
03dc44a6
RS
4292 while (elem
4293 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
4294 elem = TREE_CHAIN (elem);
4295 if (elem)
4296 return expand_expr (fold (TREE_VALUE (elem)), target,
4297 tmode, modifier);
4298 }
4299 else if (TREE_CODE (init) == STRING_CST
4300 && i < TREE_STRING_LENGTH (init))
4301 {
4302 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
4303 return convert_to_mode (mode, temp, 0);
4304 }
4305 }
4306 }
4307 }
8c8a8e34 4308
bbf6f052
RK
4309 /* Treat array-ref with constant index as a component-ref. */
4310
4311 case COMPONENT_REF:
4312 case BIT_FIELD_REF:
4af3895e
JVA
4313 /* If the operand is a CONSTRUCTOR, we can just extract the
4314 appropriate field if it is present. */
4315 if (code != ARRAY_REF
4316 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4317 {
4318 tree elt;
4319
4320 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4321 elt = TREE_CHAIN (elt))
4322 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4323 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4324 }
4325
bbf6f052
RK
4326 {
4327 enum machine_mode mode1;
4328 int bitsize;
4329 int bitpos;
7bb0943f 4330 tree offset;
bbf6f052 4331 int volatilep = 0;
7bb0943f 4332 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
bbf6f052 4333 &mode1, &unsignedp, &volatilep);
034f9101 4334 int alignment;
bbf6f052 4335
e7f3c83f
RK
4336 /* If we got back the original object, something is wrong. Perhaps
4337 we are evaluating an expression too early. In any event, don't
4338 infinitely recurse. */
4339 if (tem == exp)
4340 abort ();
4341
bbf6f052
RK
4342 /* In some cases, we will be offsetting OP0's address by a constant.
4343 So get it as a sum, if possible. If we will be using it
4344 directly in an insn, we validate it. */
906c4e36 4345 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052 4346
8c8a8e34 4347 /* If this is a constant, put it into a register if it is a
8008b228 4348 legitimate constant and memory if it isn't. */
8c8a8e34
JW
4349 if (CONSTANT_P (op0))
4350 {
4351 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 4352 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
4353 op0 = force_reg (mode, op0);
4354 else
4355 op0 = validize_mem (force_const_mem (mode, op0));
4356 }
4357
034f9101 4358 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
7bb0943f
RS
4359 if (offset != 0)
4360 {
906c4e36 4361 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
4362
4363 if (GET_CODE (op0) != MEM)
4364 abort ();
4365 op0 = change_address (op0, VOIDmode,
4366 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4367 force_reg (Pmode, offset_rtx)));
034f9101
RS
4368 /* If we have a variable offset, the known alignment
4369 is only that of the innermost structure containing the field.
4370 (Actually, we could sometimes do better by using the
4371 size of an element of the innermost array, but no need.) */
4372 if (TREE_CODE (exp) == COMPONENT_REF
4373 || TREE_CODE (exp) == BIT_FIELD_REF)
4374 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4375 / BITS_PER_UNIT);
7bb0943f
RS
4376 }
4377
bbf6f052
RK
4378 /* Don't forget about volatility even if this is a bitfield. */
4379 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4380 {
4381 op0 = copy_rtx (op0);
4382 MEM_VOLATILE_P (op0) = 1;
4383 }
4384
ccc98036
RS
4385 /* In cases where an aligned union has an unaligned object
4386 as a field, we might be extracting a BLKmode value from
4387 an integer-mode (e.g., SImode) object. Handle this case
4388 by doing the extract into an object as wide as the field
4389 (which we know to be the width of a basic mode), then
4390 storing into memory, and changing the mode to BLKmode. */
bbf6f052 4391 if (mode1 == VOIDmode
0bba3f6f
RK
4392 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4393 && modifier != EXPAND_CONST_ADDRESS
4394 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
ccc98036
RS
4395 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4396 /* If the field isn't aligned enough to fetch as a memref,
4397 fetch it as a bit field. */
4398 || (STRICT_ALIGNMENT
4399 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4400 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4401 {
bbf6f052
RK
4402 enum machine_mode ext_mode = mode;
4403
4404 if (ext_mode == BLKmode)
4405 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4406
4407 if (ext_mode == BLKmode)
4408 abort ();
4409
4410 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4411 unsignedp, target, ext_mode, ext_mode,
034f9101 4412 alignment,
bbf6f052
RK
4413 int_size_in_bytes (TREE_TYPE (tem)));
4414 if (mode == BLKmode)
4415 {
4416 rtx new = assign_stack_temp (ext_mode,
4417 bitsize / BITS_PER_UNIT, 0);
4418
4419 emit_move_insn (new, op0);
4420 op0 = copy_rtx (new);
4421 PUT_MODE (op0, BLKmode);
092dded9 4422 MEM_IN_STRUCT_P (op0) = 1;
bbf6f052
RK
4423 }
4424
4425 return op0;
4426 }
4427
4428 /* Get a reference to just this component. */
4429 if (modifier == EXPAND_CONST_ADDRESS
4430 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4431 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4432 (bitpos / BITS_PER_UNIT)));
4433 else
4434 op0 = change_address (op0, mode1,
4435 plus_constant (XEXP (op0, 0),
4436 (bitpos / BITS_PER_UNIT)));
4437 MEM_IN_STRUCT_P (op0) = 1;
4438 MEM_VOLATILE_P (op0) |= volatilep;
4439 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4440 return op0;
4441 if (target == 0)
4442 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4443 convert_move (target, op0, unsignedp);
4444 return target;
4445 }
4446
4447 case OFFSET_REF:
4448 {
da120c2f 4449 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
bbf6f052 4450 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
906c4e36 4451 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
4452 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4453 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 4454 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
89742723 4455#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
bbf6f052
RK
4456 a location is accessed through a pointer to const does not mean
4457 that the value there can never change. */
4458 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4459#endif
4460 return temp;
4461 }
4462
4463 /* Intended for a reference to a buffer of a file-object in Pascal.
4464 But it's not certain that a special tree code will really be
4465 necessary for these. INDIRECT_REF might work for them. */
4466 case BUFFER_REF:
4467 abort ();
4468
7308a047
RS
4469 /* IN_EXPR: Inlined pascal set IN expression.
4470
4471 Algorithm:
4472 rlo = set_low - (set_low%bits_per_word);
4473 the_word = set [ (index - rlo)/bits_per_word ];
4474 bit_index = index % bits_per_word;
4475 bitmask = 1 << bit_index;
4476 return !!(the_word & bitmask); */
4477 case IN_EXPR:
4478 preexpand_calls (exp);
4479 {
4480 tree set = TREE_OPERAND (exp, 0);
4481 tree index = TREE_OPERAND (exp, 1);
4482 tree set_type = TREE_TYPE (set);
4483
4484 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4485 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4486
4487 rtx index_val;
4488 rtx lo_r;
4489 rtx hi_r;
4490 rtx rlow;
4491 rtx diff, quo, rem, addr, bit, result;
4492 rtx setval, setaddr;
4493 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4494
4495 if (target == 0)
17938e57 4496 target = gen_reg_rtx (mode);
7308a047
RS
4497
4498 /* If domain is empty, answer is no. */
4499 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4500 return const0_rtx;
4501
4502 index_val = expand_expr (index, 0, VOIDmode, 0);
4503 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4504 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4505 setval = expand_expr (set, 0, VOIDmode, 0);
4506 setaddr = XEXP (setval, 0);
4507
4508 /* Compare index against bounds, if they are constant. */
4509 if (GET_CODE (index_val) == CONST_INT
17938e57
RK
4510 && GET_CODE (lo_r) == CONST_INT
4511 && INTVAL (index_val) < INTVAL (lo_r))
4512 return const0_rtx;
7308a047
RS
4513
4514 if (GET_CODE (index_val) == CONST_INT
17938e57
RK
4515 && GET_CODE (hi_r) == CONST_INT
4516 && INTVAL (hi_r) < INTVAL (index_val))
4517 return const0_rtx;
7308a047
RS
4518
4519 /* If we get here, we have to generate the code for both cases
4520 (in range and out of range). */
4521
4522 op0 = gen_label_rtx ();
4523 op1 = gen_label_rtx ();
4524
4525 if (! (GET_CODE (index_val) == CONST_INT
4526 && GET_CODE (lo_r) == CONST_INT))
4527 {
17938e57
RK
4528 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4529 GET_MODE (index_val), 0, 0);
7308a047
RS
4530 emit_jump_insn (gen_blt (op1));
4531 }
4532
4533 if (! (GET_CODE (index_val) == CONST_INT
4534 && GET_CODE (hi_r) == CONST_INT))
4535 {
17938e57
RK
4536 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4537 GET_MODE (index_val), 0, 0);
7308a047
RS
4538 emit_jump_insn (gen_bgt (op1));
4539 }
4540
4541 /* Calculate the element number of bit zero in the first word
4542 of the set. */
4543 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
4544 rlow = GEN_INT (INTVAL (lo_r)
4545 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 4546 else
17938e57
RK
4547 rlow = expand_binop (index_mode, and_optab, lo_r,
4548 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4549 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7308a047
RS
4550
4551 diff = expand_binop (index_mode, sub_optab,
17938e57 4552 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
7308a047
RS
4553
4554 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
17938e57 4555 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
7308a047 4556 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
17938e57 4557 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
7308a047
RS
4558 addr = memory_address (byte_mode,
4559 expand_binop (index_mode, add_optab,
17938e57
RK
4560 diff, setaddr, NULL_RTX, 0,
4561 OPTAB_LIB_WIDEN));
7308a047
RS
4562 /* Extract the bit we want to examine */
4563 bit = expand_shift (RSHIFT_EXPR, byte_mode,
17938e57
RK
4564 gen_rtx (MEM, byte_mode, addr),
4565 make_tree (TREE_TYPE (index), rem),
4566 NULL_RTX, 1);
4567 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4568 GET_MODE (target) == byte_mode ? target : 0,
7308a047 4569 1, OPTAB_LIB_WIDEN);
17938e57
RK
4570
4571 if (result != target)
4572 convert_move (target, result, 1);
7308a047
RS
4573
4574 /* Output the code to handle the out-of-range case. */
4575 emit_jump (op0);
4576 emit_label (op1);
4577 emit_move_insn (target, const0_rtx);
4578 emit_label (op0);
4579 return target;
4580 }
4581
bbf6f052
RK
4582 case WITH_CLEANUP_EXPR:
4583 if (RTL_EXPR_RTL (exp) == 0)
4584 {
4585 RTL_EXPR_RTL (exp)
4586 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
906c4e36
RK
4587 cleanups_this_call
4588 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
bbf6f052
RK
4589 /* That's it for this cleanup. */
4590 TREE_OPERAND (exp, 2) = 0;
4591 }
4592 return RTL_EXPR_RTL (exp);
4593
4594 case CALL_EXPR:
4595 /* Check for a built-in function. */
4596 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4597 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4598 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4599 return expand_builtin (exp, target, subtarget, tmode, ignore);
4600 /* If this call was expanded already by preexpand_calls,
4601 just return the result we got. */
4602 if (CALL_EXPR_RTL (exp) != 0)
4603 return CALL_EXPR_RTL (exp);
8129842c 4604 return expand_call (exp, target, ignore);
bbf6f052
RK
4605
4606 case NON_LVALUE_EXPR:
4607 case NOP_EXPR:
4608 case CONVERT_EXPR:
4609 case REFERENCE_EXPR:
bbf6f052
RK
4610 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4611 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4612 if (TREE_CODE (type) == UNION_TYPE)
4613 {
4614 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4615 if (target == 0)
4616 {
4617 if (mode == BLKmode)
4618 {
4619 if (TYPE_SIZE (type) == 0
4620 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4621 abort ();
4622 target = assign_stack_temp (BLKmode,
4623 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4624 + BITS_PER_UNIT - 1)
4625 / BITS_PER_UNIT, 0);
4626 }
4627 else
4628 target = gen_reg_rtx (mode);
4629 }
4630 if (GET_CODE (target) == MEM)
4631 /* Store data into beginning of memory target. */
4632 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
4633 change_address (target, TYPE_MODE (valtype), 0), 0);
4634
bbf6f052
RK
4635 else if (GET_CODE (target) == REG)
4636 /* Store this field into a union of the proper type. */
4637 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4638 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4639 VOIDmode, 0, 1,
4640 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4641 else
4642 abort ();
4643
4644 /* Return the entire union. */
4645 return target;
4646 }
1499e0a8 4647 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
4648 if (GET_MODE (op0) == mode)
4649 return op0;
4650 /* If arg is a constant integer being extended from a narrower mode,
4651 we must really truncate to get the extended bits right. Otherwise
4652 (unsigned long) (unsigned char) ("\377"[0])
4653 would come out as ffffffff. */
4654 if (GET_MODE (op0) == VOIDmode
4655 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4656 < GET_MODE_BITSIZE (mode)))
4657 {
4658 /* MODE must be narrower than HOST_BITS_PER_INT. */
4659 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4660
4661 if (width < HOST_BITS_PER_WIDE_INT)
4662 {
4663 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4664 : CONST_DOUBLE_LOW (op0));
4665 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4666 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4667 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4668 else
4669 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4670
4671 op0 = GEN_INT (val);
4672 }
4673 else
4674 {
4675 op0 = (simplify_unary_operation
4676 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4677 ? ZERO_EXTEND : SIGN_EXTEND),
4678 mode, op0,
4679 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4680 if (op0 == 0)
4681 abort ();
4682 }
4683 }
4684 if (GET_MODE (op0) == VOIDmode)
bbf6f052 4685 return op0;
26fcb35a
RS
4686 if (modifier == EXPAND_INITIALIZER)
4687 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
bbf6f052
RK
4688 if (flag_force_mem && GET_CODE (op0) == MEM)
4689 op0 = copy_to_reg (op0);
4690
4691 if (target == 0)
4692 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4693 else
4694 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4695 return target;
4696
4697 case PLUS_EXPR:
4698 /* We come here from MINUS_EXPR when the second operand is a constant. */
4699 plus_expr:
4700 this_optab = add_optab;
4701
4702 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4703 something else, make sure we add the register to the constant and
4704 then to the other thing. This case can occur during strength
4705 reduction and doing it this way will produce better code if the
4706 frame pointer or argument pointer is eliminated.
4707
4708 fold-const.c will ensure that the constant is always in the inner
4709 PLUS_EXPR, so the only case we need to do anything about is if
4710 sp, ap, or fp is our second argument, in which case we must swap
4711 the innermost first argument and our second argument. */
4712
4713 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4714 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4715 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4716 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4717 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4718 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4719 {
4720 tree t = TREE_OPERAND (exp, 1);
4721
4722 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4723 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4724 }
4725
4726 /* If the result is to be Pmode and we are adding an integer to
4727 something, we might be forming a constant. So try to use
4728 plus_constant. If it produces a sum and we can't accept it,
4729 use force_operand. This allows P = &ARR[const] to generate
4730 efficient code on machines where a SYMBOL_REF is not a valid
4731 address.
4732
4733 If this is an EXPAND_SUM call, always return the sum. */
c980ac49
RS
4734 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4735 || mode == Pmode)
bbf6f052 4736 {
c980ac49
RS
4737 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4738 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4739 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4740 {
4741 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4742 EXPAND_SUM);
4743 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4744 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4745 op1 = force_operand (op1, target);
4746 return op1;
4747 }
bbf6f052 4748
c980ac49
RS
4749 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4750 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4751 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4752 {
4753 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4754 EXPAND_SUM);
4755 if (! CONSTANT_P (op0))
4756 {
4757 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4758 VOIDmode, modifier);
709f5be1
RS
4759 /* Don't go to both_summands if modifier
4760 says it's not right to return a PLUS. */
4761 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4762 goto binop2;
c980ac49
RS
4763 goto both_summands;
4764 }
4765 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4766 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4767 op0 = force_operand (op0, target);
4768 return op0;
4769 }
bbf6f052
RK
4770 }
4771
4772 /* No sense saving up arithmetic to be done
4773 if it's all in the wrong mode to form part of an address.
4774 And force_operand won't know whether to sign-extend or
4775 zero-extend. */
4776 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
c980ac49
RS
4777 || mode != Pmode)
4778 goto binop;
bbf6f052
RK
4779
4780 preexpand_calls (exp);
4781 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4782 subtarget = 0;
4783
4784 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
906c4e36 4785 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
bbf6f052 4786
c980ac49 4787 both_summands:
bbf6f052
RK
4788 /* Make sure any term that's a sum with a constant comes last. */
4789 if (GET_CODE (op0) == PLUS
4790 && CONSTANT_P (XEXP (op0, 1)))
4791 {
4792 temp = op0;
4793 op0 = op1;
4794 op1 = temp;
4795 }
4796 /* If adding to a sum including a constant,
4797 associate it to put the constant outside. */
4798 if (GET_CODE (op1) == PLUS
4799 && CONSTANT_P (XEXP (op1, 1)))
4800 {
4801 rtx constant_term = const0_rtx;
4802
4803 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4804 if (temp != 0)
4805 op0 = temp;
6f90e075
JW
4806 /* Ensure that MULT comes first if there is one. */
4807 else if (GET_CODE (op0) == MULT)
4808 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
bbf6f052
RK
4809 else
4810 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4811
4812 /* Let's also eliminate constants from op0 if possible. */
4813 op0 = eliminate_constant_term (op0, &constant_term);
4814
4815 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4816 their sum should be a constant. Form it into OP1, since the
4817 result we want will then be OP0 + OP1. */
4818
4819 temp = simplify_binary_operation (PLUS, mode, constant_term,
4820 XEXP (op1, 1));
4821 if (temp != 0)
4822 op1 = temp;
4823 else
4824 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4825 }
4826
4827 /* Put a constant term last and put a multiplication first. */
4828 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4829 temp = op1, op1 = op0, op0 = temp;
4830
4831 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4832 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4833
4834 case MINUS_EXPR:
4835 /* Handle difference of two symbolic constants,
4836 for the sake of an initializer. */
4837 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4838 && really_constant_p (TREE_OPERAND (exp, 0))
4839 && really_constant_p (TREE_OPERAND (exp, 1)))
4840 {
906c4e36
RK
4841 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4842 VOIDmode, modifier);
4843 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4844 VOIDmode, modifier);
bbf6f052
RK
4845 return gen_rtx (MINUS, mode, op0, op1);
4846 }
4847 /* Convert A - const to A + (-const). */
4848 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4849 {
4850 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4851 fold (build1 (NEGATE_EXPR, type,
4852 TREE_OPERAND (exp, 1))));
4853 goto plus_expr;
4854 }
4855 this_optab = sub_optab;
4856 goto binop;
4857
4858 case MULT_EXPR:
4859 preexpand_calls (exp);
4860 /* If first operand is constant, swap them.
4861 Thus the following special case checks need only
4862 check the second operand. */
4863 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4864 {
4865 register tree t1 = TREE_OPERAND (exp, 0);
4866 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4867 TREE_OPERAND (exp, 1) = t1;
4868 }
4869
4870 /* Attempt to return something suitable for generating an
4871 indexed address, for machines that support that. */
4872
4873 if (modifier == EXPAND_SUM && mode == Pmode
4874 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 4875 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
4876 {
4877 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4878
4879 /* Apply distributive law if OP0 is x+c. */
4880 if (GET_CODE (op0) == PLUS
4881 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4882 return gen_rtx (PLUS, mode,
4883 gen_rtx (MULT, mode, XEXP (op0, 0),
906c4e36
RK
4884 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4885 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4886 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
4887
4888 if (GET_CODE (op0) != REG)
906c4e36 4889 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
4890 if (GET_CODE (op0) != REG)
4891 op0 = copy_to_mode_reg (mode, op0);
4892
4893 return gen_rtx (MULT, mode, op0,
906c4e36 4894 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
4895 }
4896
4897 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4898 subtarget = 0;
4899
4900 /* Check for multiplying things that have been extended
4901 from a narrower type. If this machine supports multiplying
4902 in that narrower type with a result in the desired type,
4903 do it that way, and avoid the explicit type-conversion. */
4904 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4905 && TREE_CODE (type) == INTEGER_TYPE
4906 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4907 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4908 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4909 && int_fits_type_p (TREE_OPERAND (exp, 1),
4910 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4911 /* Don't use a widening multiply if a shift will do. */
4912 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 4913 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
4914 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4915 ||
4916 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4917 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4918 ==
4919 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4920 /* If both operands are extended, they must either both
4921 be zero-extended or both be sign-extended. */
4922 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4923 ==
4924 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4925 {
4926 enum machine_mode innermode
4927 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4928 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4929 ? umul_widen_optab : smul_widen_optab);
4930 if (mode == GET_MODE_WIDER_MODE (innermode)
4931 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4932 {
4933 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
906c4e36 4934 NULL_RTX, VOIDmode, 0);
bbf6f052 4935 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
906c4e36
RK
4936 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4937 VOIDmode, 0);
bbf6f052
RK
4938 else
4939 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
906c4e36 4940 NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4941 goto binop2;
4942 }
4943 }
4944 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4945 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4946 return expand_mult (mode, op0, op1, target, unsignedp);
4947
4948 case TRUNC_DIV_EXPR:
4949 case FLOOR_DIV_EXPR:
4950 case CEIL_DIV_EXPR:
4951 case ROUND_DIV_EXPR:
4952 case EXACT_DIV_EXPR:
4953 preexpand_calls (exp);
4954 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4955 subtarget = 0;
4956 /* Possible optimization: compute the dividend with EXPAND_SUM
4957 then if the divisor is constant can optimize the case
4958 where some terms of the dividend have coeffs divisible by it. */
4959 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4960 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4961 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4962
4963 case RDIV_EXPR:
4964 this_optab = flodiv_optab;
4965 goto binop;
4966
4967 case TRUNC_MOD_EXPR:
4968 case FLOOR_MOD_EXPR:
4969 case CEIL_MOD_EXPR:
4970 case ROUND_MOD_EXPR:
4971 preexpand_calls (exp);
4972 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4973 subtarget = 0;
4974 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4975 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4976 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4977
4978 case FIX_ROUND_EXPR:
4979 case FIX_FLOOR_EXPR:
4980 case FIX_CEIL_EXPR:
4981 abort (); /* Not used for C. */
4982
4983 case FIX_TRUNC_EXPR:
906c4e36 4984 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4985 if (target == 0)
4986 target = gen_reg_rtx (mode);
4987 expand_fix (target, op0, unsignedp);
4988 return target;
4989
4990 case FLOAT_EXPR:
906c4e36 4991 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4992 if (target == 0)
4993 target = gen_reg_rtx (mode);
4994 /* expand_float can't figure out what to do if FROM has VOIDmode.
4995 So give it the correct mode. With -O, cse will optimize this. */
4996 if (GET_MODE (op0) == VOIDmode)
4997 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4998 op0);
4999 expand_float (target, op0,
5000 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5001 return target;
5002
5003 case NEGATE_EXPR:
5004 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5005 temp = expand_unop (mode, neg_optab, op0, target, 0);
5006 if (temp == 0)
5007 abort ();
5008 return temp;
5009
5010 case ABS_EXPR:
5011 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5012
2d7050fd
RS
5013 /* Handle complex values specially. */
5014 {
5015 enum machine_mode opmode
5016 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5017
5018 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
5019 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
5020 return expand_complex_abs (opmode, op0, target, unsignedp);
5021 }
5022
bbf6f052
RK
5023 /* Unsigned abs is simply the operand. Testing here means we don't
5024 risk generating incorrect code below. */
5025 if (TREE_UNSIGNED (type))
5026 return op0;
5027
5028 /* First try to do it with a special abs instruction. */
5029 temp = expand_unop (mode, abs_optab, op0, target, 0);
5030 if (temp != 0)
5031 return temp;
5032
5033 /* If this machine has expensive jumps, we can do integer absolute
5034 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5035 where W is the width of MODE. */
5036
5037 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5038 {
5039 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5040 size_int (GET_MODE_BITSIZE (mode) - 1),
906c4e36 5041 NULL_RTX, 0);
bbf6f052
RK
5042
5043 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5044 OPTAB_LIB_WIDEN);
5045 if (temp != 0)
5046 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5047 OPTAB_LIB_WIDEN);
5048
5049 if (temp != 0)
5050 return temp;
5051 }
5052
5053 /* If that does not win, use conditional jump and negate. */
5054 target = original_target;
5055 temp = gen_label_rtx ();
5056 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
37568125 5057 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
bbf6f052
RK
5058 || (GET_CODE (target) == REG
5059 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5060 target = gen_reg_rtx (mode);
5061 emit_move_insn (target, op0);
5062 emit_cmp_insn (target,
5063 expand_expr (convert (type, integer_zero_node),
906c4e36
RK
5064 NULL_RTX, VOIDmode, 0),
5065 GE, NULL_RTX, mode, 0, 0);
bbf6f052
RK
5066 NO_DEFER_POP;
5067 emit_jump_insn (gen_bge (temp));
5068 op0 = expand_unop (mode, neg_optab, target, target, 0);
5069 if (op0 != target)
5070 emit_move_insn (target, op0);
5071 emit_label (temp);
5072 OK_DEFER_POP;
5073 return target;
5074
5075 case MAX_EXPR:
5076 case MIN_EXPR:
5077 target = original_target;
5078 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
fc155707 5079 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
bbf6f052
RK
5080 || (GET_CODE (target) == REG
5081 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5082 target = gen_reg_rtx (mode);
906c4e36 5083 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5084 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5085
5086 /* First try to do it with a special MIN or MAX instruction.
5087 If that does not win, use a conditional jump to select the proper
5088 value. */
5089 this_optab = (TREE_UNSIGNED (type)
5090 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5091 : (code == MIN_EXPR ? smin_optab : smax_optab));
5092
5093 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5094 OPTAB_WIDEN);
5095 if (temp != 0)
5096 return temp;
5097
ee456b1c
RK
5098 if (target != op0)
5099 emit_move_insn (target, op0);
bbf6f052 5100 op0 = gen_label_rtx ();
f81497d9
RS
5101 /* If this mode is an integer too wide to compare properly,
5102 compare word by word. Rely on cse to optimize constant cases. */
5103 if (GET_MODE_CLASS (mode) == MODE_INT
5104 && !can_compare_p (mode))
bbf6f052 5105 {
f81497d9 5106 if (code == MAX_EXPR)
ee456b1c 5107 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
bbf6f052 5108 else
ee456b1c
RK
5109 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
5110 emit_move_insn (target, op1);
bbf6f052 5111 }
f81497d9
RS
5112 else
5113 {
5114 if (code == MAX_EXPR)
5115 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
5116 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5117 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
f81497d9
RS
5118 else
5119 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
5120 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5121 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
f81497d9 5122 if (temp == const0_rtx)
ee456b1c 5123 emit_move_insn (target, op1);
f81497d9
RS
5124 else if (temp != const_true_rtx)
5125 {
5126 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5127 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5128 else
5129 abort ();
ee456b1c 5130 emit_move_insn (target, op1);
f81497d9
RS
5131 }
5132 }
bbf6f052
RK
5133 emit_label (op0);
5134 return target;
5135
5136/* ??? Can optimize when the operand of this is a bitwise operation,
5137 by using a different bitwise operation. */
5138 case BIT_NOT_EXPR:
5139 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5140 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5141 if (temp == 0)
5142 abort ();
5143 return temp;
5144
5145 case FFS_EXPR:
5146 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5147 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5148 if (temp == 0)
5149 abort ();
5150 return temp;
5151
5152/* ??? Can optimize bitwise operations with one arg constant.
5153 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5154 and (a bitwise1 b) bitwise2 b (etc)
5155 but that is probably not worth while. */
5156
5157/* BIT_AND_EXPR is for bitwise anding.
5158 TRUTH_AND_EXPR is for anding two boolean values
5159 when we want in all cases to compute both of them.
5160 In general it is fastest to do TRUTH_AND_EXPR by
5161 computing both operands as actual zero-or-1 values
5162 and then bitwise anding. In cases where there cannot
5163 be any side effects, better code would be made by
5164 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5165 but the question is how to recognize those cases. */
5166
b258707c
RS
5167 /* TRUTH_AND_EXPR can have a result whose mode doesn't match
5168 the operands. If so, don't use our target. */
bbf6f052 5169 case TRUTH_AND_EXPR:
b258707c
RS
5170 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5171 subtarget = 0;
bbf6f052
RK
5172 case BIT_AND_EXPR:
5173 this_optab = and_optab;
5174 goto binop;
5175
5176/* See comment above about TRUTH_AND_EXPR; it applies here too. */
5177 case TRUTH_OR_EXPR:
b258707c
RS
5178 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5179 subtarget = 0;
bbf6f052
RK
5180 case BIT_IOR_EXPR:
5181 this_optab = ior_optab;
5182 goto binop;
5183
874726a8 5184 case TRUTH_XOR_EXPR:
b258707c
RS
5185 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5186 subtarget = 0;
bbf6f052
RK
5187 case BIT_XOR_EXPR:
5188 this_optab = xor_optab;
5189 goto binop;
5190
5191 case LSHIFT_EXPR:
5192 case RSHIFT_EXPR:
5193 case LROTATE_EXPR:
5194 case RROTATE_EXPR:
5195 preexpand_calls (exp);
5196 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5197 subtarget = 0;
5198 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5199 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5200 unsignedp);
5201
5202/* Could determine the answer when only additive constants differ.
5203 Also, the addition of one can be handled by changing the condition. */
5204 case LT_EXPR:
5205 case LE_EXPR:
5206 case GT_EXPR:
5207 case GE_EXPR:
5208 case EQ_EXPR:
5209 case NE_EXPR:
5210 preexpand_calls (exp);
5211 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5212 if (temp != 0)
5213 return temp;
5214 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5215 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5216 && original_target
5217 && GET_CODE (original_target) == REG
5218 && (GET_MODE (original_target)
5219 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5220 {
5221 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5222 if (temp != original_target)
5223 temp = copy_to_reg (temp);
5224 op1 = gen_label_rtx ();
906c4e36 5225 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
bbf6f052
RK
5226 GET_MODE (temp), unsignedp, 0);
5227 emit_jump_insn (gen_beq (op1));
5228 emit_move_insn (temp, const1_rtx);
5229 emit_label (op1);
5230 return temp;
5231 }
5232 /* If no set-flag instruction, must generate a conditional
5233 store into a temporary variable. Drop through
5234 and handle this like && and ||. */
5235
5236 case TRUTH_ANDIF_EXPR:
5237 case TRUTH_ORIF_EXPR:
e44842fe
RK
5238 if (! ignore
5239 && (target == 0 || ! safe_from_p (target, exp)
5240 /* Make sure we don't have a hard reg (such as function's return
5241 value) live across basic blocks, if not optimizing. */
5242 || (!optimize && GET_CODE (target) == REG
5243 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 5244 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
5245
5246 if (target)
5247 emit_clr_insn (target);
5248
bbf6f052
RK
5249 op1 = gen_label_rtx ();
5250 jumpifnot (exp, op1);
e44842fe
RK
5251
5252 if (target)
5253 emit_0_to_1_insn (target);
5254
bbf6f052 5255 emit_label (op1);
e44842fe 5256 return ignore ? const0_rtx : target;
bbf6f052
RK
5257
5258 case TRUTH_NOT_EXPR:
5259 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5260 /* The parser is careful to generate TRUTH_NOT_EXPR
5261 only with operands that are always zero or one. */
906c4e36 5262 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
5263 target, 1, OPTAB_LIB_WIDEN);
5264 if (temp == 0)
5265 abort ();
5266 return temp;
5267
5268 case COMPOUND_EXPR:
5269 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5270 emit_queue ();
5271 return expand_expr (TREE_OPERAND (exp, 1),
5272 (ignore ? const0_rtx : target),
5273 VOIDmode, 0);
5274
5275 case COND_EXPR:
5276 {
5277 /* Note that COND_EXPRs whose type is a structure or union
5278 are required to be constructed to contain assignments of
5279 a temporary variable, so that we can evaluate them here
5280 for side effect only. If type is void, we must do likewise. */
5281
5282 /* If an arm of the branch requires a cleanup,
5283 only that cleanup is performed. */
5284
5285 tree singleton = 0;
5286 tree binary_op = 0, unary_op = 0;
5287 tree old_cleanups = cleanups_this_call;
5288 cleanups_this_call = 0;
5289
5290 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5291 convert it to our mode, if necessary. */
5292 if (integer_onep (TREE_OPERAND (exp, 1))
5293 && integer_zerop (TREE_OPERAND (exp, 2))
5294 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5295 {
dd27116b
RK
5296 if (ignore)
5297 {
5298 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5299 modifier);
5300 return const0_rtx;
5301 }
5302
bbf6f052
RK
5303 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5304 if (GET_MODE (op0) == mode)
5305 return op0;
5306 if (target == 0)
5307 target = gen_reg_rtx (mode);
5308 convert_move (target, op0, unsignedp);
5309 return target;
5310 }
5311
5312 /* If we are not to produce a result, we have no target. Otherwise,
5313 if a target was specified use it; it will not be used as an
5314 intermediate target unless it is safe. If no target, use a
5315 temporary. */
5316
dd27116b 5317 if (ignore)
bbf6f052
RK
5318 temp = 0;
5319 else if (original_target
5320 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5321 temp = original_target;
5322 else if (mode == BLKmode)
5323 {
5324 if (TYPE_SIZE (type) == 0
5325 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5326 abort ();
673bc773 5327
bbf6f052
RK
5328 temp = assign_stack_temp (BLKmode,
5329 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5330 + BITS_PER_UNIT - 1)
5331 / BITS_PER_UNIT, 0);
673bc773
RS
5332 MEM_IN_STRUCT_P (temp)
5333 = (TREE_CODE (type) == RECORD_TYPE
5334 || TREE_CODE (type) == UNION_TYPE
5335 || TREE_CODE (type) == QUAL_UNION_TYPE
5336 || TREE_CODE (type) == ARRAY_TYPE);
bbf6f052
RK
5337 }
5338 else
5339 temp = gen_reg_rtx (mode);
5340
5341 /* Check for X ? A + B : A. If we have this, we can copy
5342 A to the output and conditionally add B. Similarly for unary
5343 operations. Don't do this if X has side-effects because
5344 those side effects might affect A or B and the "?" operation is
5345 a sequence point in ANSI. (We test for side effects later.) */
5346
5347 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5348 && operand_equal_p (TREE_OPERAND (exp, 2),
5349 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5350 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5351 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5352 && operand_equal_p (TREE_OPERAND (exp, 1),
5353 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5354 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5355 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5356 && operand_equal_p (TREE_OPERAND (exp, 2),
5357 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5358 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5359 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5360 && operand_equal_p (TREE_OPERAND (exp, 1),
5361 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5362 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5363
5364 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5365 operation, do this as A + (X != 0). Similarly for other simple
5366 binary operators. */
dd27116b 5367 if (temp && singleton && binary_op
bbf6f052
RK
5368 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5369 && (TREE_CODE (binary_op) == PLUS_EXPR
5370 || TREE_CODE (binary_op) == MINUS_EXPR
5371 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5372 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5373 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5374 && integer_onep (TREE_OPERAND (binary_op, 1))
5375 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5376 {
5377 rtx result;
5378 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5379 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5380 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5381 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5382 : and_optab);
5383
5384 /* If we had X ? A : A + 1, do this as A + (X == 0).
5385
5386 We have to invert the truth value here and then put it
5387 back later if do_store_flag fails. We cannot simply copy
5388 TREE_OPERAND (exp, 0) to another variable and modify that
5389 because invert_truthvalue can modify the tree pointed to
5390 by its argument. */
5391 if (singleton == TREE_OPERAND (exp, 1))
5392 TREE_OPERAND (exp, 0)
5393 = invert_truthvalue (TREE_OPERAND (exp, 0));
5394
5395 result = do_store_flag (TREE_OPERAND (exp, 0),
906c4e36
RK
5396 (safe_from_p (temp, singleton)
5397 ? temp : NULL_RTX),
bbf6f052
RK
5398 mode, BRANCH_COST <= 1);
5399
5400 if (result)
5401 {
906c4e36 5402 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5403 return expand_binop (mode, boptab, op1, result, temp,
5404 unsignedp, OPTAB_LIB_WIDEN);
5405 }
5406 else if (singleton == TREE_OPERAND (exp, 1))
5407 TREE_OPERAND (exp, 0)
5408 = invert_truthvalue (TREE_OPERAND (exp, 0));
5409 }
5410
5411 NO_DEFER_POP;
5412 op0 = gen_label_rtx ();
5413
5414 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5415 {
5416 if (temp != 0)
5417 {
5418 /* If the target conflicts with the other operand of the
5419 binary op, we can't use it. Also, we can't use the target
5420 if it is a hard register, because evaluating the condition
5421 might clobber it. */
5422 if ((binary_op
5423 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5424 || (GET_CODE (temp) == REG
5425 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5426 temp = gen_reg_rtx (mode);
5427 store_expr (singleton, temp, 0);
5428 }
5429 else
906c4e36 5430 expand_expr (singleton,
2937cf87 5431 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5432 if (cleanups_this_call)
5433 {
5434 sorry ("aggregate value in COND_EXPR");
5435 cleanups_this_call = 0;
5436 }
5437 if (singleton == TREE_OPERAND (exp, 1))
5438 jumpif (TREE_OPERAND (exp, 0), op0);
5439 else
5440 jumpifnot (TREE_OPERAND (exp, 0), op0);
5441
5442 if (binary_op && temp == 0)
5443 /* Just touch the other operand. */
5444 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 5445 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5446 else if (binary_op)
5447 store_expr (build (TREE_CODE (binary_op), type,
5448 make_tree (type, temp),
5449 TREE_OPERAND (binary_op, 1)),
5450 temp, 0);
5451 else
5452 store_expr (build1 (TREE_CODE (unary_op), type,
5453 make_tree (type, temp)),
5454 temp, 0);
5455 op1 = op0;
5456 }
5457#if 0
5458 /* This is now done in jump.c and is better done there because it
5459 produces shorter register lifetimes. */
5460
5461 /* Check for both possibilities either constants or variables
5462 in registers (but not the same as the target!). If so, can
5463 save branches by assigning one, branching, and assigning the
5464 other. */
5465 else if (temp && GET_MODE (temp) != BLKmode
5466 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5467 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5468 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5469 && DECL_RTL (TREE_OPERAND (exp, 1))
5470 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5471 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5472 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5473 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5474 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5475 && DECL_RTL (TREE_OPERAND (exp, 2))
5476 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5477 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5478 {
5479 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5480 temp = gen_reg_rtx (mode);
5481 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5482 jumpifnot (TREE_OPERAND (exp, 0), op0);
5483 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5484 op1 = op0;
5485 }
5486#endif
5487 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5488 comparison operator. If we have one of these cases, set the
5489 output to A, branch on A (cse will merge these two references),
5490 then set the output to FOO. */
5491 else if (temp
5492 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5493 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5494 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5495 TREE_OPERAND (exp, 1), 0)
5496 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5497 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5498 {
5499 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5500 temp = gen_reg_rtx (mode);
5501 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5502 jumpif (TREE_OPERAND (exp, 0), op0);
5503 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5504 op1 = op0;
5505 }
5506 else if (temp
5507 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5508 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5509 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5510 TREE_OPERAND (exp, 2), 0)
5511 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5512 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5513 {
5514 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5515 temp = gen_reg_rtx (mode);
5516 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5517 jumpifnot (TREE_OPERAND (exp, 0), op0);
5518 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5519 op1 = op0;
5520 }
5521 else
5522 {
5523 op1 = gen_label_rtx ();
5524 jumpifnot (TREE_OPERAND (exp, 0), op0);
5525 if (temp != 0)
5526 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5527 else
906c4e36
RK
5528 expand_expr (TREE_OPERAND (exp, 1),
5529 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5530 if (cleanups_this_call)
5531 {
5532 sorry ("aggregate value in COND_EXPR");
5533 cleanups_this_call = 0;
5534 }
5535
5536 emit_queue ();
5537 emit_jump_insn (gen_jump (op1));
5538 emit_barrier ();
5539 emit_label (op0);
5540 if (temp != 0)
5541 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5542 else
906c4e36
RK
5543 expand_expr (TREE_OPERAND (exp, 2),
5544 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5545 }
5546
5547 if (cleanups_this_call)
5548 {
5549 sorry ("aggregate value in COND_EXPR");
5550 cleanups_this_call = 0;
5551 }
5552
5553 emit_queue ();
5554 emit_label (op1);
5555 OK_DEFER_POP;
5556 cleanups_this_call = old_cleanups;
5557 return temp;
5558 }
5559
5560 case TARGET_EXPR:
5561 {
5562 /* Something needs to be initialized, but we didn't know
5563 where that thing was when building the tree. For example,
5564 it could be the return value of a function, or a parameter
5565 to a function which lays down in the stack, or a temporary
5566 variable which must be passed by reference.
5567
5568 We guarantee that the expression will either be constructed
5569 or copied into our original target. */
5570
5571 tree slot = TREE_OPERAND (exp, 0);
5c062816 5572 tree exp1;
bbf6f052
RK
5573
5574 if (TREE_CODE (slot) != VAR_DECL)
5575 abort ();
5576
5577 if (target == 0)
5578 {
5579 if (DECL_RTL (slot) != 0)
ac993f4f
MS
5580 {
5581 target = DECL_RTL (slot);
5c062816 5582 /* If we have already expanded the slot, so don't do
ac993f4f 5583 it again. (mrs) */
5c062816
MS
5584 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5585 return target;
ac993f4f 5586 }
bbf6f052
RK
5587 else
5588 {
5589 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5590 /* All temp slots at this level must not conflict. */
5591 preserve_temp_slots (target);
5592 DECL_RTL (slot) = target;
5593 }
5594
5595#if 0
ac993f4f
MS
5596 /* I bet this needs to be done, and I bet that it needs to
5597 be above, inside the else clause. The reason is
5598 simple, how else is it going to get cleaned up? (mrs)
5599
5600 The reason is probably did not work before, and was
5601 commented out is because this was re-expanding already
5602 expanded target_exprs (target == 0 and DECL_RTL (slot)
5603 != 0) also cleaning them up many times as well. :-( */
5604
bbf6f052
RK
5605 /* Since SLOT is not known to the called function
5606 to belong to its stack frame, we must build an explicit
5607 cleanup. This case occurs when we must build up a reference
5608 to pass the reference as an argument. In this case,
5609 it is very likely that such a reference need not be
5610 built here. */
5611
5612 if (TREE_OPERAND (exp, 2) == 0)
5613 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5614 if (TREE_OPERAND (exp, 2))
906c4e36
RK
5615 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5616 cleanups_this_call);
bbf6f052
RK
5617#endif
5618 }
5619 else
5620 {
5621 /* This case does occur, when expanding a parameter which
5622 needs to be constructed on the stack. The target
5623 is the actual stack address that we want to initialize.
5624 The function we call will perform the cleanup in this case. */
5625
8c042b47
RS
5626 /* If we have already assigned it space, use that space,
5627 not target that we were passed in, as our target
5628 parameter is only a hint. */
5629 if (DECL_RTL (slot) != 0)
5630 {
5631 target = DECL_RTL (slot);
5632 /* If we have already expanded the slot, so don't do
5633 it again. (mrs) */
5634 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5635 return target;
5636 }
5637
bbf6f052
RK
5638 DECL_RTL (slot) = target;
5639 }
5640
5c062816
MS
5641 exp1 = TREE_OPERAND (exp, 1);
5642 /* Mark it as expanded. */
5643 TREE_OPERAND (exp, 1) = NULL_TREE;
5644
5645 return expand_expr (exp1, target, tmode, modifier);
bbf6f052
RK
5646 }
5647
5648 case INIT_EXPR:
5649 {
5650 tree lhs = TREE_OPERAND (exp, 0);
5651 tree rhs = TREE_OPERAND (exp, 1);
5652 tree noncopied_parts = 0;
5653 tree lhs_type = TREE_TYPE (lhs);
5654
5655 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5656 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5657 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5658 TYPE_NONCOPIED_PARTS (lhs_type));
5659 while (noncopied_parts != 0)
5660 {
5661 expand_assignment (TREE_VALUE (noncopied_parts),
5662 TREE_PURPOSE (noncopied_parts), 0, 0);
5663 noncopied_parts = TREE_CHAIN (noncopied_parts);
5664 }
5665 return temp;
5666 }
5667
5668 case MODIFY_EXPR:
5669 {
5670 /* If lhs is complex, expand calls in rhs before computing it.
5671 That's so we don't compute a pointer and save it over a call.
5672 If lhs is simple, compute it first so we can give it as a
5673 target if the rhs is just a call. This avoids an extra temp and copy
5674 and that prevents a partial-subsumption which makes bad code.
5675 Actually we could treat component_ref's of vars like vars. */
5676
5677 tree lhs = TREE_OPERAND (exp, 0);
5678 tree rhs = TREE_OPERAND (exp, 1);
5679 tree noncopied_parts = 0;
5680 tree lhs_type = TREE_TYPE (lhs);
5681
5682 temp = 0;
5683
5684 if (TREE_CODE (lhs) != VAR_DECL
5685 && TREE_CODE (lhs) != RESULT_DECL
5686 && TREE_CODE (lhs) != PARM_DECL)
5687 preexpand_calls (exp);
5688
5689 /* Check for |= or &= of a bitfield of size one into another bitfield
5690 of size 1. In this case, (unless we need the result of the
5691 assignment) we can do this more efficiently with a
5692 test followed by an assignment, if necessary.
5693
5694 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5695 things change so we do, this code should be enhanced to
5696 support it. */
5697 if (ignore
5698 && TREE_CODE (lhs) == COMPONENT_REF
5699 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5700 || TREE_CODE (rhs) == BIT_AND_EXPR)
5701 && TREE_OPERAND (rhs, 0) == lhs
5702 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5703 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5704 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5705 {
5706 rtx label = gen_label_rtx ();
5707
5708 do_jump (TREE_OPERAND (rhs, 1),
5709 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5710 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5711 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5712 (TREE_CODE (rhs) == BIT_IOR_EXPR
5713 ? integer_one_node
5714 : integer_zero_node)),
5715 0, 0);
e7c33f54 5716 do_pending_stack_adjust ();
bbf6f052
RK
5717 emit_label (label);
5718 return const0_rtx;
5719 }
5720
5721 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5722 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5723 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5724 TYPE_NONCOPIED_PARTS (lhs_type));
5725
5726 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5727 while (noncopied_parts != 0)
5728 {
5729 expand_assignment (TREE_PURPOSE (noncopied_parts),
5730 TREE_VALUE (noncopied_parts), 0, 0);
5731 noncopied_parts = TREE_CHAIN (noncopied_parts);
5732 }
5733 return temp;
5734 }
5735
5736 case PREINCREMENT_EXPR:
5737 case PREDECREMENT_EXPR:
5738 return expand_increment (exp, 0);
5739
5740 case POSTINCREMENT_EXPR:
5741 case POSTDECREMENT_EXPR:
5742 /* Faster to treat as pre-increment if result is not used. */
5743 return expand_increment (exp, ! ignore);
5744
5745 case ADDR_EXPR:
5746 /* Are we taking the address of a nested function? */
5747 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5748 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5749 {
5750 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5751 op0 = force_operand (op0, target);
5752 }
5753 else
5754 {
906c4e36 5755 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
bbf6f052
RK
5756 (modifier == EXPAND_INITIALIZER
5757 ? modifier : EXPAND_CONST_ADDRESS));
896102d0
RK
5758
5759 /* We would like the object in memory. If it is a constant,
5760 we can have it be statically allocated into memory. For
5761 a non-constant (REG or SUBREG), we need to allocate some
5762 memory and store the value into it. */
5763
5764 if (CONSTANT_P (op0))
5765 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5766 op0);
5767
b6f01001
RS
5768 /* These cases happen in Fortran. Is that legitimate?
5769 Should Fortran work in another way?
5770 Do they happen in C? */
5771 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5772 || GET_CODE (op0) == CONCAT)
896102d0
RK
5773 {
5774 /* If this object is in a register, it must be not
5775 be BLKmode. */
5776 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5777 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5778 rtx memloc
5779 = assign_stack_temp (inner_mode,
5780 int_size_in_bytes (inner_type), 1);
5781
5782 emit_move_insn (memloc, op0);
5783 op0 = memloc;
5784 }
5785
bbf6f052
RK
5786 if (GET_CODE (op0) != MEM)
5787 abort ();
5788
5789 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5790 return XEXP (op0, 0);
5791 op0 = force_operand (XEXP (op0, 0), target);
5792 }
5793 if (flag_force_addr && GET_CODE (op0) != REG)
5794 return force_reg (Pmode, op0);
5795 return op0;
5796
5797 case ENTRY_VALUE_EXPR:
5798 abort ();
5799
7308a047
RS
5800 /* COMPLEX type for Extended Pascal & Fortran */
5801 case COMPLEX_EXPR:
5802 {
5803 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5804
5805 rtx prev;
5806
5807 /* Get the rtx code of the operands. */
5808 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5809 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5810
5811 if (! target)
5812 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5813
5814 prev = get_last_insn ();
5815
5816 /* Tell flow that the whole of the destination is being set. */
5817 if (GET_CODE (target) == REG)
5818 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5819
5820 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
5821 emit_move_insn (gen_realpart (mode, target), op0);
5822 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047
RS
5823
5824 /* Complex construction should appear as a single unit. */
6d6e61ce
RS
5825 if (GET_CODE (target) != CONCAT)
5826 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
5827 each with a separate pseudo as destination.
5828 It's not correct for flow to treat them as a unit. */
5829 group_insns (prev);
7308a047
RS
5830
5831 return target;
5832 }
5833
5834 case REALPART_EXPR:
2d7050fd
RS
5835 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5836 return gen_realpart (mode, op0);
7308a047
RS
5837
5838 case IMAGPART_EXPR:
2d7050fd
RS
5839 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5840 return gen_imagpart (mode, op0);
7308a047
RS
5841
5842 case CONJ_EXPR:
5843 {
5844 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5845 rtx imag_t;
5846 rtx prev;
5847
5848 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5849
5850 if (! target)
5851 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5852
5853 prev = get_last_insn ();
5854
5855 /* Tell flow that the whole of the destination is being set. */
5856 if (GET_CODE (target) == REG)
5857 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5858
5859 /* Store the realpart and the negated imagpart to target. */
2d7050fd 5860 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
7308a047 5861
2d7050fd 5862 imag_t = gen_imagpart (mode, target);
7308a047 5863 temp = expand_unop (mode, neg_optab,
2d7050fd 5864 gen_imagpart (mode, op0), imag_t, 0);
7308a047
RS
5865 if (temp != imag_t)
5866 emit_move_insn (imag_t, temp);
5867
5868 /* Conjugate should appear as a single unit */
6d6e61ce
RS
5869 if (GET_CODE (target) != CONCAT)
5870 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
5871 each with a separate pseudo as destination.
5872 It's not correct for flow to treat them as a unit. */
5873 group_insns (prev);
7308a047
RS
5874
5875 return target;
5876 }
5877
bbf6f052 5878 case ERROR_MARK:
66538193
RS
5879 op0 = CONST0_RTX (tmode);
5880 if (op0 != 0)
5881 return op0;
bbf6f052
RK
5882 return const0_rtx;
5883
5884 default:
90764a87 5885 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
5886 }
5887
5888 /* Here to do an ordinary binary operator, generating an instruction
5889 from the optab already placed in `this_optab'. */
5890 binop:
5891 preexpand_calls (exp);
5892 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5893 subtarget = 0;
5894 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 5895 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5896 binop2:
5897 temp = expand_binop (mode, this_optab, op0, op1, target,
5898 unsignedp, OPTAB_LIB_WIDEN);
5899 if (temp == 0)
5900 abort ();
5901 return temp;
5902}
bbf6f052 5903
bbf6f052 5904
ca695ac9
JB
5905/* Emit bytecode to evaluate the given expression EXP to the stack. */
5906void
5907bc_expand_expr (exp)
5908 tree exp;
bbf6f052 5909{
ca695ac9
JB
5910 enum tree_code code;
5911 tree type, arg0;
5912 rtx r;
5913 struct binary_operator *binoptab;
5914 struct unary_operator *unoptab;
5915 struct increment_operator *incroptab;
5916 struct bc_label *lab, *lab1;
5917 enum bytecode_opcode opcode;
5918
5919
5920 code = TREE_CODE (exp);
5921
5922 switch (code)
bbf6f052 5923 {
ca695ac9
JB
5924 case PARM_DECL:
5925
5926 if (DECL_RTL (exp) == 0)
bbf6f052 5927 {
ca695ac9
JB
5928 error_with_decl (exp, "prior parameter's size depends on `%s'");
5929 return;
bbf6f052 5930 }
ca695ac9
JB
5931
5932 bc_load_parmaddr (DECL_RTL (exp));
5933 bc_load_memory (TREE_TYPE (exp), exp);
5934
5935 return;
5936
5937 case VAR_DECL:
5938
5939 if (DECL_RTL (exp) == 0)
5940 abort ();
5941
5942#if 0
e7a42772 5943 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
5944 bc_load_externaddr (DECL_RTL (exp));
5945 else
5946 bc_load_localaddr (DECL_RTL (exp));
5947#endif
5948 if (TREE_PUBLIC (exp))
e7a42772
JB
5949 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
5950 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
ca695ac9
JB
5951 else
5952 bc_load_localaddr (DECL_RTL (exp));
5953
5954 bc_load_memory (TREE_TYPE (exp), exp);
5955 return;
5956
5957 case INTEGER_CST:
5958
5959#ifdef DEBUG_PRINT_CODE
5960 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
5961#endif
6bd6178d 5962 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
ca695ac9 5963 ? SImode
6bd6178d 5964 : TYPE_MODE (TREE_TYPE (exp)))],
ca695ac9
JB
5965 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
5966 return;
5967
5968 case REAL_CST:
5969
c02bd5d9 5970#if 0
ca695ac9
JB
5971#ifdef DEBUG_PRINT_CODE
5972 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
5973#endif
c02bd5d9 5974 /* FIX THIS: find a better way to pass real_cst's. -bson */
ca695ac9
JB
5975 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
5976 (double) TREE_REAL_CST (exp));
c02bd5d9
JB
5977#else
5978 abort ();
5979#endif
5980
ca695ac9
JB
5981 return;
5982
5983 case CALL_EXPR:
5984
5985 /* We build a call description vector describing the type of
5986 the return value and of the arguments; this call vector,
5987 together with a pointer to a location for the return value
5988 and the base of the argument list, is passed to the low
5989 level machine dependent call subroutine, which is responsible
5990 for putting the arguments wherever real functions expect
5991 them, as well as getting the return value back. */
5992 {
5993 tree calldesc = 0, arg;
5994 int nargs = 0, i;
5995 rtx retval;
5996
5997 /* Push the evaluated args on the evaluation stack in reverse
5998 order. Also make an entry for each arg in the calldesc
5999 vector while we're at it. */
6000
6001 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6002
6003 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6004 {
6005 ++nargs;
6006 bc_expand_expr (TREE_VALUE (arg));
6007
6008 calldesc = tree_cons ((tree) 0,
6009 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6010 calldesc);
6011 calldesc = tree_cons ((tree) 0,
6012 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6013 calldesc);
6014 }
6015
6016 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6017
6018 /* Allocate a location for the return value and push its
6019 address on the evaluation stack. Also make an entry
6020 at the front of the calldesc for the return value type. */
6021
6022 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6023 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6024 bc_load_localaddr (retval);
6025
6026 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6027 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6028
6029 /* Prepend the argument count. */
6030 calldesc = tree_cons ((tree) 0,
6031 build_int_2 (nargs, 0),
6032 calldesc);
6033
6034 /* Push the address of the call description vector on the stack. */
6035 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6036 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6037 build_index_type (build_int_2 (nargs * 2, 0)));
6038 r = output_constant_def (calldesc);
6039 bc_load_externaddr (r);
6040
6041 /* Push the address of the function to be called. */
6042 bc_expand_expr (TREE_OPERAND (exp, 0));
6043
6044 /* Call the function, popping its address and the calldesc vector
6045 address off the evaluation stack in the process. */
6046 bc_emit_instruction (call);
6047
6048 /* Pop the arguments off the stack. */
6049 bc_adjust_stack (nargs);
6050
6051 /* Load the return value onto the stack. */
6052 bc_load_localaddr (retval);
6053 bc_load_memory (type, TREE_OPERAND (exp, 0));
6054 }
6055 return;
6056
6057 case SAVE_EXPR:
6058
6059 if (!SAVE_EXPR_RTL (exp))
bbf6f052 6060 {
ca695ac9
JB
6061 /* First time around: copy to local variable */
6062 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6063 TYPE_ALIGN (TREE_TYPE(exp)));
6064 bc_expand_expr (TREE_OPERAND (exp, 0));
6d6e61ce 6065 bc_emit_instruction (duplicate);
ca695ac9
JB
6066
6067 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6068 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 6069 }
ca695ac9 6070 else
bbf6f052 6071 {
ca695ac9
JB
6072 /* Consecutive reference: use saved copy */
6073 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6074 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 6075 }
ca695ac9
JB
6076 return;
6077
6078#if 0
6079 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6080 how are they handled instead? */
6081 case LET_STMT:
6082
6083 TREE_USED (exp) = 1;
6084 bc_expand_expr (STMT_BODY (exp));
6085 return;
6086#endif
6087
6088 case NOP_EXPR:
6089 case CONVERT_EXPR:
6090
6091 bc_expand_expr (TREE_OPERAND (exp, 0));
6092 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6093 return;
6094
6095 case MODIFY_EXPR:
6096
c02bd5d9 6097 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
ca695ac9
JB
6098 return;
6099
6100 case ADDR_EXPR:
6101
6102 bc_expand_address (TREE_OPERAND (exp, 0));
6103 return;
6104
6105 case INDIRECT_REF:
6106
6107 bc_expand_expr (TREE_OPERAND (exp, 0));
6108 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6109 return;
6110
6111 case ARRAY_REF:
6112
6113 bc_expand_expr (bc_canonicalize_array_ref (exp));
6114 return;
6115
6116 case COMPONENT_REF:
6117
6118 bc_expand_component_address (exp);
6119
6120 /* If we have a bitfield, generate a proper load */
6121 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6122 return;
6123
6124 case COMPOUND_EXPR:
6125
6126 bc_expand_expr (TREE_OPERAND (exp, 0));
6127 bc_emit_instruction (drop);
6128 bc_expand_expr (TREE_OPERAND (exp, 1));
6129 return;
6130
6131 case COND_EXPR:
6132
6133 bc_expand_expr (TREE_OPERAND (exp, 0));
6134 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6135 lab = bc_get_bytecode_label ();
c02bd5d9 6136 bc_emit_bytecode (xjumpifnot);
ca695ac9
JB
6137 bc_emit_bytecode_labelref (lab);
6138
6139#ifdef DEBUG_PRINT_CODE
6140 fputc ('\n', stderr);
6141#endif
6142 bc_expand_expr (TREE_OPERAND (exp, 1));
6143 lab1 = bc_get_bytecode_label ();
6144 bc_emit_bytecode (jump);
6145 bc_emit_bytecode_labelref (lab1);
6146
6147#ifdef DEBUG_PRINT_CODE
6148 fputc ('\n', stderr);
6149#endif
6150
6151 bc_emit_bytecode_labeldef (lab);
6152 bc_expand_expr (TREE_OPERAND (exp, 2));
6153 bc_emit_bytecode_labeldef (lab1);
6154 return;
6155
6156 case TRUTH_ANDIF_EXPR:
6157
c02bd5d9 6158 opcode = xjumpifnot;
ca695ac9
JB
6159 goto andorif;
6160
6161 case TRUTH_ORIF_EXPR:
6162
c02bd5d9 6163 opcode = xjumpif;
ca695ac9
JB
6164 goto andorif;
6165
6166 case PLUS_EXPR:
6167
6168 binoptab = optab_plus_expr;
6169 goto binop;
6170
6171 case MINUS_EXPR:
6172
6173 binoptab = optab_minus_expr;
6174 goto binop;
6175
6176 case MULT_EXPR:
6177
6178 binoptab = optab_mult_expr;
6179 goto binop;
6180
6181 case TRUNC_DIV_EXPR:
6182 case FLOOR_DIV_EXPR:
6183 case CEIL_DIV_EXPR:
6184 case ROUND_DIV_EXPR:
6185 case EXACT_DIV_EXPR:
6186
6187 binoptab = optab_trunc_div_expr;
6188 goto binop;
6189
6190 case TRUNC_MOD_EXPR:
6191 case FLOOR_MOD_EXPR:
6192 case CEIL_MOD_EXPR:
6193 case ROUND_MOD_EXPR:
6194
6195 binoptab = optab_trunc_mod_expr;
6196 goto binop;
6197
6198 case FIX_ROUND_EXPR:
6199 case FIX_FLOOR_EXPR:
6200 case FIX_CEIL_EXPR:
6201 abort (); /* Not used for C. */
6202
6203 case FIX_TRUNC_EXPR:
6204 case FLOAT_EXPR:
6205 case MAX_EXPR:
6206 case MIN_EXPR:
6207 case FFS_EXPR:
6208 case LROTATE_EXPR:
6209 case RROTATE_EXPR:
6210 abort (); /* FIXME */
6211
6212 case RDIV_EXPR:
6213
6214 binoptab = optab_rdiv_expr;
6215 goto binop;
6216
6217 case BIT_AND_EXPR:
6218
6219 binoptab = optab_bit_and_expr;
6220 goto binop;
6221
6222 case BIT_IOR_EXPR:
6223
6224 binoptab = optab_bit_ior_expr;
6225 goto binop;
6226
6227 case BIT_XOR_EXPR:
6228
6229 binoptab = optab_bit_xor_expr;
6230 goto binop;
6231
6232 case LSHIFT_EXPR:
6233
6234 binoptab = optab_lshift_expr;
6235 goto binop;
6236
6237 case RSHIFT_EXPR:
6238
6239 binoptab = optab_rshift_expr;
6240 goto binop;
6241
6242 case TRUTH_AND_EXPR:
6243
6244 binoptab = optab_truth_and_expr;
6245 goto binop;
6246
6247 case TRUTH_OR_EXPR:
6248
6249 binoptab = optab_truth_or_expr;
6250 goto binop;
6251
6252 case LT_EXPR:
6253
6254 binoptab = optab_lt_expr;
6255 goto binop;
6256
6257 case LE_EXPR:
6258
6259 binoptab = optab_le_expr;
6260 goto binop;
6261
6262 case GE_EXPR:
6263
6264 binoptab = optab_ge_expr;
6265 goto binop;
6266
6267 case GT_EXPR:
6268
6269 binoptab = optab_gt_expr;
6270 goto binop;
6271
6272 case EQ_EXPR:
6273
6274 binoptab = optab_eq_expr;
6275 goto binop;
6276
6277 case NE_EXPR:
6278
6279 binoptab = optab_ne_expr;
6280 goto binop;
6281
6282 case NEGATE_EXPR:
6283
6284 unoptab = optab_negate_expr;
6285 goto unop;
6286
6287 case BIT_NOT_EXPR:
6288
6289 unoptab = optab_bit_not_expr;
6290 goto unop;
6291
6292 case TRUTH_NOT_EXPR:
6293
6294 unoptab = optab_truth_not_expr;
6295 goto unop;
6296
6297 case PREDECREMENT_EXPR:
6298
6299 incroptab = optab_predecrement_expr;
6300 goto increment;
6301
6302 case PREINCREMENT_EXPR:
6303
6304 incroptab = optab_preincrement_expr;
6305 goto increment;
6306
6307 case POSTDECREMENT_EXPR:
6308
6309 incroptab = optab_postdecrement_expr;
6310 goto increment;
6311
6312 case POSTINCREMENT_EXPR:
6313
6314 incroptab = optab_postincrement_expr;
6315 goto increment;
6316
6317 case CONSTRUCTOR:
6318
6319 bc_expand_constructor (exp);
6320 return;
6321
6322 case ERROR_MARK:
6323 case RTL_EXPR:
6324
6325 return;
6326
6327 case BIND_EXPR:
6328 {
6329 tree vars = TREE_OPERAND (exp, 0);
6330 int vars_need_expansion = 0;
6331
6332 /* Need to open a binding contour here because
6333 if there are any cleanups they most be contained here. */
6334 expand_start_bindings (0);
6335
6336 /* Mark the corresponding BLOCK for output. */
6337 if (TREE_OPERAND (exp, 2) != 0)
6338 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6339
6340 /* If VARS have not yet been expanded, expand them now. */
6341 while (vars)
6342 {
6343 if (DECL_RTL (vars) == 0)
6344 {
6345 vars_need_expansion = 1;
6346 bc_expand_decl (vars, 0);
6347 }
6348 bc_expand_decl_init (vars);
6349 vars = TREE_CHAIN (vars);
6350 }
6351
6352 bc_expand_expr (TREE_OPERAND (exp, 1));
6353
6354 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6355
6356 return;
6357 }
6358 }
6359
6360 abort ();
6361
6362 binop:
6363
6364 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6365 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6366 return;
6367
6368
6369 unop:
6370
6371 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6372 return;
6373
6374
6375 andorif:
6376
6377 bc_expand_expr (TREE_OPERAND (exp, 0));
6378 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6379 lab = bc_get_bytecode_label ();
6380
6d6e61ce 6381 bc_emit_instruction (duplicate);
ca695ac9
JB
6382 bc_emit_bytecode (opcode);
6383 bc_emit_bytecode_labelref (lab);
6384
6385#ifdef DEBUG_PRINT_CODE
6386 fputc ('\n', stderr);
6387#endif
6388
6389 bc_emit_instruction (drop);
6390
6391 bc_expand_expr (TREE_OPERAND (exp, 1));
6392 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6393 bc_emit_bytecode_labeldef (lab);
6394 return;
6395
6396
6397 increment:
6398
6399 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6400
6401 /* Push the quantum. */
6402 bc_expand_expr (TREE_OPERAND (exp, 1));
6403
6404 /* Convert it to the lvalue's type. */
6405 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6406
6407 /* Push the address of the lvalue */
c02bd5d9 6408 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
ca695ac9
JB
6409
6410 /* Perform actual increment */
c02bd5d9 6411 bc_expand_increment (incroptab, type);
ca695ac9
JB
6412 return;
6413}
6414\f
6415/* Return the alignment in bits of EXP, a pointer valued expression.
6416 But don't return more than MAX_ALIGN no matter what.
6417 The alignment returned is, by default, the alignment of the thing that
6418 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6419
6420 Otherwise, look at the expression to see if we can do better, i.e., if the
6421 expression is actually pointing at an object whose alignment is tighter. */
6422
6423static int
6424get_pointer_alignment (exp, max_align)
6425 tree exp;
6426 unsigned max_align;
6427{
6428 unsigned align, inner;
6429
6430 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6431 return 0;
6432
6433 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6434 align = MIN (align, max_align);
6435
6436 while (1)
6437 {
6438 switch (TREE_CODE (exp))
6439 {
6440 case NOP_EXPR:
6441 case CONVERT_EXPR:
6442 case NON_LVALUE_EXPR:
6443 exp = TREE_OPERAND (exp, 0);
6444 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6445 return align;
6446 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6447 inner = MIN (inner, max_align);
6448 align = MAX (align, inner);
6449 break;
6450
6451 case PLUS_EXPR:
6452 /* If sum of pointer + int, restrict our maximum alignment to that
6453 imposed by the integer. If not, we can't do any better than
6454 ALIGN. */
6455 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6456 return align;
6457
6458 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6459 & (max_align - 1))
6460 != 0)
6461 max_align >>= 1;
6462
6463 exp = TREE_OPERAND (exp, 0);
6464 break;
6465
6466 case ADDR_EXPR:
6467 /* See what we are pointing at and look at its alignment. */
6468 exp = TREE_OPERAND (exp, 0);
6469 if (TREE_CODE (exp) == FUNCTION_DECL)
6470 align = MAX (align, FUNCTION_BOUNDARY);
6471 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6472 align = MAX (align, DECL_ALIGN (exp));
6473#ifdef CONSTANT_ALIGNMENT
6474 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6475 align = CONSTANT_ALIGNMENT (exp, align);
6476#endif
6477 return MIN (align, max_align);
6478
6479 default:
6480 return align;
6481 }
6482 }
6483}
6484\f
6485/* Return the tree node and offset if a given argument corresponds to
6486 a string constant. */
6487
6488static tree
6489string_constant (arg, ptr_offset)
6490 tree arg;
6491 tree *ptr_offset;
6492{
6493 STRIP_NOPS (arg);
6494
6495 if (TREE_CODE (arg) == ADDR_EXPR
6496 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6497 {
6498 *ptr_offset = integer_zero_node;
6499 return TREE_OPERAND (arg, 0);
6500 }
6501 else if (TREE_CODE (arg) == PLUS_EXPR)
6502 {
6503 tree arg0 = TREE_OPERAND (arg, 0);
6504 tree arg1 = TREE_OPERAND (arg, 1);
6505
6506 STRIP_NOPS (arg0);
6507 STRIP_NOPS (arg1);
6508
6509 if (TREE_CODE (arg0) == ADDR_EXPR
6510 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6511 {
6512 *ptr_offset = arg1;
6513 return TREE_OPERAND (arg0, 0);
6514 }
6515 else if (TREE_CODE (arg1) == ADDR_EXPR
6516 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6517 {
6518 *ptr_offset = arg0;
6519 return TREE_OPERAND (arg1, 0);
6520 }
6521 }
6522
6523 return 0;
6524}
6525
6526/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6527 way, because it could contain a zero byte in the middle.
6528 TREE_STRING_LENGTH is the size of the character array, not the string.
6529
6530 Unfortunately, string_constant can't access the values of const char
6531 arrays with initializers, so neither can we do so here. */
6532
6533static tree
6534c_strlen (src)
6535 tree src;
6536{
6537 tree offset_node;
6538 int offset, max;
6539 char *ptr;
6540
6541 src = string_constant (src, &offset_node);
6542 if (src == 0)
6543 return 0;
6544 max = TREE_STRING_LENGTH (src);
6545 ptr = TREE_STRING_POINTER (src);
6546 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6547 {
6548 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6549 compute the offset to the following null if we don't know where to
6550 start searching for it. */
6551 int i;
6552 for (i = 0; i < max; i++)
6553 if (ptr[i] == 0)
6554 return 0;
6555 /* We don't know the starting offset, but we do know that the string
6556 has no internal zero bytes. We can assume that the offset falls
6557 within the bounds of the string; otherwise, the programmer deserves
6558 what he gets. Subtract the offset from the length of the string,
6559 and return that. */
6560 /* This would perhaps not be valid if we were dealing with named
6561 arrays in addition to literal string constants. */
6562 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6563 }
6564
6565 /* We have a known offset into the string. Start searching there for
6566 a null character. */
6567 if (offset_node == 0)
6568 offset = 0;
6569 else
6570 {
6571 /* Did we get a long long offset? If so, punt. */
6572 if (TREE_INT_CST_HIGH (offset_node) != 0)
6573 return 0;
6574 offset = TREE_INT_CST_LOW (offset_node);
6575 }
6576 /* If the offset is known to be out of bounds, warn, and call strlen at
6577 runtime. */
6578 if (offset < 0 || offset > max)
6579 {
6580 warning ("offset outside bounds of constant string");
6581 return 0;
6582 }
6583 /* Use strlen to search for the first zero byte. Since any strings
6584 constructed with build_string will have nulls appended, we win even
6585 if we get handed something like (char[4])"abcd".
6586
6587 Since OFFSET is our starting index into the string, no further
6588 calculation is needed. */
6589 return size_int (strlen (ptr + offset));
6590}
6591\f
6592/* Expand an expression EXP that calls a built-in function,
6593 with result going to TARGET if that's convenient
6594 (and in mode MODE if that's convenient).
6595 SUBTARGET may be used as the target for computing one of EXP's operands.
6596 IGNORE is nonzero if the value is to be ignored. */
6597
static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  optab builtin_optab;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR,
	 so we should never see them here.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
	break;

      if (arglist == 0
	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
	return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));

      /* Stabilize and compute the argument.  Wrap it in a SAVE_EXPR
	 unless it is already a simple VAR_DECL/PARM_DECL, since we may
	 have to evaluate it twice (once here, once in the errno-setting
	 library call below).  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
	  && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
	{
	  exp = copy_node (exp);
	  arglist = copy_node (arglist);
	  TREE_OPERAND (exp, 1) = arglist;
	  TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
	}
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_SIN:
	  builtin_optab = sin_optab; break;
	case BUILT_IN_COS:
	  builtin_optab = cos_optab; break;
	case BUILT_IN_FSQRT:
	  builtin_optab = sqrt_optab; break;
	default:
	  abort ();
	}

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    builtin_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
	 sequence (without outputting the insns) and break, causing
	 a call to the library function.  */
      if (target == 0)
	{
	  end_sequence ();
	  break;
	}

      /* Check the results by default.  But if flag_fast_math is turned on,
	 then assume sqrt will always be called with valid arguments.  */

      if (! flag_fast_math)
	{
	  /* Don't define the builtin FP instructions
	     if your machine is not IEEE.  */
	  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	    abort ();

	  lab1 = gen_label_rtx ();

	  /* Test the result; if it is NaN, set errno=EDOM because
	     the argument was not in the domain.  (NaN != NaN, so a
	     compare-equal against itself detects it.)  */
	  emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
	  emit_jump_insn (gen_beq (lab1));

#if TARGET_EDOM
	  {
#ifdef GEN_ERRNO_RTX
	    rtx errno_rtx = GEN_ERRNO_RTX;
#else
	    rtx errno_rtx
	      = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
#endif

	    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
	  }
#else
	  /* We can't set errno=EDOM directly; let the library call do it.
	     Pop the arguments right away in case the call gets deleted.  */
	  NO_DEFER_POP;
	  expand_call (exp, target, 0);
	  OK_DEFER_POP;
#endif

	  emit_label (lab1);
	}

      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insns (insns);

      return target;

      /* __builtin_apply_args returns block of memory allocated on
	 the stack into which is stored the arg pointer, structure
	 value address, static chain, and all the registers that might
	 possibly be used in performing a function call.  The code is
	 moved to the start of the function so the incoming values are
	 saved.  */
    case BUILT_IN_APPLY_ARGS:
      /* Don't do __builtin_apply_args more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (apply_args_value != 0)
	return apply_args_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	start_sequence ();
	temp = expand_builtin_apply_args ();
	seq = get_insns ();
	end_sequence ();

	apply_args_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	return const0_rtx;
      else
	{
	  int i;
	  tree t;
	  rtx ops[3];

	  /* Expand the three arguments in order: function, args block,
	     args size.  */
	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (arglist
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
					    NULL_RTX, VOIDmode, 0));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      /* Don't do __builtin_saveregs more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (saveregs_value != 0)
	return saveregs_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;
	rtx valreg, saved_valreg;

	/* Now really call the function.  `expand_call' does not call
	   expand_builtin, so there is no danger of infinite recursion here.  */
	start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
	/* Do whatever the machine needs done in this case.  */
	temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
	/* The register where the function returns its value
	   is likely to have something else in it, such as an argument.
	   So preserve that register around the call.  */
	if (value_mode != VOIDmode)
	  {
	    valreg = hard_libcall_value (value_mode);
	    saved_valreg = gen_reg_rtx (value_mode);
	    emit_move_insn (saved_valreg, valreg);
	  }

	/* Generate the call, putting the value in a pseudo.  */
	temp = expand_call (exp, target, ignore);

	if (value_mode != VOIDmode)
	  emit_move_insn (valreg, saved_valreg);
#endif

	seq = get_insns ();
	end_sequence ();

	saveregs_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }

      /* __builtin_args_info (N) returns word N of the arg space info
	 for the current function.  The number and meanings of words
	 is controlled by the definition of CUMULATIVE_ARGS.  */
    case BUILT_IN_ARGS_INFO:
      {
	int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
	int i;
	int *word_ptr = (int *) &current_function_args_info;
	tree type, elts, result;

	/* CUMULATIVE_ARGS must be an exact multiple of int-sized words
	   for the word-indexed access below to make sense.  */
	if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
	  fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
		 __FILE__, __LINE__);

	if (arglist != 0)
	  {
	    tree arg = TREE_VALUE (arglist);
	    if (TREE_CODE (arg) != INTEGER_CST)
	      error ("argument of `__builtin_args_info' must be constant");
	    else
	      {
		int wordnum = TREE_INT_CST_LOW (arg);

		if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
		  error ("argument of `__builtin_args_info' out of range");
		else
		  return GEN_INT (word_ptr[wordnum]);
	      }
	  }
	else
	  error ("missing argument in `__builtin_args_info'");

	return const0_rtx;

	/* Dead alternative implementation that would have returned the
	   whole info block as a constant array; i, type, elts and
	   result above exist only for this.  */
#if 0
	for (i = 0; i < nwords; i++)
	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));

	type = build_array_type (integer_type_node,
				 build_index_type (build_int_2 (nwords, 0)));
	result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
	TREE_CONSTANT (result) = 1;
	TREE_STATIC (result) = 1;
	result = build (INDIRECT_REF, build_pointer_type (type), result);
	TREE_CONSTANT (result) = 1;
	return expand_expr (result, NULL_RTX, VOIDmode, 0);
#endif
      }

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      {
	tree fntype = TREE_TYPE (current_function_decl);
	/* Reject use in a function whose argument list does not end in
	   an ellipsis (i.e., whose last declared arg type is void).  */
	if (!(TYPE_ARG_TYPES (fntype) != 0
	      && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
		  != void_type_node)))
	  {
	    error ("`va_start' used in function with fixed args");
	    return const0_rtx;
	  }
      }

      return expand_binop (Pmode, add_optab,
			   current_function_internal_arg_pointer,
			   current_function_arg_offset_rtx,
			   NULL_RTX, 0, OPTAB_LIB_WIDEN);

    case BUILT_IN_CLASSIFY_TYPE:
      if (arglist != 0)
	{
	  tree type = TREE_TYPE (TREE_VALUE (arglist));
	  enum tree_code code = TREE_CODE (type);
	  if (code == VOID_TYPE)
	    return GEN_INT (void_type_class);
	  if (code == INTEGER_TYPE)
	    return GEN_INT (integer_type_class);
	  if (code == CHAR_TYPE)
	    return GEN_INT (char_type_class);
	  if (code == ENUMERAL_TYPE)
	    return GEN_INT (enumeral_type_class);
	  if (code == BOOLEAN_TYPE)
	    return GEN_INT (boolean_type_class);
	  if (code == POINTER_TYPE)
	    return GEN_INT (pointer_type_class);
	  if (code == REFERENCE_TYPE)
	    return GEN_INT (reference_type_class);
	  if (code == OFFSET_TYPE)
	    return GEN_INT (offset_type_class);
	  if (code == REAL_TYPE)
	    return GEN_INT (real_type_class);
	  if (code == COMPLEX_TYPE)
	    return GEN_INT (complex_type_class);
	  if (code == FUNCTION_TYPE)
	    return GEN_INT (function_type_class);
	  if (code == METHOD_TYPE)
	    return GEN_INT (method_type_class);
	  if (code == RECORD_TYPE)
	    return GEN_INT (record_type_class);
	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
	    return GEN_INT (union_type_class);
	  if (code == ARRAY_TYPE)
	    return GEN_INT (array_type_class);
	  if (code == STRING_TYPE)
	    return GEN_INT (string_type_class);
	  if (code == SET_TYPE)
	    return GEN_INT (set_type_class);
	  if (code == FILE_TYPE)
	    return GEN_INT (file_type_class);
	  if (code == LANG_TYPE)
	    return GEN_INT (lang_type_class);
	}
      return GEN_INT (no_type_class);

    case BUILT_IN_CONSTANT_P:
      if (arglist == 0)
	return const0_rtx;
      else
	return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
		? const1_rtx : const0_rtx);

    case BUILT_IN_FRAME_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the address of that frame.  */
    case BUILT_IN_RETURN_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the return address saved in that frame.  */
      if (arglist == 0)
	/* Warning about missing arg was already issued.  */
	return const0_rtx;
      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
	{
	  /* NOTE(review): this message names __builtin_return_address
	     even when the call was __builtin_frame_address.  */
	  error ("invalid arg to `__builtin_return_address'");
	  return const0_rtx;
	}
      else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
	{
	  error ("invalid arg to `__builtin_return_address'");
	  return const0_rtx;
	}
      else
	{
	  int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
	  rtx tem = frame_pointer_rtx;
	  int i;

	  /* Some machines need special handling before we can access arbitrary
	     frames.  For example, on the sparc, we must first flush all
	     register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
	  SETUP_FRAME_ADDRESSES ();
#endif

	  /* On the sparc, the return address is not in the frame, it is
	     in a register.  There is no way to access it off of the current
	     frame pointer, but it can be accessed off the previous frame
	     pointer by reading the value from the register window save
	     area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
	    count--;
#endif

	  /* Scan back COUNT frames to the specified frame.  */
	  for (i = 0; i < count; i++)
	    {
	      /* Assume the dynamic chain pointer is in the word that
		 the frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
	      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
	      tem = memory_address (Pmode, tem);
	      tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
	    }

	  /* For __builtin_frame_address, return what we've got.  */
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    return tem;

	  /* For __builtin_return_address,
	     get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
	  return RETURN_ADDR_RTX (count, tem);
#else
	  tem = memory_address (Pmode,
				plus_constant (tem, GET_MODE_SIZE (Pmode)));
	  return copy_to_reg (gen_rtx (MEM, Pmode, tem));
#endif
	}

    case BUILT_IN_ALLOCA:
      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	return const0_rtx;
      current_function_calls_alloca = 1;
      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

      /* Allocate the desired space.  */
      target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);

      /* Record the new stack level for nonlocal gotos.  */
      if (nonlocal_goto_handler_slot != 0)
	emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
      return target;

    case BUILT_IN_FFS:
      /* If not optimizing, call the library function.  */
      if (!optimize)
	break;

      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	return const0_rtx;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
      /* Compute ffs, into TARGET if possible.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    ffs_optab, op0, target, 1);
      if (target == 0)
	abort ();
      return target;

    case BUILT_IN_STRLEN:
      /* If not optimizing, call the library function.  */
      if (!optimize)
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	return const0_rtx;
      else
	{
	  tree src = TREE_VALUE (arglist);
	  tree len = c_strlen (src);

	  int align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

	  rtx result, src_rtx, char_rtx;
	  enum machine_mode insn_mode = value_mode, char_mode;
	  enum insn_code icode;

	  /* If the length is known, just return it.  */
	  if (len != 0)
	    return expand_expr (len, target, mode, 0);

	  /* If SRC is not a pointer type, don't do this operation inline.  */
	  if (align == 0)
	    break;

	  /* Call a function if we can't compute strlen in the right mode.
	     Search widening modes for one with a strlen pattern.  */

	  while (insn_mode != VOIDmode)
	    {
	      icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	      if (icode != CODE_FOR_nothing)
		break;

	      insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	    }
	  if (insn_mode == VOIDmode)
	    break;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && GET_CODE (result) == REG
		 && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  /* Make sure the operands are acceptable to the predicates.  */

	  if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
	    result = gen_reg_rtx (insn_mode);

	  src_rtx = memory_address (BLKmode,
				    expand_expr (src, NULL_RTX, Pmode,
						 EXPAND_NORMAL));
	  if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
	    src_rtx = copy_to_mode_reg (Pmode, src_rtx);

	  /* The strlen pattern also takes the terminating character,
	     which for C strings is always zero.  */
	  char_rtx = const0_rtx;
	  char_mode = insn_operand_mode[(int)icode][2];
	  if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
	    char_rtx = copy_to_mode_reg (char_mode, char_rtx);

	  emit_insn (GEN_FCN (icode) (result,
				      gen_rtx (MEM, BLKmode, src_rtx),
				      char_rtx, GEN_INT (align)));

	  /* Return the value in the proper mode for this function.  */
	  if (GET_MODE (result) == value_mode)
	    return result;
	  else if (target != 0)
	    {
	      convert_move (target, result, 0);
	      return target;
	    }
	  else
	    return convert_to_mode (value_mode, result, 0);
	}

    case BUILT_IN_STRCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize)
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	return const0_rtx;
      else
	{
	  tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

	  if (len == 0)
	    break;

	  /* Count the terminating null as well.  */
	  len = size_binop (PLUS_EXPR, len, integer_one_node);

	  /* Append the computed length as a third argument so the
	     memcpy handler below can be reused.  */
	  chainon (arglist, build_tree_list (NULL_TREE, len));
	}

      /* Drops in: falls through to BUILT_IN_MEMCPY with LEN appended.  */
    case BUILT_IN_MEMCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize)
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	return const0_rtx;
      else
	{
	  tree dest = TREE_VALUE (arglist);
	  tree src = TREE_VALUE (TREE_CHAIN (arglist));
	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

	  int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  rtx dest_rtx, dest_mem, src_mem;

	  /* If either SRC or DEST is not a pointer type, don't do
	     this operation in-line.  */
	  if (src_align == 0 || dest_align == 0)
	    {
	      /* Undo the extra length argument added by the strcpy
		 case above before calling the library function.  */
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
		TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	      break;
	    }

	  dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
	  dest_mem = gen_rtx (MEM, BLKmode,
			      memory_address (BLKmode, dest_rtx));
	  src_mem = gen_rtx (MEM, BLKmode,
			     memory_address (BLKmode,
					     expand_expr (src, NULL_RTX,
							  Pmode,
							  EXPAND_NORMAL)));

	  /* Copy word part most expediently.  */
	  emit_block_move (dest_mem, src_mem,
			   expand_expr (len, NULL_RTX, VOIDmode, 0),
			   MIN (src_align, dest_align));
	  return dest_rtx;
	}

/* These comparison functions need an instruction that returns an actual
   index.  An ordinary compare that just sets the condition codes
   is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize)
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	return const0_rtx;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree offset;
	tree len, len2;

	/* Lengths here include the terminating null.  */
	len = c_strlen (arg1);
	if (len)
	  len = size_binop (PLUS_EXPR, integer_one_node, len);
	len2 = c_strlen (arg2);
	if (len2)
	  len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

	/* If we don't have a constant length for the first, use the length
	   of the second, if we know it.  We don't require a constant for
	   this case; some cost analysis could be done if both are available
	   but neither is constant.  For now, assume they're equally cheap.

	   If both strings have constant lengths, use the smaller.  This
	   could arise if optimization results in strcpy being called with
	   two fixed strings, or if the code was machine-generated.  We should
	   add some code to the `memcmp' handler below to deal with such
	   situations, someday.  */
	if (!len || TREE_CODE (len) != INTEGER_CST)
	  {
	    if (len2)
	      len = len2;
	    else if (len == 0)
	      break;
	  }
	else if (len2 && TREE_CODE (len2) == INTEGER_CST)
	  {
	    if (tree_int_cst_lt (len2, len))
	      len = len2;
	  }

	/* Append the length so the memcmp handler below can be reused.  */
	chainon (arglist, build_tree_list (NULL_TREE, len));
      }

      /* Drops in: falls through to BUILT_IN_MEMCMP with LEN appended.  */
    case BUILT_IN_MEMCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize)
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	return const0_rtx;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	rtx result;

	int arg1_align
	  = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	int arg2_align
	  = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	enum machine_mode insn_mode
	  = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

	/* If we don't have POINTER_TYPE, call the function.  */
	if (arg1_align == 0 || arg2_align == 0)
	  {
	    /* Undo the extra length argument added by the strcmp
	       case above before calling the library function.  */
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
	      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	    break;
	  }

	/* Make a place to write the result of the instruction.  */
	result = target;
	if (! (result != 0
	       && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	       && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	  result = gen_reg_rtx (insn_mode);

	emit_insn (gen_cmpstrsi (result,
				 gen_rtx (MEM, BLKmode,
					  expand_expr (arg1, NULL_RTX, Pmode,
						       EXPAND_NORMAL)),
				 gen_rtx (MEM, BLKmode,
					  expand_expr (arg2, NULL_RTX, Pmode,
						       EXPAND_NORMAL)),
				 expand_expr (len, NULL_RTX, VOIDmode, 0),
				 GEN_INT (MIN (arg1_align, arg2_align))));

	/* Return the value in the proper mode for this function.  */
	mode = TYPE_MODE (TREE_TYPE (exp));
	if (GET_MODE (result) == mode)
	  return result;
	else if (target != 0)
	  {
	    convert_move (target, result, 0);
	    return target;
	  }
	else
	  return convert_to_mode (mode, result, 0);
      }
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif

    default:			/* just do library call, if unknown builtin */
      error ("built-in function `%s' not currently supported",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */

  return expand_call (exp, target, ignore);
}
7374\f
7375/* Built-in functions to perform an untyped call and return. */
0006469d 7376
ca695ac9
JB
/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
0006469d 7396
ca695ac9
JB
7397/* Return the offset of register REGNO into the block returned by
7398 __builtin_apply_args. This is not declared static, since it is
7399 needed in objc-act.c. */
0006469d 7400
ca695ac9
JB
7401int
7402apply_args_register_offset (regno)
7403 int regno;
7404{
7405 apply_args_size ();
0006469d 7406
ca695ac9
JB
7407 /* Arguments are always put in outgoing registers (in the argument
7408 block) if such make sense. */
7409#ifdef OUTGOING_REGNO
7410 regno = OUTGOING_REGNO(regno);
7411#endif
7412 return apply_args_reg_offset[regno];
7413}
0006469d 7414
ca695ac9
JB
7415/* Return the size required for the block returned by __builtin_apply_args,
7416 and initialize apply_args_mode. */
0006469d 7417
ca695ac9
JB
7418static int
7419apply_args_size ()
7420{
7421 static int size = -1;
7422 int align, regno;
7423 enum machine_mode mode;
bbf6f052 7424
ca695ac9
JB
7425 /* The values computed by this function never change. */
7426 if (size < 0)
7427 {
7428 /* The first value is the incoming arg-pointer. */
7429 size = GET_MODE_SIZE (Pmode);
bbf6f052 7430
ca695ac9
JB
7431 /* The second value is the structure value address unless this is
7432 passed as an "invisible" first argument. */
7433 if (struct_value_rtx)
7434 size += GET_MODE_SIZE (Pmode);
7435
7436 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7437 if (FUNCTION_ARG_REGNO_P (regno))
bbf6f052 7438 {
ca695ac9
JB
7439 /* Search for the proper mode for copying this register's
7440 value. I'm not sure this is right, but it works so far. */
7441 enum machine_mode best_mode = VOIDmode;
7442
7443 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7444 mode != VOIDmode;
7445 mode = GET_MODE_WIDER_MODE (mode))
7446 if (HARD_REGNO_MODE_OK (regno, mode)
7447 && HARD_REGNO_NREGS (regno, mode) == 1)
7448 best_mode = mode;
7449
7450 if (best_mode == VOIDmode)
7451 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7452 mode != VOIDmode;
7453 mode = GET_MODE_WIDER_MODE (mode))
7454 if (HARD_REGNO_MODE_OK (regno, mode)
7455 && (mov_optab->handlers[(int) mode].insn_code
7456 != CODE_FOR_nothing))
7457 best_mode = mode;
7458
7459 mode = best_mode;
7460 if (mode == VOIDmode)
7461 abort ();
7462
7463 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7464 if (size % align != 0)
7465 size = CEIL (size, align) * align;
7466 apply_args_reg_offset[regno] = size;
7467 size += GET_MODE_SIZE (mode);
7468 apply_args_mode[regno] = mode;
7469 }
7470 else
7471 {
7472 apply_args_mode[regno] = VOIDmode;
7473 apply_args_reg_offset[regno] = 0;
bbf6f052 7474 }
ca695ac9
JB
7475 }
7476 return size;
7477}
bbf6f052 7478
ca695ac9
JB
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.

   For each hard register that can hold a function return value, pick a
   machine mode wide enough to copy the register's contents, record it in
   apply_result_mode[regno], and accumulate the (aligned) byte size of a
   block that can hold all such registers.  The result is cached in a
   local static since it never changes for a given target.  */

static int
apply_result_size ()
{
  static int size = -1;		/* Cached result; -1 means "not computed yet".  */
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* Prefer the widest integer mode narrower than TImode that
	       the hard register can hold.  NOTE(review): unlike the
	       float loop below, this loop does not require a move
	       pattern (mov_optab) and it stops before TImode -- confirm
	       both are intentional.  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != TImode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    /* Otherwise fall back to the widest float mode that both
	       fits in the register and has a move insn pattern.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    /* Align this register's slot to its mode's alignment before
	       adding its size; result_vector relies on the same layout.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
bbf6f052 7537
ca695ac9
JB
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.

   The returned rtx is a PARALLEL of SETs, one per return register: for a
   save, each SET stores a register into its slot in RESULT; for a restore,
   each SET loads the register back from that slot.  Slot offsets use the
   same alignment computation as apply_result_size, so they match the block
   that function sized.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep each slot aligned for its mode (same layout as
	   apply_result_size).  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When saving, the value lives in the callee-visible (incoming)
	   register for REGNO.  */
	reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
	mem = change_address (result, mode,
			      plus_constant (XEXP (result, 0), size));
	savevec[nelts++] = (savep
			    ? gen_rtx (SET, VOIDmode, mem, reg)
			    : gen_rtx (SET, VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
bbf6f052 7571
ca695ac9
JB
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.

   Allocates a stack block laid out as:
     [arg pointer] [struct value address, if any] [argument registers...]
   fills it from the incoming registers, and returns (in a pseudo) the
   address of the block.  The layout must agree with apply_args_size and
   with the restore loop in expand_builtin_apply.  */

static rtx
expand_builtin_apply_args ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Align the slot for this register's mode, as apply_args_size did.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	emit_move_insn (change_address (registers, mode,
					plus_constant (XEXP (registers, 0),
						       size)),
			gen_rtx (REG, mode, INCOMING_REGNO (regno)));
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block (at offset 0).  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
		  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
				      plus_constant (XEXP (registers, 0),
						     size)),
		      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
7624
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.

   FUNCTION is the address of the function to call, ARGUMENTS is the
   address of a block built by expand_builtin_apply_args (arg pointer,
   optional struct-value address, then argument registers), and ARGSIZE
   is the number of bytes of stack arguments to copy.  Returns (in a
   pseudo) the address of a block holding the callee's return registers,
   suitable for expand_builtin_return.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx use_insns = 0;		/* Sequence of USEs emitted before the call.  */

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx (MEM, Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  /* If the stack grows upward, the argument data starts ARGSIZE bytes
     below the saved arg pointer.  */
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros effect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = copy_addr_to_reg (push_block (argsize, 0, 0));
  emit_block_move (gen_rtx (MEM, BLKmode, dest),
		   gen_rtx (MEM, BLKmode, incoming_args),
		   argsize,
		   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();		/* Ensure apply_args_mode is initialized.  */
  arguments = gen_rtx (MEM, BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Same slot alignment/offset computation as the save loop.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	/* Accumulate a USE of this register; the USEs are later placed
	   just before the CALL so the registers look live across it.  */
	push_to_sequence (use_insns);
	emit_insn (gen_rtx (USE, VOIDmode, reg));
	use_insns = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	{
	  push_to_sequence (use_insns);
	  emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
	  use_insns = get_insns ();
	  end_sequence ();
	}
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &use_insns);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.
     Prefer untyped_call (saves all return registers); otherwise fall back
     to call_value, which only works when there is a single return
     register.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx (REG, mode, regno);
	  }

      emit_call_insn (gen_call_value (valreg,
				      gen_rtx (MEM, FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted and write the USE insns before it.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the USE insns before the CALL.  */
  emit_insns_before (use_insns, call_insn);

  /* Restore the stack.  */
  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
e7c33f54 7784
/* Perform an untyped return.

   RESULT is the address of a block (built by expand_builtin_apply) holding
   the return-register values.  If the target has an untyped_return
   pattern, emit that; otherwise reload each return register from the
   block, mark it used, and fall out through the normal null return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx use_insns = 0;		/* USEs keeping the reloaded regs live.  */

  apply_result_size ();		/* Ensure apply_result_mode is initialized.  */
  result = gen_rtx (MEM, BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Same slot layout as apply_result_size/result_vector.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	push_to_sequence (use_insns);
	emit_insn (gen_rtx (USE, VOIDmode, reg));
	use_insns = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (use_insns);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
7836\f
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.

   Strategy, in order of preference:
     1. If OP0 is only a copy of the lvalue (or unsafe to store into),
	re-expand the whole thing as an assignment of lvalue +/- delta.
     2. For a postincrement on a true lvalue, queue a single add/sub insn
	so the old value can be returned without extra register shuffling.
     3. Otherwise perform the add/sub now with expand_binop and store the
	result back into OP0.  */

static rtx
expand_increment (exp, post)
     register tree exp;
     int post;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Bytecode output is handled by a separate expander entirely.  */
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      return NULL_RTX;
    }

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    bad_subreg = 1;

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode)
	  && (*insn_operand_predicate[icode][2]) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));
      temp = expand_assignment (incremented, newexp, ! post, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.
	 Queueing the increment insn avoids the register shuffling
	 that often results if we must increment now and first save
	 the old value for subsequent use.  */

#if 0 /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode))
	{
	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
8004\f
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return;

      /* Do nothing to built-in functions.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
	  || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
	  || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case COMPOUND_EXPR:
    case COND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
	 the adjust will be done for it (since it makes jumps).
	 Do it now, so that if this is inside an argument
	 of a function, we don't get the stack adjustment
	 after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case BLOCK:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return;
      /* Not yet expanded: fall through and scan its operands.  */
    }

  /* For every other code, recurse on each operand that could itself
     contain a call.  */
  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
	type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
	if (type == 'e' || type == '<' || type == '1' || type == '2'
	    || type == 'r')
	  preexpand_calls (TREE_OPERAND (exp, i));
      }
}
8071\f
ca695ac9
JB
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.

   pending_stack_adjust accumulates the stack-pointer adjustment owed for
   arguments already pushed; it must start at zero for each function.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}
fb2ca25a 8080
ca695ac9
JB
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   This is only safe when the target declares that its exit code ignores
   the stack pointer (EXIT_IGNORE_STACK) and the function cannot end up
   inlined elsewhere -- an inlined copy would still need to pop its
   arguments.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}
8094
ca695ac9
JB
8095/* Pop any previously-pushed arguments that have not been popped yet. */
8096
8097void
8098do_pending_stack_adjust ()
8099{
8100 if (inhibit_defer_pop == 0)
8101 {
8102 if (pending_stack_adjust != 0)
8103 adjust_stack (GEN_INT (pending_stack_adjust));
8104 pending_stack_adjust = 0;
8105 }
8106}
8107
8108/* Expand all cleanups up to OLD_CLEANUPS.
8109 Needed here, and also for language-dependent calls. */
904762c8 8110
ca695ac9
JB
8111void
8112expand_cleanups_to (old_cleanups)
8113 tree old_cleanups;
0006469d 8114{
ca695ac9 8115 while (cleanups_this_call != old_cleanups)
0006469d 8116 {
ca695ac9
JB
8117 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
8118 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8119 }
8120}
8121\f
8122/* Expand conditional expressions. */
0006469d 8123
ca695ac9
JB
/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.

   Thin wrapper around do_jump with no true-label, so a nonzero value
   falls through.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}
0006469d 8135
/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.

   Thin wrapper around do_jump with no false-label, so a zero value falls
   through.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
0006469d 8145
ca695ac9
JB
8146/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8147 the result is zero, or IF_TRUE_LABEL if the result is one.
8148 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8149 meaning fall through in that case.
0006469d 8150
ca695ac9
JB
8151 do_jump always does any pending stack adjust except when it does not
8152 actually perform a jump. An example where there is no jump
8153 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
0006469d 8154
ca695ac9
JB
8155 This function is responsible for optimizing cases such as
8156 &&, || and comparison operators in EXP. */
904762c8 8157
ca695ac9
JB
8158void
8159do_jump (exp, if_false_label, if_true_label)
8160 tree exp;
8161 rtx if_false_label, if_true_label;
0006469d 8162{
ca695ac9
JB
8163 register enum tree_code code = TREE_CODE (exp);
8164 /* Some cases need to create a label to jump to
8165 in order to properly fall through.
8166 These cases set DROP_THROUGH_LABEL nonzero. */
8167 rtx drop_through_label = 0;
8168 rtx temp;
8169 rtx comparison = 0;
8170 int i;
8171 tree type;
0006469d 8172
ca695ac9 8173 emit_queue ();
0006469d 8174
ca695ac9
JB
8175 switch (code)
8176 {
8177 case ERROR_MARK:
8178 break;
0006469d 8179
ca695ac9
JB
8180 case INTEGER_CST:
8181 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8182 if (temp)
8183 emit_jump (temp);
8184 break;
0006469d 8185
ca695ac9
JB
8186#if 0
8187 /* This is not true with #pragma weak */
8188 case ADDR_EXPR:
8189 /* The address of something can never be zero. */
8190 if (if_true_label)
8191 emit_jump (if_true_label);
8192 break;
8193#endif
0006469d 8194
ca695ac9
JB
8195 case NOP_EXPR:
8196 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8197 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8198 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8199 goto normal;
8200 case CONVERT_EXPR:
8201 /* If we are narrowing the operand, we have to do the compare in the
8202 narrower mode. */
8203 if ((TYPE_PRECISION (TREE_TYPE (exp))
8204 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8205 goto normal;
8206 case NON_LVALUE_EXPR:
8207 case REFERENCE_EXPR:
8208 case ABS_EXPR:
8209 case NEGATE_EXPR:
8210 case LROTATE_EXPR:
8211 case RROTATE_EXPR:
8212 /* These cannot change zero->non-zero or vice versa. */
8213 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8214 break;
0006469d 8215
ca695ac9
JB
8216#if 0
8217 /* This is never less insns than evaluating the PLUS_EXPR followed by
8218 a test and can be longer if the test is eliminated. */
8219 case PLUS_EXPR:
8220 /* Reduce to minus. */
8221 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8222 TREE_OPERAND (exp, 0),
8223 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8224 TREE_OPERAND (exp, 1))));
8225 /* Process as MINUS. */
0006469d 8226#endif
0006469d 8227
ca695ac9
JB
8228 case MINUS_EXPR:
8229 /* Non-zero iff operands of minus differ. */
8230 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8231 TREE_OPERAND (exp, 0),
8232 TREE_OPERAND (exp, 1)),
8233 NE, NE);
8234 break;
904762c8 8235
ca695ac9
JB
8236 case BIT_AND_EXPR:
8237 /* If we are AND'ing with a small constant, do this comparison in the
8238 smallest type that fits. If the machine doesn't have comparisons
8239 that small, it will be converted back to the wider comparison.
8240 This helps if we are testing the sign bit of a narrower object.
8241 combine can't do this for us because it can't know whether a
8242 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
0006469d 8243
ca695ac9
JB
8244 if (! SLOW_BYTE_ACCESS
8245 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8246 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8247 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8248 && (type = type_for_size (i + 1, 1)) != 0
8249 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8250 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8251 != CODE_FOR_nothing))
8252 {
8253 do_jump (convert (type, exp), if_false_label, if_true_label);
8254 break;
8255 }
8256 goto normal;
904762c8 8257
ca695ac9
JB
8258 case TRUTH_NOT_EXPR:
8259 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8260 break;
0006469d 8261
ca695ac9
JB
8262 case TRUTH_ANDIF_EXPR:
8263 if (if_false_label == 0)
8264 if_false_label = drop_through_label = gen_label_rtx ();
8265 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8266 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8267 break;
0006469d 8268
ca695ac9
JB
8269 case TRUTH_ORIF_EXPR:
8270 if (if_true_label == 0)
8271 if_true_label = drop_through_label = gen_label_rtx ();
8272 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8273 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8274 break;
0006469d 8275
ca695ac9 8276 case COMPOUND_EXPR:
0088fcb1 8277 push_temp_slots ();
ca695ac9
JB
8278 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8279 free_temp_slots ();
0088fcb1 8280 pop_temp_slots ();
ca695ac9
JB
8281 emit_queue ();
8282 do_pending_stack_adjust ();
8283 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8284 break;
0006469d 8285
ca695ac9
JB
8286 case COMPONENT_REF:
8287 case BIT_FIELD_REF:
8288 case ARRAY_REF:
8289 {
8290 int bitsize, bitpos, unsignedp;
8291 enum machine_mode mode;
8292 tree type;
8293 tree offset;
8294 int volatilep = 0;
0006469d 8295
ca695ac9
JB
8296 /* Get description of this reference. We don't actually care
8297 about the underlying object here. */
8298 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8299 &mode, &unsignedp, &volatilep);
0006469d 8300
ca695ac9
JB
8301 type = type_for_size (bitsize, unsignedp);
8302 if (! SLOW_BYTE_ACCESS
8303 && type != 0 && bitsize >= 0
8304 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8305 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8306 != CODE_FOR_nothing))
8307 {
8308 do_jump (convert (type, exp), if_false_label, if_true_label);
8309 break;
8310 }
8311 goto normal;
8312 }
0006469d 8313
ca695ac9
JB
8314 case COND_EXPR:
8315 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8316 if (integer_onep (TREE_OPERAND (exp, 1))
8317 && integer_zerop (TREE_OPERAND (exp, 2)))
8318 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
904762c8 8319
ca695ac9
JB
8320 else if (integer_zerop (TREE_OPERAND (exp, 1))
8321 && integer_onep (TREE_OPERAND (exp, 2)))
8322 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0006469d 8323
ca695ac9
JB
8324 else
8325 {
8326 register rtx label1 = gen_label_rtx ();
8327 drop_through_label = gen_label_rtx ();
8328 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8329 /* Now the THEN-expression. */
8330 do_jump (TREE_OPERAND (exp, 1),
8331 if_false_label ? if_false_label : drop_through_label,
8332 if_true_label ? if_true_label : drop_through_label);
8333 /* In case the do_jump just above never jumps. */
8334 do_pending_stack_adjust ();
8335 emit_label (label1);
8336 /* Now the ELSE-expression. */
8337 do_jump (TREE_OPERAND (exp, 2),
8338 if_false_label ? if_false_label : drop_through_label,
8339 if_true_label ? if_true_label : drop_through_label);
8340 }
8341 break;
0006469d 8342
ca695ac9
JB
8343 case EQ_EXPR:
8344 if (integer_zerop (TREE_OPERAND (exp, 1)))
8345 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0766f239
RS
8346 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8347 == MODE_INT)
8348 &&
8349 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8350 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8351 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
ca695ac9
JB
8352 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8353 else
8354 comparison = compare (exp, EQ, EQ);
8355 break;
0006469d 8356
ca695ac9
JB
8357 case NE_EXPR:
8358 if (integer_zerop (TREE_OPERAND (exp, 1)))
8359 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
0766f239
RS
8360 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8361 == MODE_INT)
8362 &&
8363 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8364 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8365 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
ca695ac9
JB
8366 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8367 else
8368 comparison = compare (exp, NE, NE);
8369 break;
0006469d 8370
ca695ac9
JB
8371 case LT_EXPR:
8372 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8373 == MODE_INT)
8374 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8375 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8376 else
8377 comparison = compare (exp, LT, LTU);
8378 break;
0006469d 8379
ca695ac9
JB
8380 case LE_EXPR:
8381 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8382 == MODE_INT)
8383 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8384 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8385 else
8386 comparison = compare (exp, LE, LEU);
8387 break;
0006469d 8388
ca695ac9
JB
8389 case GT_EXPR:
8390 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8391 == MODE_INT)
8392 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8393 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8394 else
8395 comparison = compare (exp, GT, GTU);
8396 break;
0006469d 8397
ca695ac9
JB
8398 case GE_EXPR:
8399 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8400 == MODE_INT)
8401 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8402 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8403 else
8404 comparison = compare (exp, GE, GEU);
8405 break;
0006469d 8406
ca695ac9
JB
8407 default:
8408 normal:
8409 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8410#if 0
8411 /* This is not needed any more and causes poor code since it causes
8412 comparisons and tests from non-SI objects to have different code
8413 sequences. */
8414 /* Copy to register to avoid generating bad insns by cse
8415 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8416 if (!cse_not_expected && GET_CODE (temp) == MEM)
8417 temp = copy_to_reg (temp);
8418#endif
8419 do_pending_stack_adjust ();
8420 if (GET_CODE (temp) == CONST_INT)
8421 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8422 else if (GET_CODE (temp) == LABEL_REF)
8423 comparison = const_true_rtx;
8424 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8425 && !can_compare_p (GET_MODE (temp)))
8426 /* Note swapping the labels gives us not-equal. */
8427 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8428 else if (GET_MODE (temp) != VOIDmode)
8429 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8430 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8431 GET_MODE (temp), NULL_RTX, 0);
8432 else
8433 abort ();
8434 }
0006469d 8435
ca695ac9
JB
8436 /* Do any postincrements in the expression that was tested. */
8437 emit_queue ();
0006469d 8438
ca695ac9
JB
8439 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8440 straight into a conditional jump instruction as the jump condition.
8441 Otherwise, all the work has been done already. */
0006469d 8442
ca695ac9 8443 if (comparison == const_true_rtx)
0006469d 8444 {
ca695ac9
JB
8445 if (if_true_label)
8446 emit_jump (if_true_label);
0006469d 8447 }
ca695ac9
JB
8448 else if (comparison == const0_rtx)
8449 {
8450 if (if_false_label)
8451 emit_jump (if_false_label);
8452 }
8453 else if (comparison)
8454 do_jump_for_compare (comparison, if_false_label, if_true_label);
0006469d 8455
ca695ac9 8456 if (drop_through_label)
0006469d 8457 {
ca695ac9
JB
8458 /* If do_jump produces code that might be jumped around,
8459 do any stack adjusts from that code, before the place
8460 where control merges in. */
8461 do_pending_stack_adjust ();
8462 emit_label (drop_through_label);
8463 }
8464}
8465\f
8466/* Given a comparison expression EXP for values too wide to be compared
8467 with one insn, test the comparison and jump to the appropriate label.
8468 The code of EXP is ignored; we always test GT if SWAP is 0,
8469 and LT if SWAP is 1. */
0006469d 8470
ca695ac9
JB
/* Emit a word-by-word "greater than" test for a multiword comparison.
   EXP is the comparison expression; SWAP selects which operand becomes
   the left-hand side, so the same GT machinery implements both GT
   (SWAP == 0) and LT (SWAP == 1).  Either label may be 0, meaning
   "fall through" for that outcome.  */
static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  /* A missing label means that outcome just falls through; make a label
     at the end of the emitted code to stand in for it.  */
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      /* Select the subword holding the i-th most significant word;
	 subword 0 is the high word only when WORDS_BIG_ENDIAN.  */
      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      /* compare_from_rtx may fold to a constant when both words are
	 constant; otherwise it returns the condition to branch on.  */
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  Reaching here
	 means this word pair is not "greater", so inequality here
	 means "less" and decides the whole comparison: false.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  /* All words compared equal: op0 is not greater than op1.  */
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
8532
ca695ac9
JB
8533/* Compare OP0 with OP1, word at a time, in mode MODE.
8534 UNSIGNEDP says to do unsigned comparison.
8535 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
904762c8 8536
/* Like do_jump_by_parts_greater, but the operands are already rtx's.
   MODE is their (multiword) mode, UNSIGNEDP nonzero for an unsigned
   comparison.  Jump to IF_TRUE_LABEL if OP0 is greater than OP1,
   IF_FALSE_LABEL otherwise; either label may be 0 (fall through).  */
static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* A missing label means that outcome falls through; substitute a
     label placed after all the emitted code.  */
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      /* Subword 0 is the most significant word only on big-endian
	 word order; otherwise count down from the top.  */
      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  Not greater and
	 not equal at this word means less: the whole test is false.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  /* Every word pair was equal, so OP0 is not greater than OP1.  */
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
bbf6f052 8595
ca695ac9
JB
8596/* Given an EQ_EXPR expression EXP for values too wide to be compared
8597 with one insn, test the comparison and jump to the appropriate label. */
8598
8599static void
8600do_jump_by_parts_equality (exp, if_false_label, if_true_label)
8601 tree exp;
8602 rtx if_false_label, if_true_label;
bbf6f052 8603{
ca695ac9
JB
8604 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8605 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8606 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8607 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8608 int i;
8609 rtx drop_through_label = 0;
bbf6f052 8610
ca695ac9
JB
8611 if (! if_false_label)
8612 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 8613
ca695ac9
JB
8614 for (i = 0; i < nwords; i++)
8615 {
8616 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
8617 operand_subword_force (op1, i, mode),
8618 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
8619 word_mode, NULL_RTX, 0);
8620 if (comp == const_true_rtx)
8621 emit_jump (if_false_label);
8622 else if (comp != const0_rtx)
8623 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8624 }
1499e0a8 8625
ca695ac9
JB
8626 if (if_true_label)
8627 emit_jump (if_true_label);
8628 if (drop_through_label)
8629 emit_label (drop_through_label);
8630}
8631\f
8632/* Jump according to whether OP0 is 0.
8633 We assume that OP0 has an integer mode that is too wide
8634 for the available compare insns. */
1499e0a8 8635
ca695ac9
JB
8636static void
8637do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
8638 rtx op0;
8639 rtx if_false_label, if_true_label;
8640{
8641 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
8642 int i;
8643 rtx drop_through_label = 0;
1499e0a8 8644
ca695ac9
JB
8645 if (! if_false_label)
8646 drop_through_label = if_false_label = gen_label_rtx ();
1499e0a8 8647
ca695ac9
JB
8648 for (i = 0; i < nwords; i++)
8649 {
8650 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
8651 GET_MODE (op0)),
8652 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
8653 if (comp == const_true_rtx)
8654 emit_jump (if_false_label);
8655 else if (comp != const0_rtx)
8656 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8657 }
1499e0a8 8658
ca695ac9
JB
8659 if (if_true_label)
8660 emit_jump (if_true_label);
8661 if (drop_through_label)
8662 emit_label (drop_through_label);
8663}
bbf6f052 8664
ca695ac9
JB
8665/* Given a comparison expression in rtl form, output conditional branches to
8666 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
bbf6f052 8667
ca695ac9
JB
/* Given a comparison rtx (as produced by compare_from_rtx), emit the
   conditional branch(es): to IF_TRUE_LABEL when COMPARISON holds, to
   IF_FALSE_LABEL when it does not.  Either label may be 0, in which
   case control falls through for that outcome.  */
static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      /* Branch on the condition itself; bcc_gen_fctn maps an rtx
	 comparison code to the target's branch generator.  */
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Remember the insn two back from the end, so we can find the
	 first insn emitted below even if the branch expander replaces
	 the preceding compare insn.  */
      if (prev != 0)
	prev = PREV_INSN (prev);

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();

      /* Here we get the insn before what was just emitted.
	 On some machines, emitting the branch can discard
	 the previous compare insn and emit a replacement.  */
      if (prev == 0)
	/* If there's only one preceding insn...  */
	insn = get_insns ();
      else
	insn = NEXT_INSN (prev);

      /* Scan the newly emitted insns for the (single) jump.
	 NOTE(review): the scan starts at NEXT_INSN (insn); if PREV was 0
	 and the branch were the very first insn of the function it would
	 be skipped — presumably a compare always precedes it.  Verify.  */
      for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      /* Invert the branch so it jumps to IF_FALSE_LABEL when COMPARISON
	 is false.  If the condition can't be inverted, keep the original
	 branch but retarget it around an unconditional jump to the
	 false label.  */
      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
8731\f
8732/* Generate code for a comparison expression EXP
8733 (including code to compute the values to be compared)
8734 and set (CC0) according to the result.
8735 SIGNED_CODE should be the rtx operation for this comparison for
8736 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
8737
8738 We force a stack adjustment unless there are currently
8739 things pushed on the stack that aren't yet used. */
8740
8741static rtx
8742compare (exp, signed_code, unsigned_code)
8743 register tree exp;
8744 enum rtx_code signed_code, unsigned_code;
8745{
8746 register rtx op0
8747 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8748 register rtx op1
8749 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8750 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
8751 register enum machine_mode mode = TYPE_MODE (type);
8752 int unsignedp = TREE_UNSIGNED (type);
8753 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
bbf6f052 8754
ca695ac9
JB
8755 return compare_from_rtx (op0, op1, code, unsignedp, mode,
8756 ((mode == BLKmode)
8757 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
8758 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
8759}
bbf6f052 8760
ca695ac9
JB
8761/* Like compare but expects the values to compare as two rtx's.
8762 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 8763
ca695ac9
JB
8764 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
8765 compared.
bbf6f052 8766
ca695ac9
JB
8767 If ALIGN is non-zero, it is the alignment of this type; if zero, the
8768 size of MODE should be used. */
bbf6f052 8769
ca695ac9
JB
/* Emit a compare of OP0 against OP1 using rtx comparison CODE, and
   return an rtx expressing the condition over (CC0) — or a constant
   (const_true_rtx / const0_rtx) if the result is known at compile
   time.  UNSIGNEDP, MODE, SIZE and ALIGN are as described in the
   comment above.  */
rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      /* Swapping the operands requires the mirror-image condition.  */
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  /* If both operands are constant, try to fold the comparison now.  */
  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  /* Emit the actual compare insn; the condition lives in (CC0).  */
  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
8831\f
ca695ac9
JB
8832/* Generate code to calculate EXP using a store-flag instruction
8833 and return an rtx for the result. EXP is either a comparison
8834 or a TRUTH_NOT_EXPR whose operand is a comparison.
bbf6f052 8835
ca695ac9 8836 If TARGET is nonzero, store the result there if convenient.
bbf6f052 8837
ca695ac9
JB
8838 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
8839 cheap.
bbf6f052 8840
ca695ac9
JB
8841 Return zero if there is no suitable set-flag instruction
8842 available on this machine.
bbf6f052 8843
ca695ac9
JB
8844 Once expand_expr has been called on the arguments of the comparison,
8845 we are committed to doing the store flag, since it is not safe to
8846 re-evaluate the expression. We emit the store-flag insn by calling
8847 emit_store_flag, but only expand the arguments if we have a reason
8848 to believe that emit_store_flag will be successful. If we think that
8849 it will, but it isn't, we have to simulate the store-flag with a
8850 set/jump/set sequence. */
bbf6f052 8851
ca695ac9
JB
8852static rtx
8853do_store_flag (exp, target, mode, only_cheap)
8854 tree exp;
8855 rtx target;
8856 enum machine_mode mode;
8857 int only_cheap;
bbf6f052 8858{
ca695ac9
JB
8859 enum rtx_code code;
8860 tree arg0, arg1, type;
8861 tree tem;
8862 enum machine_mode operand_mode;
8863 int invert = 0;
8864 int unsignedp;
8865 rtx op0, op1;
8866 enum insn_code icode;
8867 rtx subtarget = target;
8868 rtx result, label, pattern, jump_pat;
bbf6f052 8869
ca695ac9
JB
8870 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8871 result at the end. We can't simply invert the test since it would
8872 have already been inverted if it were valid. This case occurs for
8873 some floating-point comparisons. */
8874
8875 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8876 invert = 1, exp = TREE_OPERAND (exp, 0);
8877
8878 arg0 = TREE_OPERAND (exp, 0);
8879 arg1 = TREE_OPERAND (exp, 1);
8880 type = TREE_TYPE (arg0);
8881 operand_mode = TYPE_MODE (type);
8882 unsignedp = TREE_UNSIGNED (type);
8883
8884 /* We won't bother with BLKmode store-flag operations because it would mean
8885 passing a lot of information to emit_store_flag. */
8886 if (operand_mode == BLKmode)
8887 return 0;
8888
8889 STRIP_NOPS (arg0);
8890 STRIP_NOPS (arg1);
8891
8892 /* Get the rtx comparison code to use. We know that EXP is a comparison
8893 operation of some type. Some comparisons against 1 and -1 can be
8894 converted to comparisons with zero. Do so here so that the tests
8895 below will be aware that we have a comparison with zero. These
8896 tests will not catch constants in the first operand, but constants
8897 are rarely passed as the first operand. */
8898
8899 switch (TREE_CODE (exp))
8900 {
8901 case EQ_EXPR:
8902 code = EQ;
8903 break;
8904 case NE_EXPR:
8905 code = NE;
8906 break;
8907 case LT_EXPR:
8908 if (integer_onep (arg1))
8909 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8910 else
8911 code = unsignedp ? LTU : LT;
8912 break;
8913 case LE_EXPR:
8914 if (! unsignedp && integer_all_onesp (arg1))
8915 arg1 = integer_zero_node, code = LT;
8916 else
8917 code = unsignedp ? LEU : LE;
8918 break;
8919 case GT_EXPR:
8920 if (! unsignedp && integer_all_onesp (arg1))
8921 arg1 = integer_zero_node, code = GE;
8922 else
8923 code = unsignedp ? GTU : GT;
8924 break;
8925 case GE_EXPR:
8926 if (integer_onep (arg1))
8927 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8928 else
8929 code = unsignedp ? GEU : GE;
8930 break;
8931 default:
8932 abort ();
8933 }
bbf6f052 8934
ca695ac9
JB
8935 /* Put a constant second. */
8936 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
bbf6f052 8937 {
ca695ac9
JB
8938 tem = arg0; arg0 = arg1; arg1 = tem;
8939 code = swap_condition (code);
bbf6f052 8940 }
bbf6f052 8941
ca695ac9
JB
8942 /* If this is an equality or inequality test of a single bit, we can
8943 do this by shifting the bit being tested to the low-order bit and
8944 masking the result with the constant 1. If the condition was EQ,
8945 we xor it with 1. This does not require an scc insn and is faster
8946 than an scc insn even if we have it. */
bbf6f052 8947
ca695ac9
JB
8948 if ((code == NE || code == EQ)
8949 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8950 && integer_pow2p (TREE_OPERAND (arg0, 1))
8951 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
8952 {
8953 tree inner = TREE_OPERAND (arg0, 0);
8954 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
8955 NULL_RTX, VOIDmode, 0)));
8956 int ops_unsignedp;
bbf6f052 8957
ca695ac9
JB
8958 /* If INNER is a right shift of a constant and it plus BITNUM does
8959 not overflow, adjust BITNUM and INNER. */
bbf6f052 8960
ca695ac9
JB
8961 if (TREE_CODE (inner) == RSHIFT_EXPR
8962 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
8963 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
8964 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
8965 < TYPE_PRECISION (type)))
8966 {
8967 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
8968 inner = TREE_OPERAND (inner, 0);
8969 }
bbf6f052 8970
ca695ac9
JB
8971 /* If we are going to be able to omit the AND below, we must do our
8972 operations as unsigned. If we must use the AND, we have a choice.
8973 Normally unsigned is faster, but for some machines signed is. */
8974 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
ad92c826
RK
8975#ifdef LOAD_EXTEND_OP
8976 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
ca695ac9
JB
8977#else
8978 : 1
8979#endif
8980 );
bbf6f052 8981
ca695ac9
JB
8982 if (subtarget == 0 || GET_CODE (subtarget) != REG
8983 || GET_MODE (subtarget) != operand_mode
8984 || ! safe_from_p (subtarget, inner))
8985 subtarget = 0;
e7c33f54 8986
ca695ac9 8987 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 8988
ca695ac9
JB
8989 if (bitnum != 0)
8990 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
0c316b20 8991 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 8992
ca695ac9
JB
8993 if (GET_MODE (op0) != mode)
8994 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 8995
ca695ac9 8996 if ((code == EQ && ! invert) || (code == NE && invert))
0c316b20 8997 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
ca695ac9 8998 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 8999
ca695ac9
JB
9000 /* Put the AND last so it can combine with more things. */
9001 if (bitnum != TYPE_PRECISION (type) - 1)
0c316b20 9002 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 9003
ca695ac9
JB
9004 return op0;
9005 }
bbf6f052 9006
ca695ac9
JB
9007 /* Now see if we are likely to be able to do this. Return if not. */
9008 if (! can_compare_p (operand_mode))
9009 return 0;
9010 icode = setcc_gen_code[(int) code];
9011 if (icode == CODE_FOR_nothing
9012 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9013 {
9014 /* We can only do this if it is one of the special cases that
9015 can be handled without an scc insn. */
9016 if ((code == LT && integer_zerop (arg1))
9017 || (! only_cheap && code == GE && integer_zerop (arg1)))
9018 ;
9019 else if (BRANCH_COST >= 0
9020 && ! only_cheap && (code == NE || code == EQ)
9021 && TREE_CODE (type) != REAL_TYPE
9022 && ((abs_optab->handlers[(int) operand_mode].insn_code
9023 != CODE_FOR_nothing)
9024 || (ffs_optab->handlers[(int) operand_mode].insn_code
9025 != CODE_FOR_nothing)))
9026 ;
9027 else
9028 return 0;
9029 }
9030
9031 preexpand_calls (exp);
9032 if (subtarget == 0 || GET_CODE (subtarget) != REG
9033 || GET_MODE (subtarget) != operand_mode
9034 || ! safe_from_p (subtarget, arg1))
9035 subtarget = 0;
bbf6f052 9036
ca695ac9
JB
9037 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9038 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
bbf6f052 9039
ca695ac9
JB
9040 if (target == 0)
9041 target = gen_reg_rtx (mode);
bbf6f052 9042
ca695ac9
JB
9043 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9044 because, if the emit_store_flag does anything it will succeed and
9045 OP0 and OP1 will not be used subsequently. */
bbf6f052 9046
ca695ac9
JB
9047 result = emit_store_flag (target, code,
9048 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9049 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9050 operand_mode, unsignedp, 1);
bbf6f052 9051
ca695ac9
JB
9052 if (result)
9053 {
9054 if (invert)
9055 result = expand_binop (mode, xor_optab, result, const1_rtx,
9056 result, 0, OPTAB_LIB_WIDEN);
9057 return result;
9058 }
bbf6f052 9059
ca695ac9
JB
9060 /* If this failed, we have to do this with set/compare/jump/set code. */
9061 if (target == 0 || GET_CODE (target) != REG
9062 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9063 target = gen_reg_rtx (GET_MODE (target));
bbf6f052 9064
ca695ac9
JB
9065 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9066 result = compare_from_rtx (op0, op1, code, unsignedp,
9067 operand_mode, NULL_RTX, 0);
9068 if (GET_CODE (result) == CONST_INT)
9069 return (((result == const0_rtx && ! invert)
9070 || (result != const0_rtx && invert))
9071 ? const0_rtx : const1_rtx);
bbf6f052 9072
ca695ac9
JB
9073 label = gen_label_rtx ();
9074 if (bcc_gen_fctn[(int) code] == 0)
9075 abort ();
bbf6f052 9076
ca695ac9
JB
9077 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9078 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9079 emit_label (label);
bbf6f052 9080
ca695ac9
JB
9081 return target;
9082}
9083\f
9084/* Generate a tablejump instruction (used for switch statements). */
bbf6f052 9085
ca695ac9 9086#ifdef HAVE_tablejump
bbf6f052 9087
ca695ac9
JB
9088/* INDEX is the value being switched on, with the lowest value
9089 in the table already subtracted.
9090 MODE is its expected mode (needed if INDEX is constant).
9091 RANGE is the length of the jump table.
9092 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
bbf6f052 9093
ca695ac9
JB
9094 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9095 index value is out of range. */
bbf6f052 9096
ca695ac9
JB
/* Emit a range check plus an indexed jump through the dispatch table at
   TABLE_LABEL; out-of-range values go to DEFAULT_LABEL.  INDEX already
   has the table's lowest case value subtracted; MODE is its mode and
   RANGE the table length.  */
void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
		   gen_rtx (MULT, Pmode, index,
			    GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
		   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  /* Load the selected table entry.  The table is constant, so mark the
     MEM unchanging to help later optimization.  */
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}
bbf6f052 9160
ca695ac9 9161#endif /* HAVE_tablejump */
bbf6f052 9162
bbf6f052 9163
ca695ac9
JB
9164/* Emit a suitable bytecode to load a value from memory, assuming a pointer
9165 to that value is on the top of the stack. The resulting type is TYPE, and
9166 the source declaration is DECL. */
bbf6f052 9167
ca695ac9
JB
9168void
9169bc_load_memory (type, decl)
9170 tree type, decl;
9171{
9172 enum bytecode_opcode opcode;
9173
9174
9175 /* Bit fields are special. We only know about signed and
9176 unsigned ints, and enums. The latter are treated as
9177 signed integers. */
9178
9179 if (DECL_BIT_FIELD (decl))
9180 if (TREE_CODE (type) == ENUMERAL_TYPE
9181 || TREE_CODE (type) == INTEGER_TYPE)
9182 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9183 else
9184 abort ();
9185 else
9186 /* See corresponding comment in bc_store_memory(). */
9187 if (TYPE_MODE (type) == BLKmode
9188 || TYPE_MODE (type) == VOIDmode)
9189 return;
9190 else
6bd6178d 9191 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
bbf6f052 9192
ca695ac9
JB
9193 if (opcode == neverneverland)
9194 abort ();
9195
9196 bc_emit_bytecode (opcode);
9197
9198#ifdef DEBUG_PRINT_CODE
9199 fputc ('\n', stderr);
9200#endif
bbf6f052 9201}
bbf6f052 9202
bbf6f052 9203
ca695ac9
JB
9204/* Store the contents of the second stack slot to the address in the
9205 top stack slot. DECL is the declaration of the destination and is used
9206 to determine whether we're dealing with a bitfield. */
bbf6f052 9207
ca695ac9
JB
9208void
9209bc_store_memory (type, decl)
9210 tree type, decl;
9211{
9212 enum bytecode_opcode opcode;
9213
9214
9215 if (DECL_BIT_FIELD (decl))
f81497d9 9216 {
ca695ac9
JB
9217 if (TREE_CODE (type) == ENUMERAL_TYPE
9218 || TREE_CODE (type) == INTEGER_TYPE)
9219 opcode = sstoreBI;
f81497d9 9220 else
ca695ac9 9221 abort ();
f81497d9 9222 }
ca695ac9
JB
9223 else
9224 if (TYPE_MODE (type) == BLKmode)
9225 {
9226 /* Copy structure. This expands to a block copy instruction, storeBLK.
9227 In addition to the arguments expected by the other store instructions,
9228 it also expects a type size (SImode) on top of the stack, which is the
9229 structure size in size units (usually bytes). The two first arguments
9230 are already on the stack; so we just put the size on level 1. For some
9231 other languages, the size may be variable, this is why we don't encode
9232 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
9233
9234 bc_expand_expr (TYPE_SIZE (type));
9235 opcode = storeBLK;
9236 }
9237 else
6bd6178d 9238 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
f81497d9 9239
ca695ac9
JB
9240 if (opcode == neverneverland)
9241 abort ();
9242
9243 bc_emit_bytecode (opcode);
9244
9245#ifdef DEBUG_PRINT_CODE
9246 fputc ('\n', stderr);
9247#endif
f81497d9
RS
9248}
9249
f81497d9 9250
ca695ac9
JB
9251/* Allocate local stack space sufficient to hold a value of the given
9252 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9253 integral power of 2. A special case is locals of type VOID, which
9254 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9255 remapped into the corresponding attribute of SI. */
9256
9257rtx
9258bc_allocate_local (size, alignment)
9259 int size, alignment;
f81497d9 9260{
ca695ac9
JB
9261 rtx retval;
9262 int byte_alignment;
f81497d9 9263
ca695ac9
JB
9264 if (size < 0)
9265 abort ();
f81497d9 9266
ca695ac9
JB
9267 /* Normalize size and alignment */
9268 if (!size)
9269 size = UNITS_PER_WORD;
bbf6f052 9270
ca695ac9
JB
9271 if (alignment < BITS_PER_UNIT)
9272 byte_alignment = 1 << (INT_ALIGN - 1);
9273 else
9274 /* Align */
9275 byte_alignment = alignment / BITS_PER_UNIT;
bbf6f052 9276
ca695ac9
JB
9277 if (local_vars_size & (byte_alignment - 1))
9278 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
bbf6f052 9279
ca695ac9
JB
9280 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9281 local_vars_size += size;
bbf6f052 9282
ca695ac9 9283 return retval;
bbf6f052
RK
9284}
9285
bbf6f052 9286
ca695ac9
JB
9287/* Allocate variable-sized local array. Variable-sized arrays are
9288 actually pointers to the address in memory where they are stored. */
9289
9290rtx
9291bc_allocate_variable_array (size)
9292 tree size;
bbf6f052 9293{
ca695ac9
JB
9294 rtx retval;
9295 const int ptralign = (1 << (PTR_ALIGN - 1));
bbf6f052 9296
ca695ac9
JB
9297 /* Align pointer */
9298 if (local_vars_size & ptralign)
9299 local_vars_size += ptralign - (local_vars_size & ptralign);
bbf6f052 9300
ca695ac9
JB
9301 /* Note down local space needed: pointer to block; also return
9302 dummy rtx */
bbf6f052 9303
ca695ac9
JB
9304 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9305 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
9306 return retval;
bbf6f052 9307}
bbf6f052 9308
bbf6f052 9309
ca695ac9
JB
/* Push the machine address for the given external variable offset.  */
void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  /* constP pushes a pointer constant; its operand is emitted as a
     reference to EXTERNADDR's label name plus the recorded offset.  */
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
			 BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
9323
bbf6f052 9324
ca695ac9
JB
/* Return a freshly xmalloc'd copy of the string S.  */
static char *
bc_strdup (s)
    char *s;
{
  char *copy = (char *) xmalloc (strlen (s) + 1);

  strcpy (copy, s);
  return copy;
}
bbf6f052 9333
bbf6f052 9334
ca695ac9
JB
/* Like above, but expects an IDENTIFIER.  Pushes the address of the
   external object named by ID, displaced by OFFSET bytes.  */
void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  /* NOTE(review): bc_xstrdup is defined outside this file (the local
     helper above is bc_strdup); presumably it copies the identifier's
     name so the emitted label reference owns its storage -- confirm.  */
  bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
bbf6f052 9351
bbf6f052 9352
ca695ac9
JB
/* Push the machine address for the given local variable offset.  */
void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  /* LOCALADDR is a bytecode label rtx; its recorded offset is emitted
     as the operand of a localP (push-local-address) instruction.  */
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}
bbf6f052 9360
bbf6f052 9361
ca695ac9
JB
/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */
void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  /* The recorded offset is in bits; convert to bytes for argP.  */
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
			      / BITS_PER_UNIT));
}
bbf6f052 9371
ca695ac9
JB
9372
9373/* Convert a[i] into *(a + i). */
9374tree
9375bc_canonicalize_array_ref (exp)
9376 tree exp;
9377{
9378 tree type = TREE_TYPE (exp);
9379 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
9380 TREE_OPERAND (exp, 0));
9381 tree index = TREE_OPERAND (exp, 1);
9382
9383
9384 /* Convert the integer argument to a type the same size as a pointer
9385 so the multiply won't overflow spuriously. */
9386
9387 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
9388 index = convert (type_for_size (POINTER_SIZE, 0), index);
9389
9390 /* The array address isn't volatile even if the array is.
9391 (Of course this isn't terribly relevant since the bytecode
9392 translator treats nearly everything as volatile anyway.) */
9393 TREE_THIS_VOLATILE (array_adr) = 0;
9394
9395 return build1 (INDIRECT_REF, type,
9396 fold (build (PLUS_EXPR,
9397 TYPE_POINTER_TO (type),
9398 array_adr,
9399 fold (build (MULT_EXPR,
9400 TYPE_POINTER_TO (type),
9401 index,
9402 size_in_bytes (type))))));
bbf6f052
RK
9403}
9404
bbf6f052 9405
ca695ac9
JB
9406/* Load the address of the component referenced by the given
9407 COMPONENT_REF expression.
bbf6f052 9408
ca695ac9 9409 Returns innermost lvalue. */
bbf6f052 9410
ca695ac9
JB
9411tree
9412bc_expand_component_address (exp)
9413 tree exp;
bbf6f052 9414{
ca695ac9
JB
9415 tree tem, chain;
9416 enum machine_mode mode;
9417 int bitpos = 0;
9418 HOST_WIDE_INT SIval;
a7c5971a 9419
bbf6f052 9420
ca695ac9
JB
9421 tem = TREE_OPERAND (exp, 1);
9422 mode = DECL_MODE (tem);
bbf6f052 9423
ca695ac9
JB
9424
9425 /* Compute cumulative bit offset for nested component refs
9426 and array refs, and find the ultimate containing object. */
9427
9428 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
bbf6f052 9429 {
ca695ac9
JB
9430 if (TREE_CODE (tem) == COMPONENT_REF)
9431 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
9432 else
9433 if (TREE_CODE (tem) == ARRAY_REF
9434 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9435 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
bbf6f052 9436
ca695ac9
JB
9437 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
9438 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
9439 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
9440 else
9441 break;
9442 }
bbf6f052 9443
c02bd5d9 9444 bc_expand_expr (tem);
bbf6f052 9445
cd1b4b44 9446
ca695ac9
JB
9447 /* For bitfields also push their offset and size */
9448 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
9449 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
9450 else
9451 if (SIval = bitpos / BITS_PER_UNIT)
9452 bc_emit_instruction (addconstPSI, SIval);
bbf6f052 9453
ca695ac9 9454 return (TREE_OPERAND (exp, 1));
bbf6f052 9455}
e7c33f54 9456
bbf6f052 9457
ca695ac9
JB
/* Emit code to push two SI constants */
void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  /* Pushed in this order -- offset first, then size -- which is the
     order the bit-field load/store sequences in this file rely on.  */
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}
bbf6f052 9466
bbf6f052 9467
ca695ac9
JB
/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);


  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      /* Rewrite a[i] as *(a + i) and retry.  */
      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:

      return (bc_expand_component_address (exp));

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
	 TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
	 also make sure we have an operand, just in case... */

      if (TREE_OPERAND (exp, 0)
	  && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))

	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
				 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:

      /* Functions are addressed through the pointer table by their
	 assembler name.  */
      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			     BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case RESULT_DECL:

      /* The function's return-value slot.  */
      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

#if 0
      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
#endif

      /* Externals go through the pointer table; everything else is a
	 frame local.  */
      if (DECL_EXTERNAL (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case STRING_CST:
      {
	rtx r;

	/* Emit the string into the constant pool and push a pointer-
	   table reference to it.  */
	bc_emit_bytecode (constP);
	r = output_constant_def (exp);
	bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
	fputc ('\n', stderr);
#endif
      }
      break;

    default:

      abort();
      break;
    }

  /* Most lvalues don't have components. */
  return (exp);
}
bbf6f052 9589
ca695ac9
JB
9590
9591/* Emit a type code to be used by the runtime support in handling
9592 parameter passing. The type code consists of the machine mode
9593 plus the minimal alignment shifted left 8 bits. */
9594
9595tree
9596bc_runtime_type_code (type)
9597 tree type;
9598{
9599 int val;
9600
9601 switch (TREE_CODE (type))
bbf6f052 9602 {
ca695ac9
JB
9603 case VOID_TYPE:
9604 case INTEGER_TYPE:
9605 case REAL_TYPE:
9606 case COMPLEX_TYPE:
9607 case ENUMERAL_TYPE:
9608 case POINTER_TYPE:
9609 case RECORD_TYPE:
9610
6bd6178d 9611 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
ca695ac9
JB
9612 break;
9613
9614 case ERROR_MARK:
9615
9616 val = 0;
9617 break;
9618
9619 default:
af508edd 9620
ca695ac9
JB
9621 abort ();
9622 }
9623 return build_int_2 (val, 0);
9624}
af508edd 9625
af508edd 9626
ca695ac9
JB
9627/* Generate constructor label */
9628char *
9629bc_gen_constr_label ()
9630{
9631 static int label_counter;
9632 static char label[20];
bbf6f052 9633
ca695ac9 9634 sprintf (label, "*LR%d", label_counter++);
bbf6f052 9635
ca695ac9
JB
9636 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
9637}
bbf6f052 9638
bbf6f052 9639
ca695ac9
JB
/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;
  rtx constr_rtx;		/* NOTE(review): unused.  */


  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment. */

  /* Allocate space in proper segment and push pointer to space on stack.
   */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      /* Fully-constant constructors are emitted whole into the text
	 section.  */
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      /* Otherwise only the (zero-filled) backing storage goes into the
	 data section; the members are stored at run time below.  */
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }


  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals. */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal. */
  if (TREE_CONSTANT (constr))
    return;


  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor. */

  /* constructor type is structure */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first. */

      if (list_length (CONSTRUCTOR_ELTS (constr))
	  != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
	{
	  /* duplicate keeps a copy of the pointer for the field stores;
	     clearBLK consumes pointer + size.  */
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}

      /* Store each element of the constructor into the corresponding
	 field of TARGET. */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
	  mode = DECL_MODE (field);
	  unsignedp = TREE_UNSIGNED (field);

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

	  bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires. */
			  VOIDmode, 0,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
  else

    /* Constructor type is array */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
	register tree elt;
	register int i;
	tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
	int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
	int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
	tree elttype = TREE_TYPE (TREE_TYPE (constr));

	/* If the constructor has fewer elements than the array,
	   clear the whole array first. */

	if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
	  {
	    bc_emit_instruction (duplicate);
	    bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	    bc_emit_instruction (clearBLK);
	  }


	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements. */

	for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
	     elt;
	     elt = TREE_CHAIN (elt), i++)
	  {
	    register enum machine_mode mode;
	    int bitsize;
	    int bitpos;
	    int unsignedp;

	    mode = TYPE_MODE (elttype);
	    bitsize = GET_MODE_BITSIZE (mode);
	    unsignedp = TREE_UNSIGNED (elttype);

	    /* Element bit position is the element index scaled by the
	       element size in bits.  */
	    bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
		      /* * TYPE_SIZE_UNIT (elttype) */ );

	    bc_store_field (elt, bitsize, bitpos, mode,
			    TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			    /* The alignment of TARGET is
			       at least what its type requires. */
			    VOIDmode, 0,
			    TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (constr)));
	  }

      }
}
bbf6f052 9790
bbf6f052 9791
ca695ac9
JB
9792/* Store the value of EXP (an expression tree) into member FIELD of
9793 structure at address on stack, which has type TYPE, mode MODE and
9794 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
9795 structure.
bbf6f052 9796
ca695ac9
JB
9797 ALIGN is the alignment that TARGET is known to have, measured in bytes.
9798 TOTAL_SIZE is its size in bytes, or -1 if variable. */
bbf6f052 9799
ca695ac9
JB
9800void
9801bc_store_field (field, bitsize, bitpos, mode, exp, type,
9802 value_mode, unsignedp, align, total_size)
9803 int bitsize, bitpos;
9804 enum machine_mode mode;
9805 tree field, exp, type;
9806 enum machine_mode value_mode;
9807 int unsignedp;
9808 int align;
9809 int total_size;
9810{
bbf6f052 9811
ca695ac9
JB
9812 /* Expand expression and copy pointer */
9813 bc_expand_expr (exp);
9814 bc_emit_instruction (over);
bbf6f052 9815
bbf6f052 9816
ca695ac9
JB
9817 /* If the component is a bit field, we cannot use addressing to access
9818 it. Use bit-field techniques to store in it. */
bbf6f052 9819
ca695ac9
JB
9820 if (DECL_BIT_FIELD (field))
9821 {
9822 bc_store_bit_field (bitpos, bitsize, unsignedp);
9823 return;
9824 }
9825 else
9826 /* Not bit field */
9827 {
9828 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
9829
9830 /* Advance pointer to the desired member */
9831 if (offset)
9832 bc_emit_instruction (addconstPSI, offset);
9833
9834 /* Store */
9835 bc_store_memory (type, field);
9836 }
9837}
bbf6f052 9838
ca695ac9
JB
9839
/* Store SI/SU in bitfield */
void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Store.  (UNSIGNEDP is accepted for symmetry with bc_load_bit_field
     but is not used here -- stores need no extension.)  */
  bc_emit_instruction (sstoreBI);
}
e87b4f3f 9851
88d3b7f0 9852
ca695ac9
JB
/* Load SI/SU from bitfield */
void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
709f5be1 9864
bbf6f052 9865
ca695ac9
JB
/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);

      /* Falls through: two levels are dropped as two single drops.  */
    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      /* NOTE(review): stack_depth is adjusted by hand only here;
	 the drop cases presumably have their depth effect applied
	 inside bc_emit_instruction -- confirm.  */
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}
This page took 1.348708 seconds and 5 git commands to generate.