/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"


#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;			/* Destination block (a BLKmode MEM).  */
  rtx to_addr;			/* Address of the destination.  */
  int autinc_to;		/* Nonzero if TO_ADDR auto-increments.  */
  int explicit_inc_to;		/* +1/-1 if we emit explicit add insns.  */
  rtx from;			/* Source block (a BLKmode MEM).  */
  rtx from_addr;		/* Address of the source.  */
  int autinc_from;		/* Nonzero if FROM_ADDR auto-increments.  */
  int explicit_inc_from;	/* +1/-1 if we emit explicit add insns.  */
  int len;			/* Bytes remaining to be moved.  */
  int offset;			/* Current offset into the blocks.  */
  int reverse;			/* Nonzero to move from high to low addresses.  */
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;


static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void group_insns PROTO((rtx));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */
void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
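
/* For illustration only (not part of the compiler): a modemap.def entry
   of the hypothetical form

     DEF_MODEMAP (SImode, ..., constSI, loadSI, storeSI)

   expands under the macro above to

     mode_to_const_map[(int) SImode] = constSI;
     mode_to_load_map[(int) SImode] = loadSI;
     mode_to_store_map[(int) SImode] = storeSI;

   so each map ends up indexed by machine mode.  The opcode names here are
   made up; the real ones come from modemap.def.  */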
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
	 to facilitate use of autoincrement.
	 Make a copy of the contents of the memory location
	 rather than a copy of the address, but not
	 if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  XEXP (x, 0) = QUEUED_VAR (y);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, x),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return x;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
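
/* Illustrative sketch (not code from this file): the intended calling
   protocol for the queue above.  VAR and INC_BODY are hypothetical; INC_BODY
   would be an insn body such as one produced by gen_add2_insn.  */
#if 0
static void
queue_protocol_example (rtx var, rtx inc_body)
{
  /* Defer the increment.  The QUEUED rtx stands for VAR's value
     before the increment.  */
  rtx queued = enqueue_insn (var, inc_body);

  /* Convert the QUEUED into something an insn can contain; here the
     increment has not been emitted yet, so this returns VAR itself.  */
  rtx usable = protect_from_queue (queued, 0);

  /* ... emit insns that use USABLE right away ...  */

  /* At the next sequence point, emit all deferred increments.  */
  emit_queue ();
}
#endif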
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi
	  if (HAVE_extendpsisi)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
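
/* Illustrative sketch (not code from this file): widening a QImode pseudo
   into a fresh SImode pseudo via convert_move.  BYTE_REG is hypothetical.  */
#if 0
static rtx
widen_example (rtx byte_reg)		/* BYTE_REG has mode QImode.  */
{
  rtx word_reg = gen_reg_rtx (SImode);
  /* UNSIGNEDP == 0 requests sign extension; nonzero would request
     zero extension instead.  */
  convert_move (word_reg, byte_reg, 0);
  return word_reg;
}
#endif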

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || GET_CODE (x) == REG)))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
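
/* Illustrative sketch (not part of the compiler): the sign/zero-extension
   arithmetic above, isolated and applied to a concrete value.  Assumes
   WIDTH is less than the number of bits in HOST_WIDE_INT.  */
#if 0
static HOST_WIDE_INT
extend_example (HOST_WIDE_INT val, int width, int unsignedp)
{
  /* First zero-extend: keep only the low WIDTH bits.  */
  val &= ((HOST_WIDE_INT) 1 << width) - 1;
  /* Then, for signed values whose sign bit is set, fill the high bits.
     E.g. val = 0xFF, width = 8: unsigned yields 255, signed yields -1.  */
  if (! unsignedp && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
    val |= (HOST_WIDE_INT) (-1) << width;
  return val;
}
#endif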
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
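
/* Illustrative sketch (not from the compiler): the greedy decomposition
   performed above, assuming a hypothetical target where MOVE_MAX is 8 and
   8-, 4-, 2- and 1-byte integer moves all exist and alignment permits them.
   For l = 11 this yields 1 DImode + 1 HImode + 1 QImode move, i.e. 3 insns.  */
#if 0
static int
ninsns_example (unsigned int l)
{
  int n = 0, size;
  for (size = 8; size >= 1; size /= 2)	/* widest piece first */
    {
      n += l / size;			/* pieces of this size */
      l %= size;			/* bytes still left to move */
    }
  return n;
}
#endif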

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
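
/* Illustrative sketch (not from this file): a typical caller copies one
   BLKmode object to another by wrapping the addresses in MEMs and letting
   emit_block_move pick the strategy.  DST_ADDR, SRC_ADDR and NBYTES are
   hypothetical.  */
#if 0
static void
block_copy_example (rtx dst_addr, rtx src_addr, int nbytes, int align)
{
  rtx dst = gen_rtx (MEM, BLKmode, dst_addr);
  rtx src = gen_rtx (MEM, BLKmode, src_addr);

  /* A small constant SIZE ends up in move_by_pieces; otherwise a movstr
     pattern or a memcpy/bcopy library call is used.  */
  emit_block_move (dst, src, GEN_INT (nbytes), align);
}
#endif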
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Mark the instructions since PREV as a libcall block.
   Put a REG_LIBCALL note on the first insn after PREV
   and a REG_RETVAL note on the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
				   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
				    REG_NOTES (insn_first));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
1759}
1760
1761/* Low level part of emit_move_insn.
1762 Called just like emit_move_insn, but assumes X and Y
1763 are basically valid. */
1764
1765rtx
1766emit_move_insn_1 (x, y)
1767 rtx x, y;
1768{
1769 enum machine_mode mode = GET_MODE (x);
1770 enum machine_mode submode;
1771 enum mode_class class = GET_MODE_CLASS (mode);
1772 int i;
1773
7308a047
RS
1774 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1775 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1776 (class == MODE_COMPLEX_INT
1777 ? MODE_INT : MODE_FLOAT),
1778 0);
1779
bbf6f052
RK
1780 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1781 return
1782 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1783
89742723 1784 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047
RS
1785 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1786 && submode != BLKmode
1787 && (mov_optab->handlers[(int) submode].insn_code
1788 != CODE_FOR_nothing))
1789 {
1790 /* Don't split destination if it is a stack push. */
1791 int stack = push_operand (x, GET_MODE (x));
1792 rtx prev = get_last_insn ();
1793
1794 /* Tell flow that the whole of the destination is being set. */
1795 if (GET_CODE (x) == REG)
1796 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1797
1798 /* If this is a stack, push the highpart first, so it
1799 will be in the argument order.
1800
1801 In that case, change_address is used only to convert
1802 the mode, not to change the address. */
c937357e
RS
1803 if (stack)
1804 {
e33c0d66
RS
1805 /* Note that the real part always precedes the imag part in memory
1806 regardless of machine's endianness. */
c937357e
RS
1807#ifdef STACK_GROWS_DOWNWARD
1808 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1809 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1810 gen_imagpart (submode, y)));
c937357e
RS
1811 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1812 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1813 gen_realpart (submode, y)));
c937357e
RS
1814#else
1815 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1816 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1817 gen_realpart (submode, y)));
1818 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1819 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1820 gen_imagpart (submode, y)));
1821#endif
1822 }
1823 else
1824 {
1825 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1826 (gen_highpart (submode, x), gen_highpart (submode, y)));
1827 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1828 (gen_lowpart (submode, x), gen_lowpart (submode, y)));
1829 }
1830
1831 if (GET_CODE (x) != CONCAT)
1832 /* If X is a CONCAT, we got insns like RD = RS, ID = IS,
1833 each with a separate pseudo as destination.
1834 It's not correct for flow to treat them as a unit. */
1835 group_insns (prev);
1836
1837 return get_last_insn ();
1838 }
1839
1840 /* This will handle any multi-word mode that lacks a move_insn pattern.
1841 However, you will get better code if you define such patterns,
1842 even if they must turn into multiple assembler instructions. */
1843 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1844 {
1845 rtx last_insn = 0;
1846 rtx prev_insn = get_last_insn ();
1847
1848 for (i = 0;
1849 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1850 i++)
1851 {
1852 rtx xpart = operand_subword (x, i, 1, mode);
1853 rtx ypart = operand_subword (y, i, 1, mode);
1854
1855 /* If we can't get a part of Y, put Y into memory if it is a
1856 constant. Otherwise, force it into a register. If we still
1857 can't get a part of Y, abort. */
1858 if (ypart == 0 && CONSTANT_P (y))
1859 {
1860 y = force_const_mem (mode, y);
1861 ypart = operand_subword (y, i, 1, mode);
1862 }
1863 else if (ypart == 0)
1864 ypart = operand_subword_force (y, i, mode);
1865
1866 if (xpart == 0 || ypart == 0)
1867 abort ();
1868
1869 last_insn = emit_move_insn (xpart, ypart);
1870 }
1871 /* Mark these insns as a libcall block. */
1872 group_insns (prev_insn);
1873
1874 return last_insn;
1875 }
1876 else
1877 abort ();
1878}
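/* Example (an illustrative sketch): on a 32-bit target with no movdi
   pattern, a DImode move reaching emit_move_insn_1 takes the
   multi-word branch above and is emitted roughly as

	emit_move_insn (operand_subword (x, 0, 1, DImode),
			operand_subword (y, 0, 1, DImode));
	emit_move_insn (operand_subword (x, 1, 1, DImode),
			operand_subword (y, 1, 1, DImode));

   with the pair then grouped as a libcall block.  */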
1879\f
1880/* Pushing data onto the stack. */
1881
1882/* Push a block of length SIZE (perhaps variable)
1883 and return an rtx to address the beginning of the block.
1884 Note that it is not possible for the value returned to be a QUEUED.
1885 The value may be virtual_outgoing_args_rtx.
1886
1887 EXTRA is the number of bytes of padding to push in addition to SIZE.
1888 BELOW nonzero means this padding comes at low addresses;
1889 otherwise, the padding comes at high addresses. */
1890
1891rtx
1892push_block (size, extra, below)
1893 rtx size;
1894 int extra, below;
1895{
1896 register rtx temp;
1897 if (CONSTANT_P (size))
1898 anti_adjust_stack (plus_constant (size, extra));
1899 else if (GET_CODE (size) == REG && extra == 0)
1900 anti_adjust_stack (size);
1901 else
1902 {
1903 rtx temp = copy_to_mode_reg (Pmode, size);
1904 if (extra != 0)
1905 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1906 temp, 0, OPTAB_LIB_WIDEN);
1907 anti_adjust_stack (temp);
1908 }
1909
1910#ifdef STACK_GROWS_DOWNWARD
1911 temp = virtual_outgoing_args_rtx;
1912 if (extra != 0 && below)
1913 temp = plus_constant (temp, extra);
1914#else
1915 if (GET_CODE (size) == CONST_INT)
1916 temp = plus_constant (virtual_outgoing_args_rtx,
1917 - INTVAL (size) - (below ? 0 : extra));
1918 else if (extra != 0 && !below)
1919 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1920 negate_rtx (Pmode, plus_constant (size, extra)));
1921 else
1922 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1923 negate_rtx (Pmode, size));
1924#endif
1925
1926 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1927}
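/* Example (an illustrative sketch; SIZE_RTX, SRC and ALIGN are
   hypothetical names): to allocate stack space for a variable-sized
   block and copy data into it,

	rtx addr = push_block (size_rtx, 0, 0);
	emit_block_move (gen_rtx (MEM, BLKmode, addr), src, size_rtx, align);

   The result is already a valid memory address, typically based on
   virtual_outgoing_args_rtx.  */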
1928
1929rtx
1930gen_push_operand ()
1931{
1932 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1933}
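/* For instance, with STACK_GROWS_DOWNWARD this builds
   (pre_dec:Pmode (reg sp)); wrapped in a MEM, as the callers below do,
   it names the slot created by a push:

	emit_move_insn (gen_rtx (MEM, SImode, gen_push_operand ()), x);
 */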
1934
1935/* Generate code to push X onto the stack, assuming it has mode MODE and
1936 type TYPE.
1937 MODE is redundant except when X is a CONST_INT (since they don't
1938 carry mode info).
1939 SIZE is an rtx for the size of data to be copied (in bytes),
1940 needed only if X is BLKmode.
1941
1942 ALIGN (in bytes) is maximum alignment we can assume.
1943
1944 If PARTIAL and REG are both nonzero, then copy that many of the first
1945 words of X into registers starting with REG, and push the rest of X.
1946 The amount of space pushed is decreased by PARTIAL words,
1947 rounded *down* to a multiple of PARM_BOUNDARY.
1948 REG must be a hard register in this case.
1949 If REG is zero but PARTIAL is not, take all other actions for an
1950 argument partially in registers, but do not actually load any
1951 registers.
1952
1953 EXTRA is the amount in bytes of extra space to leave next to this arg.
1954 This is ignored if an argument block has already been allocated.
1955
1956 On a machine that lacks real push insns, ARGS_ADDR is the address of
1957 the bottom of the argument block for this call. We use indexing off there
1958 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1959 argument block has not been preallocated.
1960
1961 ARGS_SO_FAR is the size of args previously pushed for this call. */
1962
1963void
1964emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1965 args_addr, args_so_far)
1966 register rtx x;
1967 enum machine_mode mode;
1968 tree type;
1969 rtx size;
1970 int align;
1971 int partial;
1972 rtx reg;
1973 int extra;
1974 rtx args_addr;
1975 rtx args_so_far;
1976{
1977 rtx xinner;
1978 enum direction stack_direction
1979#ifdef STACK_GROWS_DOWNWARD
1980 = downward;
1981#else
1982 = upward;
1983#endif
1984
1985 /* Decide where to pad the argument: `downward' for below,
1986 `upward' for above, or `none' for don't pad it.
1987 Default is below for small data on big-endian machines; else above. */
1988 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1989
1990 /* Invert direction if stack is post-update. */
1991 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1992 if (where_pad != none)
1993 where_pad = (where_pad == downward ? upward : downward);
1994
1995 xinner = x = protect_from_queue (x, 0);
1996
1997 if (mode == BLKmode)
1998 {
1999 /* Copy a block into the stack, entirely or partially. */
2000
2001 register rtx temp;
2002 int used = partial * UNITS_PER_WORD;
2003 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2004 int skip;
2005
2006 if (size == 0)
2007 abort ();
2008
2009 used -= offset;
2010
2011 /* USED is now the # of bytes we need not copy to the stack
2012 because registers will take care of them. */
2013
2014 if (partial != 0)
2015 xinner = change_address (xinner, BLKmode,
2016 plus_constant (XEXP (xinner, 0), used));
2017
2018 /* If the partial register-part of the arg counts in its stack size,
2019 skip the part of stack space corresponding to the registers.
2020 Otherwise, start copying to the beginning of the stack space,
2021 by setting SKIP to 0. */
2022#ifndef REG_PARM_STACK_SPACE
2023 skip = 0;
2024#else
2025 skip = used;
2026#endif
2027
2028#ifdef PUSH_ROUNDING
2029 /* Do it with several push insns if that doesn't take lots of insns
2030 and if there is no difficulty with push insns that skip bytes
2031 on the stack for alignment purposes. */
2032 if (args_addr == 0
2033 && GET_CODE (size) == CONST_INT
2034 && skip == 0
2035 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2036 < MOVE_RATIO)
2037 /* Here we avoid the case of a structure whose weak alignment
2038 forces many pushes of a small amount of data,
2039 and such small pushes do rounding that causes trouble. */
2040 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
2041 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2042 || PUSH_ROUNDING (align) == align)
2043 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2044 {
2045 /* Push padding now if padding above and stack grows down,
2046 or if padding below and stack grows up.
2047 But if space already allocated, this has already been done. */
2048 if (extra && args_addr == 0
2049 && where_pad != none && where_pad != stack_direction)
2050 anti_adjust_stack (GEN_INT (extra));
2051
2052 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2053 INTVAL (size) - used, align);
2054 }
2055 else
2056#endif /* PUSH_ROUNDING */
2057 {
2058 /* Otherwise make space on the stack and copy the data
2059 to the address of that space. */
2060
2061 /* Deduct words put into registers from the size we must copy. */
2062 if (partial != 0)
2063 {
2064 if (GET_CODE (size) == CONST_INT)
2065 size = GEN_INT (INTVAL (size) - used);
2066 else
2067 size = expand_binop (GET_MODE (size), sub_optab, size,
2068 GEN_INT (used), NULL_RTX, 0,
2069 OPTAB_LIB_WIDEN);
2070 }
2071
2072 /* Get the address of the stack space.
2073 In this case, we do not deal with EXTRA separately.
2074 A single stack adjust will do. */
2075 if (! args_addr)
2076 {
2077 temp = push_block (size, extra, where_pad == downward);
2078 extra = 0;
2079 }
2080 else if (GET_CODE (args_so_far) == CONST_INT)
2081 temp = memory_address (BLKmode,
2082 plus_constant (args_addr,
2083 skip + INTVAL (args_so_far)));
2084 else
2085 temp = memory_address (BLKmode,
2086 plus_constant (gen_rtx (PLUS, Pmode,
2087 args_addr, args_so_far),
2088 skip));
2089
2090 /* TEMP is the address of the block. Copy the data there. */
2091 if (GET_CODE (size) == CONST_INT
2092 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2093 < MOVE_RATIO))
2094 {
2095 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2096 INTVAL (size), align);
2097 goto ret;
2098 }
2099 /* Try the most limited insn first, because there's no point
2100 including more than one in the machine description unless
2101 the more limited one has some advantage. */
2102#ifdef HAVE_movstrqi
2103 if (HAVE_movstrqi
2104 && GET_CODE (size) == CONST_INT
2105 && ((unsigned) INTVAL (size)
2106 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2107 {
2108 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2109 xinner, size, GEN_INT (align));
2110 if (pat != 0)
2111 {
2112 emit_insn (pat);
2113 goto ret;
2114 }
2115 }
2116#endif
2117#ifdef HAVE_movstrhi
2118 if (HAVE_movstrhi
2119 && GET_CODE (size) == CONST_INT
2120 && ((unsigned) INTVAL (size)
2121 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2122 {
2123 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2124 xinner, size, GEN_INT (align));
2125 if (pat != 0)
2126 {
2127 emit_insn (pat);
2128 goto ret;
2129 }
2130 }
2131#endif
2132#ifdef HAVE_movstrsi
2133 if (HAVE_movstrsi)
2134 {
2135 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2136 xinner, size, GEN_INT (align));
2137 if (pat != 0)
2138 {
2139 emit_insn (pat);
2140 goto ret;
2141 }
2142 }
2143#endif
2144#ifdef HAVE_movstrdi
2145 if (HAVE_movstrdi)
2146 {
2147 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2148 xinner, size, GEN_INT (align));
2149 if (pat != 0)
2150 {
2151 emit_insn (pat);
2152 goto ret;
2153 }
2154 }
2155#endif
2156
2157#ifndef ACCUMULATE_OUTGOING_ARGS
2158 /* If the source is referenced relative to the stack pointer,
2159 copy it to another register to stabilize it. We do not need
2160 to do this if we know that we won't be changing sp. */
2161
2162 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2163 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2164 temp = copy_to_reg (temp);
2165#endif
2166
2167 /* Make inhibit_defer_pop nonzero around the library call
2168 to force it to pop the bcopy-arguments right away. */
2169 NO_DEFER_POP;
2170#ifdef TARGET_MEM_FUNCTIONS
2171 emit_library_call (memcpy_libfunc, 0,
2172 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2173 convert_to_mode (TYPE_MODE (sizetype),
2174 size, TREE_UNSIGNED (sizetype)),
2175 TYPE_MODE (sizetype));
2176#else
2177 emit_library_call (bcopy_libfunc, 0,
2178 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2179 convert_to_mode (TYPE_MODE (sizetype),
2180 size, TREE_UNSIGNED (sizetype)),
2181 TYPE_MODE (sizetype));
2182#endif
2183 OK_DEFER_POP;
2184 }
2185 }
2186 else if (partial > 0)
2187 {
2188 /* Scalar partly in registers. */
2189
2190 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2191 int i;
2192 int not_stack;
2193 /* # words of start of argument
2194 that we must make space for but need not store. */
2195 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2196 int args_offset = INTVAL (args_so_far);
2197 int skip;
2198
2199 /* Push padding now if padding above and stack grows down,
2200 or if padding below and stack grows up.
2201 But if space already allocated, this has already been done. */
2202 if (extra && args_addr == 0
2203 && where_pad != none && where_pad != stack_direction)
2204 anti_adjust_stack (GEN_INT (extra));
2205
2206 /* If we make space by pushing it, we might as well push
2207 the real data. Otherwise, we can leave OFFSET nonzero
2208 and leave the space uninitialized. */
2209 if (args_addr == 0)
2210 offset = 0;
2211
2212 /* Now NOT_STACK gets the number of words that we don't need to
2213 allocate on the stack. */
2214 not_stack = partial - offset;
2215
2216 /* If the partial register-part of the arg counts in its stack size,
2217 skip the part of stack space corresponding to the registers.
2218 Otherwise, start copying to the beginning of the stack space,
2219 by setting SKIP to 0. */
2220#ifndef REG_PARM_STACK_SPACE
2221 skip = 0;
2222#else
2223 skip = not_stack;
2224#endif
2225
2226 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2227 x = validize_mem (force_const_mem (mode, x));
2228
2229 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2230 SUBREGs of such registers are not allowed. */
2231 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2232 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2233 x = copy_to_reg (x);
2234
2235 /* Loop over all the words allocated on the stack for this arg. */
2236 /* We can do it by words, because any scalar bigger than a word
2237 has a size a multiple of a word. */
2238#ifndef PUSH_ARGS_REVERSED
2239 for (i = not_stack; i < size; i++)
2240#else
2241 for (i = size - 1; i >= not_stack; i--)
2242#endif
2243 if (i >= not_stack + offset)
2244 emit_push_insn (operand_subword_force (x, i, mode),
2245 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2246 0, args_addr,
2247 GEN_INT (args_offset + ((i - not_stack + skip)
2248 * UNITS_PER_WORD)));
2249 }
2250 else
2251 {
2252 rtx addr;
2253
2254 /* Push padding now if padding above and stack grows down,
2255 or if padding below and stack grows up.
2256 But if space already allocated, this has already been done. */
2257 if (extra && args_addr == 0
2258 && where_pad != none && where_pad != stack_direction)
2259 anti_adjust_stack (GEN_INT (extra));
2260
2261#ifdef PUSH_ROUNDING
2262 if (args_addr == 0)
2263 addr = gen_push_operand ();
2264 else
2265#endif
2266 if (GET_CODE (args_so_far) == CONST_INT)
2267 addr
2268 = memory_address (mode,
2269 plus_constant (args_addr, INTVAL (args_so_far)));
2270 else
2271 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2272 args_so_far));
2273
2274 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2275 }
2276
2277 ret:
2278 /* If part should go in registers, copy that part
2279 into the appropriate registers. Do this now, at the end,
2280 since mem-to-mem copies above may do function calls. */
2281 if (partial > 0 && reg != 0)
2282 move_block_to_reg (REGNO (reg), x, partial, mode);
2283
2284 if (extra && args_addr == 0 && where_pad == stack_direction)
2285 anti_adjust_stack (GEN_INT (extra));
2286}
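/* Example (an illustrative sketch, assuming a target with push insns
   and no preallocated argument block; ARG is a hypothetical SImode
   rtx): pushing a plain word-sized argument,

	emit_push_insn (arg, SImode, integer_type_node, NULL_RTX,
			PARM_BOUNDARY / BITS_PER_UNIT, 0, NULL_RTX,
			0, NULL_RTX, const0_rtx);

   takes the final branch above: ADDR becomes a push operand and one
   emit_move_insn stores ARG onto the stack.  */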
2287\f
2288/* Expand an assignment that stores the value of FROM into TO.
2289 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2290 (This may contain a QUEUED rtx;
2291 if the value is constant, this rtx is a constant.)
2292 Otherwise, the returned value is NULL_RTX.
2293
2294 SUGGEST_REG is no longer actually used.
2295 It used to mean, copy the value through a register
2296 and return that register, if that is possible.
2297 We now use WANT_VALUE to decide whether to do this. */
2298
2299rtx
2300expand_assignment (to, from, want_value, suggest_reg)
2301 tree to, from;
2302 int want_value;
2303 int suggest_reg;
2304{
2305 register rtx to_rtx = 0;
2306 rtx result;
2307
2308 /* Don't crash if the lhs of the assignment was erroneous. */
2309
2310 if (TREE_CODE (to) == ERROR_MARK)
2311 {
2312 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2313 return want_value ? result : NULL_RTX;
2314 }
2315
2316 if (output_bytecode)
2317 {
2318 tree dest_innermost;
2319
2320 bc_expand_expr (from);
2321 bc_emit_instruction (duplicate);
2322
2323 dest_innermost = bc_expand_address (to);
2324
2325 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2326 take care of it here. */
2327
2328 bc_store_memory (TREE_TYPE (to), dest_innermost);
2329 return NULL;
2330 }
2331
2332 /* Assignment of a structure component needs special treatment
2333 if the structure component's rtx is not simply a MEM.
2334 Assignment of an array element at a constant index
2335 has the same problem. */
2336
2337 if (TREE_CODE (to) == COMPONENT_REF
2338 || TREE_CODE (to) == BIT_FIELD_REF
2339 || (TREE_CODE (to) == ARRAY_REF
2340 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2341 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2342 {
2343 enum machine_mode mode1;
2344 int bitsize;
2345 int bitpos;
2346 tree offset;
2347 int unsignedp;
2348 int volatilep = 0;
2349 tree tem;
2350 int alignment;
2351
2352 push_temp_slots ();
2353 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2354 &mode1, &unsignedp, &volatilep);
2355
2356 /* If we are going to use store_bit_field and extract_bit_field,
2357 make sure to_rtx will be safe for multiple use. */
2358
2359 if (mode1 == VOIDmode && want_value)
2360 tem = stabilize_reference (tem);
2361
2362 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2363 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2364 if (offset != 0)
2365 {
2366 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2367
2368 if (GET_CODE (to_rtx) != MEM)
2369 abort ();
2370 to_rtx = change_address (to_rtx, VOIDmode,
2371 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2372 force_reg (Pmode, offset_rtx)));
2373 /* If we have a variable offset, the known alignment
2374 is only that of the innermost structure containing the field.
2375 (Actually, we could sometimes do better by using the
2376 align of an element of the innermost array, but no need.) */
2377 if (TREE_CODE (to) == COMPONENT_REF
2378 || TREE_CODE (to) == BIT_FIELD_REF)
2379 alignment
2380 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2381 }
2382 if (volatilep)
2383 {
2384 if (GET_CODE (to_rtx) == MEM)
2385 MEM_VOLATILE_P (to_rtx) = 1;
2386#if 0 /* This was turned off because, when a field is volatile
2387 in an object which is not volatile, the object may be in a register,
2388 and then we would abort over here. */
2389 else
2390 abort ();
2391#endif
2392 }
2393
2394 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2395 (want_value
2396 /* Spurious cast makes HPUX compiler happy. */
2397 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2398 : VOIDmode),
2399 unsignedp,
2400 /* Required alignment of containing datum. */
2401 alignment,
2402 int_size_in_bytes (TREE_TYPE (tem)));
2403 preserve_temp_slots (result);
2404 free_temp_slots ();
2405 pop_temp_slots ();
2406
2407 /* If the value is meaningful, convert RESULT to the proper mode.
2408 Otherwise, return nothing. */
2409 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2410 TYPE_MODE (TREE_TYPE (from)),
2411 result,
2412 TREE_UNSIGNED (TREE_TYPE (to)))
2413 : NULL_RTX);
2414 }
2415
2416 /* If the rhs is a function call and its value is not an aggregate,
2417 call the function before we start to compute the lhs.
2418 This is needed for correct code for cases such as
2419 val = setjmp (buf) on machines where reference to val
2420 requires loading up part of an address in a separate insn.
2421
2422 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2423 a promoted variable where the zero- or sign- extension needs to be done.
2424 Handling this in the normal way is safe because no computation is done
2425 before the call. */
2426 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2427 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2428 {
2429 rtx value;
2430
2431 push_temp_slots ();
2432 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2433 if (to_rtx == 0)
2434 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2435 emit_move_insn (to_rtx, value);
2436 preserve_temp_slots (to_rtx);
2437 free_temp_slots ();
2438 pop_temp_slots ();
2439 return want_value ? to_rtx : NULL_RTX;
2440 }
2441
2442 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2443 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2444
2445 if (to_rtx == 0)
2446 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2447
2448 /* Don't move directly into a return register. */
2449 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2450 {
2451 rtx temp;
2452
2453 push_temp_slots ();
2454 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2455 emit_move_insn (to_rtx, temp);
2456 preserve_temp_slots (to_rtx);
2457 free_temp_slots ();
2458 pop_temp_slots ();
2459 return want_value ? to_rtx : NULL_RTX;
2460 }
2461
2462 /* In case we are returning the contents of an object which overlaps
2463 the place the value is being stored, use a safe function when copying
2464 a value through a pointer into a structure value return block. */
2465 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2466 && current_function_returns_struct
2467 && !current_function_returns_pcc_struct)
2468 {
2469 rtx from_rtx, size;
2470
2471 push_temp_slots ();
33a20d10
RK
2472 size = expr_size (from);
2473 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2474
2475#ifdef TARGET_MEM_FUNCTIONS
2476 emit_library_call (memcpy_libfunc, 0,
2477 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2478 XEXP (from_rtx, 0), Pmode,
2479 convert_to_mode (TYPE_MODE (sizetype),
2480 size, TREE_UNSIGNED (sizetype)),
2481 TYPE_MODE (sizetype));
2482#else
2483 emit_library_call (bcopy_libfunc, 0,
2484 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2485 XEXP (to_rtx, 0), Pmode,
2486 convert_to_mode (TYPE_MODE (sizetype),
2487 size, TREE_UNSIGNED (sizetype)),
2488 TYPE_MODE (sizetype));
2489#endif
2490
2491 preserve_temp_slots (to_rtx);
2492 free_temp_slots ();
2493 pop_temp_slots ();
2494 return want_value ? to_rtx : NULL_RTX;
2495 }
2496
2497 /* Compute FROM and store the value in the rtx we got. */
2498
2499 push_temp_slots ();
2500 result = store_expr (from, to_rtx, want_value);
2501 preserve_temp_slots (result);
2502 free_temp_slots ();
2503 pop_temp_slots ();
2504 return want_value ? result : NULL_RTX;
2505}
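/* Example (an illustrative sketch): for the C assignment `s.f = x;'
   where F is a bit-field, expansion takes the COMPONENT_REF case
   above: get_inner_reference locates the field and store_field emits
   the masked store.  A plain `v = x;' instead falls through to the
   final store_expr call.  */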
2506
2507/* Generate code for computing expression EXP,
2508 and storing the value into TARGET.
2509 TARGET may contain a QUEUED rtx.
2510
2511 If WANT_VALUE is nonzero, return a copy of the value
2512 not in TARGET, so that we can be sure to use the proper
2513 value in a containing expression even if TARGET has something
2514 else stored in it. If possible, we copy the value through a pseudo
2515 and return that pseudo. Or, if the value is constant, we try to
2516 return the constant. In some cases, we return a pseudo
2517 copied *from* TARGET.
2518
2519 If the mode is BLKmode then we may return TARGET itself.
2520 It turns out that in BLKmode it doesn't cause a problem,
2521 because C has no operators that could combine two different
2522 assignments into the same BLKmode object with different values
2523 with no sequence point. Will other languages need this to
2524 be more thorough?
2525
2526 If WANT_VALUE is 0, we return NULL, to make sure
2527 to catch quickly any cases where the caller uses the value
2528 and fails to set WANT_VALUE. */
2529
2530rtx
2531store_expr (exp, target, want_value)
2532 register tree exp;
2533 register rtx target;
2534 int want_value;
2535{
2536 register rtx temp;
2537 int dont_return_target = 0;
2538
2539 if (TREE_CODE (exp) == COMPOUND_EXPR)
2540 {
2541 /* Perform first part of compound expression, then assign from second
2542 part. */
2543 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2544 emit_queue ();
2545 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2546 }
2547 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2548 {
2549 /* For a conditional expression, get a safe form of the target. Then
2550 test the condition, doing the appropriate assignment on either
2551 side. This avoids the creation of unnecessary temporaries.
2552 For non-BLKmode, it is more efficient not to do this. */
2553
2554 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2555
2556 emit_queue ();
2557 target = protect_from_queue (target, 1);
2558
2559 NO_DEFER_POP;
2560 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2561 store_expr (TREE_OPERAND (exp, 1), target, 0);
2562 emit_queue ();
2563 emit_jump_insn (gen_jump (lab2));
2564 emit_barrier ();
2565 emit_label (lab1);
2566 store_expr (TREE_OPERAND (exp, 2), target, 0);
2567 emit_queue ();
2568 emit_label (lab2);
2569 OK_DEFER_POP;
2570 return want_value ? target : NULL_RTX;
2571 }
2572 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2573 && GET_MODE (target) != BLKmode)
2574 /* If target is in memory and caller wants value in a register instead,
2575 arrange that. Pass TARGET as target for expand_expr so that,
2576 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2577 We know expand_expr will not use the target in that case.
2578 Don't do this if TARGET is volatile because we are supposed
2579 to write it and then read it. */
2580 {
2581 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2582 GET_MODE (target), 0);
2583 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2584 temp = copy_to_reg (temp);
2585 dont_return_target = 1;
2586 }
2587 else if (queued_subexp_p (target))
2588 /* If target contains a postincrement, let's not risk
2589 using it as the place to generate the rhs. */
2590 {
2591 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2592 {
2593 /* Expand EXP into a new pseudo. */
2594 temp = gen_reg_rtx (GET_MODE (target));
2595 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2596 }
2597 else
2598 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2599
2600 /* If target is volatile, ANSI requires accessing the value
2601 *from* the target, if it is accessed. So make that happen.
2602 In no case return the target itself. */
2603 if (! MEM_VOLATILE_P (target) && want_value)
2604 dont_return_target = 1;
2605 }
2606 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2607 /* If this is a scalar in a register that is stored in a wider mode
2608 than the declared mode, compute the result into its declared mode
2609 and then convert to the wider mode. Our value is the computed
2610 expression. */
2611 {
2612 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2613
2614 /* If TEMP is a VOIDmode constant, use convert_modes to make
2615 sure that we properly convert it. */
2616 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2617 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2618 TYPE_MODE (TREE_TYPE (exp)), temp,
2619 SUBREG_PROMOTED_UNSIGNED_P (target));
2620
2621 convert_move (SUBREG_REG (target), temp,
2622 SUBREG_PROMOTED_UNSIGNED_P (target));
2623 return want_value ? temp : NULL_RTX;
2624 }
2625 else
2626 {
2627 temp = expand_expr (exp, target, GET_MODE (target), 0);
2628 /* DO return TARGET if it's a specified hardware register.
2629 expand_return relies on this.
2630 If TARGET is a volatile mem ref, either return TARGET
2631 or return a reg copied *from* TARGET; ANSI requires this.
2632
2633 Otherwise, if TEMP is not TARGET, return TEMP
2634 if it is constant (for efficiency),
2635 or if we really want the correct value. */
2636 if (!(target && GET_CODE (target) == REG
2637 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2638 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2639 && temp != target
2640 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
2641 dont_return_target = 1;
2642 }
2643
2644 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2645 the same as that of TARGET, adjust the constant. This is needed, for
2646 example, in case it is a CONST_DOUBLE and we want only a word-sized
2647 value. */
2648 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2649 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2650 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2651 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2652
2653 /* If value was not generated in the target, store it there.
2654 Convert the value to TARGET's type first if necessary. */
2655
2656 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2657 {
2658 target = protect_from_queue (target, 1);
2659 if (GET_MODE (temp) != GET_MODE (target)
2660 && GET_MODE (temp) != VOIDmode)
2661 {
2662 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2663 if (dont_return_target)
2664 {
2665 /* In this case, we will return TEMP,
2666 so make sure it has the proper mode.
2667 But don't forget to store the value into TARGET. */
2668 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2669 emit_move_insn (target, temp);
2670 }
2671 else
2672 convert_move (target, temp, unsignedp);
2673 }
2674
2675 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2676 {
2677 /* Handle copying a string constant into an array.
2678 The string constant may be shorter than the array.
2679 So copy just the string's actual length, and clear the rest. */
2680 rtx size;
2681
2682 /* Get the size of the data type of the string,
2683 which is actually the size of the target. */
2684 size = expr_size (exp);
2685 if (GET_CODE (size) == CONST_INT
2686 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2687 emit_block_move (target, temp, size,
2688 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2689 else
2690 {
2691 /* Compute the size of the data to copy from the string. */
2692 tree copy_size
2693 = size_binop (MIN_EXPR,
2694 make_tree (sizetype, size),
2695 convert (sizetype,
2696 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2697 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2698 VOIDmode, 0);
2699 rtx label = 0;
2700
2701 /* Copy that much. */
2702 emit_block_move (target, temp, copy_size_rtx,
2703 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2704
2705 /* Figure out how much is left in TARGET
2706 that we have to clear. */
2707 if (GET_CODE (copy_size_rtx) == CONST_INT)
2708 {
2709 temp = plus_constant (XEXP (target, 0),
2710 TREE_STRING_LENGTH (exp));
2711 size = plus_constant (size,
2712 - TREE_STRING_LENGTH (exp));
2713 }
2714 else
2715 {
2716 enum machine_mode size_mode = Pmode;
2717
2718 temp = force_reg (Pmode, XEXP (target, 0));
2719 temp = expand_binop (size_mode, add_optab, temp,
2720 copy_size_rtx, NULL_RTX, 0,
2721 OPTAB_LIB_WIDEN);
2722
2723 size = expand_binop (size_mode, sub_optab, size,
2724 copy_size_rtx, NULL_RTX, 0,
2725 OPTAB_LIB_WIDEN);
2726
2727 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2728 GET_MODE (size), 0, 0);
2729 label = gen_label_rtx ();
2730 emit_jump_insn (gen_blt (label));
2731 }
2732
2733 if (size != const0_rtx)
2734 {
2735#ifdef TARGET_MEM_FUNCTIONS
2736 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2737 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2738#else
2739 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2740 temp, Pmode, size, Pmode);
2741#endif
2742 }
2743 if (label)
2744 emit_label (label);
2745 }
2746 }
2747 else if (GET_MODE (temp) == BLKmode)
2748 emit_block_move (target, temp, expr_size (exp),
2749 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2750 else
2751 emit_move_insn (target, temp);
2752 }
2753
2754 if (dont_return_target && GET_CODE (temp) != MEM)
2755 return temp;
2756 if (want_value && GET_MODE (target) != BLKmode)
2757 return copy_to_reg (target);
2758 if (want_value)
2759 return target;
2760 return NULL_RTX;
2761}
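/* Example (an illustrative sketch): expand_assignment ends with

	result = store_expr (from, to_rtx, want_value);

   passing WANT_VALUE nonzero when the assignment is itself used as a
   value, as in `a = (b = c);', so the rtx returned here can feed the
   enclosing expression even if TO_RTX is later modified.  */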
2762\f
2763/* Store the value of constructor EXP into the rtx TARGET.
2764 TARGET is either a REG or a MEM. */
2765
2766static void
2767store_constructor (exp, target)
2768 tree exp;
2769 rtx target;
2770{
2771 tree type = TREE_TYPE (exp);
2772
2773 /* We know our target cannot conflict, since safe_from_p has been called. */
2774#if 0
2775 /* Don't try copying piece by piece into a hard register
2776 since that is vulnerable to being clobbered by EXP.
2777 Instead, construct in a pseudo register and then copy it all. */
2778 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2779 {
2780 rtx temp = gen_reg_rtx (GET_MODE (target));
2781 store_constructor (exp, temp);
2782 emit_move_insn (target, temp);
2783 return;
2784 }
2785#endif
2786
2787 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2788 || TREE_CODE (type) == QUAL_UNION_TYPE)
2789 {
2790 register tree elt;
2791
2792 /* Inform later passes that the whole union value is dead. */
2793 if (TREE_CODE (type) == UNION_TYPE
2794 || TREE_CODE (type) == QUAL_UNION_TYPE)
2795 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2796
2797 /* If we are building a static constructor into a register,
2798 set the initial value as zero so we can fold the value into
2799 a constant. */
2800 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2801 emit_move_insn (target, const0_rtx);
2802
2803 /* If the constructor has fewer fields than the structure,
2804 clear the whole structure first. */
2805 else if (list_length (CONSTRUCTOR_ELTS (exp))
2806 != list_length (TYPE_FIELDS (type)))
2807 clear_storage (target, int_size_in_bytes (type));
2808 else
2809 /* Inform later passes that the old value is dead. */
2810 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2811
2812 /* Store each element of the constructor into
2813 the corresponding field of TARGET. */
2814
2815 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2816 {
2817 register tree field = TREE_PURPOSE (elt);
2818 register enum machine_mode mode;
2819 int bitsize;
2820 int bitpos = 0;
bbf6f052 2821 int unsignedp;
2822 tree pos, constant = 0, offset = 0;
2823 rtx to_rtx = target;
2824
2825 /* Just ignore missing fields.
2826 We cleared the whole structure, above,
2827 if any fields are missing. */
2828 if (field == 0)
2829 continue;
2830
2831 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2832 unsignedp = TREE_UNSIGNED (field);
2833 mode = DECL_MODE (field);
2834 if (DECL_BIT_FIELD (field))
2835 mode = VOIDmode;
2836
2837 pos = DECL_FIELD_BITPOS (field);
2838 if (TREE_CODE (pos) == INTEGER_CST)
2839 constant = pos;
2840 else if (TREE_CODE (pos) == PLUS_EXPR
2841 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2842 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
2843 else
2844 offset = pos;
2845
2846 if (constant)
2847 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2848
2849 if (offset)
2850 {
2851 rtx offset_rtx;
2852
2853 if (contains_placeholder_p (offset))
2854 offset = build (WITH_RECORD_EXPR, sizetype,
2855 offset, exp);
2856
2857 offset = size_binop (FLOOR_DIV_EXPR, offset,
2858 size_int (BITS_PER_UNIT));
2859
2860 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2861 if (GET_CODE (to_rtx) != MEM)
2862 abort ();
2863
2864 to_rtx
2865 = change_address (to_rtx, VOIDmode,
2866 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2867 force_reg (Pmode, offset_rtx)));
2868 }
2869
2870 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
2871 /* The alignment of TARGET is
2872 at least what its type requires. */
2873 VOIDmode, 0,
2874 TYPE_ALIGN (type) / BITS_PER_UNIT,
2875 int_size_in_bytes (type));
2876 }
2877 }
2878 else if (TREE_CODE (type) == ARRAY_TYPE)
2879 {
2880 register tree elt;
2881 register int i;
2882 tree domain = TYPE_DOMAIN (type);
2883 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2884 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2885 tree elttype = TREE_TYPE (type);
2886
2887 /* If the constructor has fewer fields than the structure,
2888 clear the whole structure first. Similarly if this is a
2889 static constructor of a non-BLKmode object. */
2890
2891 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2892 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2893 clear_storage (target, int_size_in_bytes (type));
2894 else
2895 /* Inform later passes that the old value is dead. */
2896 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2897
2898 /* Store each element of the constructor into
2899 the corresponding element of TARGET, determined
2900 by counting the elements. */
2901 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2902 elt;
2903 elt = TREE_CHAIN (elt), i++)
2904 {
2905 register enum machine_mode mode;
2906 int bitsize;
2907 int bitpos;
2908 int unsignedp;
2909 tree index = TREE_PURPOSE (elt);
2910 rtx xtarget = target;
2911
2912 mode = TYPE_MODE (elttype);
2913 bitsize = GET_MODE_BITSIZE (mode);
2914 unsignedp = TREE_UNSIGNED (elttype);
2915
2916 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
2917 {
2918 /* We don't currently allow variable indices in a
2919 C initializer, but let's try here to support them. */
2920 rtx pos_rtx, addr, xtarget;
2921 tree position;
2922
2923 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
2924 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
2925 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
2926 xtarget = change_address (target, mode, addr);
2927 store_expr (TREE_VALUE (elt), xtarget, 0);
2928 }
2929 else
2930 {
2931 if (index != 0)
2932 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
2933 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2934 else
2935 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2936
2937 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
2938 /* The alignment of TARGET is
2939 at least what its type requires. */
2940 VOIDmode, 0,
2941 TYPE_ALIGN (type) / BITS_PER_UNIT,
2942 int_size_in_bytes (type));
2943 }
2944 }
2945 }
2946
2947 else
2948 abort ();
2949}
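/* Example (an illustrative sketch): for the C initializer

	struct { int a, b; } s = { 1 };

   the constructor lists fewer elements than the type has fields, so
   the code above first clears the whole of S with clear_storage and
   then stores 1 into field A through store_field.  */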
2950
2951/* Store the value of EXP (an expression tree)
2952 into a subfield of TARGET which has mode MODE and occupies
2953 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2954 If MODE is VOIDmode, it means that we are storing into a bit-field.
2955
2956 If VALUE_MODE is VOIDmode, return nothing in particular.
2957 UNSIGNEDP is not used in this case.
2958
2959 Otherwise, return an rtx for the value stored. This rtx
2960 has mode VALUE_MODE if that is convenient to do.
2961 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2962
2963 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2964 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2965
2966static rtx
2967store_field (target, bitsize, bitpos, mode, exp, value_mode,
2968 unsignedp, align, total_size)
2969 rtx target;
2970 int bitsize, bitpos;
2971 enum machine_mode mode;
2972 tree exp;
2973 enum machine_mode value_mode;
2974 int unsignedp;
2975 int align;
2976 int total_size;
2977{
2978 HOST_WIDE_INT width_mask = 0;
2979
2980 if (bitsize < HOST_BITS_PER_WIDE_INT)
2981 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2982
2983 /* If we are storing into an unaligned field of an aligned union that is
2984 in a register, we may have the mode of TARGET being an integer mode but
2985 MODE == BLKmode. In that case, get an aligned object whose size and
2986 alignment are the same as TARGET and store TARGET into it (we can avoid
2987 the store if the field being stored is the entire width of TARGET). Then
2988 call ourselves recursively to store the field into a BLKmode version of
2989 that object. Finally, load from the object into TARGET. This is not
2990 very efficient in general, but should only be slightly more expensive
2991 than the otherwise-required unaligned accesses. Perhaps this can be
2992 cleaned up later. */
2993
2994 if (mode == BLKmode
2995 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2996 {
2997 rtx object = assign_stack_temp (GET_MODE (target),
2998 GET_MODE_SIZE (GET_MODE (target)), 0);
2999 rtx blk_object = copy_rtx (object);
3000
3001 PUT_MODE (blk_object, BLKmode);
3002
3003 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3004 emit_move_insn (object, target);
3005
3006 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3007 align, total_size);
3008
3009 /* Even though we aren't returning target, we need to
3010 give it the updated value. */
3011 emit_move_insn (target, object);
3012
3013 return blk_object;
3014 }
3015
3016 /* If the structure is in a register or if the component
3017 is a bit field, we cannot use addressing to access it.
3018 Use bit-field techniques or SUBREG to store in it. */
3019
3020 if (mode == VOIDmode
3021 || (mode != BLKmode && ! direct_store[(int) mode])
3022 || GET_CODE (target) == REG
3023 || GET_CODE (target) == SUBREG
3024 /* If the field isn't aligned enough to store as an ordinary memref,
3025 store it as a bit field. */
3026 || (STRICT_ALIGNMENT
3027 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3028 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3029 {
3030 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3031
3032 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3033 MODE. */
3034 if (mode != VOIDmode && mode != BLKmode
3035 && mode != TYPE_MODE (TREE_TYPE (exp)))
3036 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3037
3038 /* Store the value in the bitfield. */
3039 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3040 if (value_mode != VOIDmode)
3041 {
3042 /* The caller wants an rtx for the value. */
3043 /* If possible, avoid refetching from the bitfield itself. */
3044 if (width_mask != 0
3045 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3046 {
3047 tree count;
3048 enum machine_mode tmode;
3049
3050 if (unsignedp)
3051 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3052 tmode = GET_MODE (temp);
3053 if (tmode == VOIDmode)
3054 tmode = value_mode;
3055 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3056 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3057 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3058 }
3059 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3060 NULL_RTX, value_mode, 0, align,
3061 total_size);
3062 }
3063 return const0_rtx;
3064 }
3065 else
3066 {
3067 rtx addr = XEXP (target, 0);
3068 rtx to_rtx;
3069
3070 /* If a value is wanted, it must be the lhs;
3071 so make the address stable for multiple use. */
3072
3073 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3074 && ! CONSTANT_ADDRESS_P (addr)
3075 /* A frame-pointer reference is already stable. */
3076 && ! (GET_CODE (addr) == PLUS
3077 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3078 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3079 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3080 addr = copy_to_reg (addr);
3081
3082 /* Now build a reference to just the desired component. */
3083
3084 to_rtx = change_address (target, mode,
3085 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3086 MEM_IN_STRUCT_P (to_rtx) = 1;
3087
3088 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3089 }
3090}
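/* Example (an illustrative sketch): storing into a 3-bit field that
   starts at bit 4 cannot use an ordinary memref, so the bit-field
   branch above issues

	store_bit_field (target, 3, 4, VOIDmode, temp, align, total_size);

   and, when the caller wants the value back, masks or shifts TEMP
   rather than refetching it from the bit-field.  */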
3091\f
3092/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3093 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3094 ARRAY_REFs and find the ultimate containing object, which we return.
3095
3096 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3097 bit position, and *PUNSIGNEDP to the signedness of the field.
3098 If the position of the field is variable, we store a tree
3099 giving the variable offset (in units) in *POFFSET.
3100 This offset is in addition to the bit position.
3101 If the position is not variable, we store 0 in *POFFSET.
3102
3103 If any of the extraction expressions is volatile,
3104 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3105
3106 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3107 is a mode that can be used to access the field. In that case, *PBITSIZE
3108 is redundant.
3109
3110 If the field describes a variable-sized object, *PMODE is set to
3111 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3112 this case, but the address of the object can be found. */
3113
3114tree
3115get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3116 punsignedp, pvolatilep)
3117 tree exp;
3118 int *pbitsize;
3119 int *pbitpos;
3120 tree *poffset;
3121 enum machine_mode *pmode;
3122 int *punsignedp;
3123 int *pvolatilep;
3124{
3125 tree orig_exp = exp;
3126 tree size_tree = 0;
3127 enum machine_mode mode = VOIDmode;
3128 tree offset = integer_zero_node;
3129
3130 if (TREE_CODE (exp) == COMPONENT_REF)
3131 {
3132 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3133 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3134 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3135 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3136 }
3137 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3138 {
3139 size_tree = TREE_OPERAND (exp, 1);
3140 *punsignedp = TREE_UNSIGNED (exp);
3141 }
3142 else
3143 {
3144 mode = TYPE_MODE (TREE_TYPE (exp));
3145 *pbitsize = GET_MODE_BITSIZE (mode);
3146 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3147 }
3148
3149 if (size_tree)
3150 {
3151 if (TREE_CODE (size_tree) != INTEGER_CST)
3152 mode = BLKmode, *pbitsize = -1;
3153 else
3154 *pbitsize = TREE_INT_CST_LOW (size_tree);
3155 }
3156
3157 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3158 and find the ultimate containing object. */
3159
3160 *pbitpos = 0;
3161
3162 while (1)
3163 {
3164 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3165 {
3166 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3167 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3168 : TREE_OPERAND (exp, 2));
3169
3170 /* If this field hasn't been filled in yet, don't go
3171 past it. This should only happen when folding expressions
3172 made during type construction. */
3173 if (pos == 0)
3174 break;
3175
3176 if (TREE_CODE (pos) == PLUS_EXPR)
3177 {
3178 tree constant, var;
3179 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3180 {
3181 constant = TREE_OPERAND (pos, 0);
3182 var = TREE_OPERAND (pos, 1);
3183 }
3184 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3185 {
3186 constant = TREE_OPERAND (pos, 1);
3187 var = TREE_OPERAND (pos, 0);
3188 }
3189 else
3190 abort ();
3191
3192 *pbitpos += TREE_INT_CST_LOW (constant);
3193 offset = size_binop (PLUS_EXPR, offset,
3194 size_binop (FLOOR_DIV_EXPR, var,
3195 size_int (BITS_PER_UNIT)));
3196 }
3197 else if (TREE_CODE (pos) == INTEGER_CST)
3198 *pbitpos += TREE_INT_CST_LOW (pos);
3199 else
3200 {
3201 /* Assume here that the offset is a multiple of a unit.
3202 If not, there should be an explicitly added constant. */
3203 offset = size_binop (PLUS_EXPR, offset,
3204 size_binop (FLOOR_DIV_EXPR, pos,
3205 size_int (BITS_PER_UNIT)));
3206 }
3207 }
3208
3209 else if (TREE_CODE (exp) == ARRAY_REF)
3210 {
3211 /* This code is based on the code in case ARRAY_REF in expand_expr
3212 below. We assume here that the size of an array element is
3213 always an integral multiple of BITS_PER_UNIT. */
3214
3215 tree index = TREE_OPERAND (exp, 1);
3216 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3217 tree low_bound
3218 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3219 tree index_type = TREE_TYPE (index);
3220
3221 if (! integer_zerop (low_bound))
3222 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3223
3224 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3225 {
3226 index = convert (type_for_size (POINTER_SIZE, 0), index);
3227 index_type = TREE_TYPE (index);
3228 }
3229
3230 index = fold (build (MULT_EXPR, index_type, index,
3231 TYPE_SIZE (TREE_TYPE (exp))));
3232
3233 if (TREE_CODE (index) == INTEGER_CST
3234 && TREE_INT_CST_HIGH (index) == 0)
3235 *pbitpos += TREE_INT_CST_LOW (index);
3236 else
3237 offset = size_binop (PLUS_EXPR, offset,
3238 size_binop (FLOOR_DIV_EXPR, index,
3239 size_int (BITS_PER_UNIT)));
3240 }
3241 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3242 && ! ((TREE_CODE (exp) == NOP_EXPR
3243 || TREE_CODE (exp) == CONVERT_EXPR)
3244 && (TYPE_MODE (TREE_TYPE (exp))
3245 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3246 break;
3247
3248 /* If any reference in the chain is volatile, the effect is volatile. */
3249 if (TREE_THIS_VOLATILE (exp))
3250 *pvolatilep = 1;
3251 exp = TREE_OPERAND (exp, 0);
3252 }
3253
3254 /* If this was a bit-field, see if there is a mode that allows direct
3255 access in case EXP is in memory. */
3256 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3257 {
3258 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3259 if (mode == BLKmode)
3260 mode = VOIDmode;
3261 }
3262
3263 if (integer_zerop (offset))
3264 offset = 0;
3265
3266 if (offset != 0 && contains_placeholder_p (offset))
3267 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3268
3269 *pmode = mode;
3270 *poffset = offset;
3271 return exp;
3272}
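/* Example (an illustrative sketch): for a nested reference such as
   `s.inner.f' the loop above walks outward through both
   COMPONENT_REFs, accumulating their DECL_FIELD_BITPOS values, so

	tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode, &unsignedp, &volatilep);

   returns the declaration for S with *PBITPOS the total constant bit
   offset and *POFFSET the variable part, if any.  */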
3273\f
3274/* Given an rtx VALUE that may contain additions and multiplications,
3275 return an equivalent value that just refers to a register or memory.
3276 This is done by generating instructions to perform the arithmetic
3277 and returning a pseudo-register containing the value.
3278
3279 The returned value may be a REG, SUBREG, MEM or constant. */
3280
3281rtx
3282force_operand (value, target)
3283 rtx value, target;
3284{
3285 register optab binoptab = 0;
3286 /* Use a temporary to force order of execution of calls to
3287 `force_operand'. */
3288 rtx tmp;
3289 register rtx op2;
3290 /* Use subtarget as the target for operand 0 of a binary operation. */
3291 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3292
3293 if (GET_CODE (value) == PLUS)
3294 binoptab = add_optab;
3295 else if (GET_CODE (value) == MINUS)
3296 binoptab = sub_optab;
3297 else if (GET_CODE (value) == MULT)
3298 {
3299 op2 = XEXP (value, 1);
3300 if (!CONSTANT_P (op2)
3301 && !(GET_CODE (op2) == REG && op2 != subtarget))
3302 subtarget = 0;
3303 tmp = force_operand (XEXP (value, 0), subtarget);
3304 return expand_mult (GET_MODE (value), tmp,
3305 force_operand (op2, NULL_RTX),
3306 target, 0);
3307 }
3308
3309 if (binoptab)
3310 {
3311 op2 = XEXP (value, 1);
3312 if (!CONSTANT_P (op2)
3313 && !(GET_CODE (op2) == REG && op2 != subtarget))
3314 subtarget = 0;
3315 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3316 {
3317 binoptab = add_optab;
3318 op2 = negate_rtx (GET_MODE (value), op2);
3319 }
3320
3321 /* Check for an addition with OP2 a constant integer and our first
3322 operand a PLUS of a virtual register and something else. In that
3323 case, we want to emit the sum of the virtual register and the
3324 constant first and then add the other value. This allows virtual
3325 register instantiation to simply modify the constant rather than
3326 creating another one around this addition. */
3327 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3328 && GET_CODE (XEXP (value, 0)) == PLUS
3329 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3330 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3331 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3332 {
3333 rtx temp = expand_binop (GET_MODE (value), binoptab,
3334 XEXP (XEXP (value, 0), 0), op2,
3335 subtarget, 0, OPTAB_LIB_WIDEN);
3336 return expand_binop (GET_MODE (value), binoptab, temp,
3337 force_operand (XEXP (XEXP (value, 0), 1), 0),
3338 target, 0, OPTAB_LIB_WIDEN);
3339 }
3340
3341 tmp = force_operand (XEXP (value, 0), subtarget);
3342 return expand_binop (GET_MODE (value), binoptab, tmp,
3343 force_operand (op2, NULL_RTX),
3344 target, 0, OPTAB_LIB_WIDEN);
3345 /* We give UNSIGNEDP = 0 to expand_binop
3346 because the only operations we are expanding here are signed ones. */
3347 }
3348 return value;
3349}
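/* Example (an illustrative sketch): an address computation such as
   (plus (mult (reg i) (const_int 4)) (reg base)) is neither a
   register nor a valid memory reference by itself, so

	rtx val = force_operand (addr, NULL_RTX);

   emits the multiply and the add via expand_mult and expand_binop
   and returns an rtx (usually a pseudo register) holding the
   value.  */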
3350\f
3351/* Subroutine of expand_expr:
3352 save the non-copied parts (LIST) of an expr (LHS), and return a list
3353 which can restore these values to their previous values,
3354 should something modify their storage. */
3355
3356static tree
3357save_noncopied_parts (lhs, list)
3358 tree lhs;
3359 tree list;
3360{
3361 tree tail;
3362 tree parts = 0;
3363
3364 for (tail = list; tail; tail = TREE_CHAIN (tail))
3365 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3366 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3367 else
3368 {
3369 tree part = TREE_VALUE (tail);
3370 tree part_type = TREE_TYPE (part);
3371 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3372 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3373 int_size_in_bytes (part_type), 0);
3374 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3375 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3376 parts = tree_cons (to_be_saved,
3377 build (RTL_EXPR, part_type, NULL_TREE,
3378 (tree) target),
3379 parts);
3380 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3381 }
3382 return parts;
3383}
3384
3385/* Subroutine of expand_expr:
3386 record the non-copied parts (LIST) of an expr (LHS), and return a list
3387 which specifies the initial values of these parts. */
3388
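/* Unlike save_noncopied_parts, no temporaries are created here; each
   returned node pairs the TREE_PURPOSE of the corresponding LIST entry
   with a COMPONENT_REF that reads that part out of LHS.  */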
static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
        parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  */

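/* The answer is conservative: returning zero merely makes callers such as
   expand_expr below allocate a fresh target instead of reusing X.  */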
static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0)
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe;
     otherwise, find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
        return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
          || (GET_CODE (XEXP (x, 0)) == PLUS
              && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
        return ((TREE_VALUE (exp) == 0
                 || safe_from_p (x, TREE_VALUE (exp)))
                && (TREE_CHAIN (exp) == 0
                    || safe_from_p (x, TREE_CHAIN (exp))));
      else
        return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
              && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          return (staticp (TREE_OPERAND (exp, 0))
                  || safe_from_p (x, TREE_OPERAND (exp, 0)));

        case INDIRECT_REF:
          if (GET_CODE (x) == MEM)
            return 0;
          break;

        case CALL_EXPR:
          exp_rtl = CALL_EXPR_RTL (exp);
          if (exp_rtl == 0)
            {
              /* Assume that the call will clobber all hard registers and
                 all of memory.  */
              if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
                  || GET_CODE (x) == MEM)
                return 0;
            }

          break;

        case RTL_EXPR:
          exp_rtl = RTL_EXPR_RTL (exp);
          if (exp_rtl == 0)
            /* We don't know what this can modify.  */
            return 0;

          break;

        case WITH_CLEANUP_EXPR:
          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case SAVE_EXPR:
          exp_rtl = SAVE_EXPR_RTL (exp);
          break;

        case BIND_EXPR:
          /* The only operand we look at is operand 1.  The rest aren't
             part of the expression.  */
          return safe_from_p (x, TREE_OPERAND (exp, 1));

        case METHOD_CALL_EXPR:
          /* This takes an rtx argument, but shouldn't appear here.  */
          abort ();
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i)))
          return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (GET_CODE (exp_rtl) == REG
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
                    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.  */

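/* A typical recursive use, as in many of the cases below, is
       op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
   which accepts the value in whatever mode and register it happens
   to land in.  */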
rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  /* Maybe defer this until we are sure we are not doing bytecode?  */
  int ignore = (target == const0_rtx
                || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                     || code == CONVERT_EXPR || code == REFERENCE_EXPR
                     || code == COND_EXPR)
                    && TREE_CODE (type) == VOID_TYPE));
  tree context;

  if (output_bytecode)
    {
      bc_expand_expr (exp);
      return NULL;
    }

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (GET_CODE (temp) == MEM)
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == '1')
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, modifier);
      else if (TREE_CODE_CLASS (code) == '2'
               || TREE_CODE_CLASS (code) == '<')
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, modifier);

      target = 0;
    }

  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);
        /* Handle using a label in a containing function.  */
        if (function != current_function_decl && function != 0)
          {
            struct function *p = find_function_data (function);
            /* Allocate in the memory associated with the function
               that the label is in.  */
            push_obstacks (p->function_obstack,
                           p->function_maybepermanent_obstack);

            p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
                                        label_rtx (exp), p->forced_labels);
            pop_obstacks ();
          }
        else if (modifier == EXPAND_INITIALIZER)
          forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
                                   label_rtx (exp), forced_labels);
        temp = gen_rtx (MEM, FUNCTION_MODE,
                        gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
        if (function != current_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          push_obstacks_nochange ();
          end_temporary_allocation ();
          layout_decl (exp, 0);
          PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
          pop_obstacks ();
        }
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();
      /* Ensure the variable is marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          mark_addressable (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);
          return change_address (DECL_RTL (exp), mode, addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */
      if (GET_CODE (DECL_RTL (exp)) == MEM
          && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
                               XEXP (DECL_RTL (exp), 0));
      if (GET_CODE (DECL_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_SUM
          && modifier != EXPAND_INITIALIZER)
        {
          /* DECL_RTL probably contains a constant address.
             On RISC machines where a constant address isn't valid,
             make some insns to get that address into a register.  */
          if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
              || (flag_force_addr
                  && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
            return change_address (DECL_RTL (exp), VOIDmode,
                                   copy_rtx (XEXP (DECL_RTL (exp), 0)));
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != mode)
        {
          enum machine_mode decl_mode = DECL_MODE (exp);

          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */

          PROMOTE_MODE (decl_mode, unsignedp, type);

          if (decl_mode != GET_MODE (DECL_RTL (exp)))
            abort ();

          temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return DECL_RTL (exp);

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp),
                                 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
        return change_address (TREE_CST_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case SAVE_EXPR:
      context = decl_function_context (exp);
      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return change_address (temp, mode,
                                 fix_lexical_addr (XEXP (temp, 0), exp));
        }
      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == BLKmode)
            {
              temp
                = assign_stack_temp (mode, int_size_in_bytes (type), 0);
              MEM_IN_STRUCT_P (temp)
                = (TREE_CODE (type) == RECORD_TYPE
                   || TREE_CODE (type) == UNION_TYPE
                   || TREE_CODE (type) == QUAL_UNION_TYPE
                   || TREE_CODE (type) == ARRAY_TYPE);
            }
          else
            {
              enum machine_mode var_mode = mode;

              if (TREE_CODE (type) == INTEGER_TYPE
                  || TREE_CODE (type) == ENUMERAL_TYPE
                  || TREE_CODE (type) == BOOLEAN_TYPE
                  || TREE_CODE (type) == CHAR_TYPE
                  || TREE_CODE (type) == REAL_TYPE
                  || TREE_CODE (type) == POINTER_TYPE
                  || TREE_CODE (type) == OFFSET_TYPE)
                {
                  PROMOTE_MODE (var_mode, unsignedp, type);
                }

              temp = gen_reg_rtx (var_mode);
            }

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
                                      save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  Note that `unsignedp' was modified above in
             this case.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
            }

          store_expr (TREE_OPERAND (exp, 0), temp, 0);
        }

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  Note
         that `unsignedp' was modified above in this case.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return SAVE_EXPR_RTL (exp);

    case PLACEHOLDER_EXPR:
      /* If there is an object on the head of the placeholder list,
         see if some object in its references is of type TYPE.  For
         further information, see tree.def.  */
      if (placeholder_list)
        {
          tree object;

          for (object = TREE_PURPOSE (placeholder_list);
               TREE_TYPE (object) != type
               && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
                   || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
                   || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
                   || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
               object = TREE_OPERAND (object, 0))
            ;

          if (object && TREE_TYPE (object) == type)
            return expand_expr (object, original_target, tmode, modifier);
        }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
                            tmode, modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

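    /* An EXIT_EXPR exits the innermost enclosing loop when its operand is
       true; the condition is inverted below because
       expand_exit_loop_if_false branches out of the loop on falsehood.  */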
bbf6f052 3996 case EXIT_EXPR:
e44842fe
RK
3997 expand_exit_loop_if_false (NULL_PTR,
3998 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
3999 return const0_rtx;
4000
4001 case LOOP_EXPR:
0088fcb1 4002 push_temp_slots ();
bbf6f052
RK
4003 expand_start_loop (1);
4004 expand_expr_stmt (TREE_OPERAND (exp, 0));
4005 expand_end_loop ();
0088fcb1 4006 pop_temp_slots ();
bbf6f052
RK
4007
4008 return const0_rtx;
4009
4010 case BIND_EXPR:
4011 {
4012 tree vars = TREE_OPERAND (exp, 0);
4013 int vars_need_expansion = 0;
4014
4015 /* Need to open a binding contour here because
4016 if there are any cleanups they most be contained here. */
4017 expand_start_bindings (0);
4018
2df53c0b
RS
4019 /* Mark the corresponding BLOCK for output in its proper place. */
4020 if (TREE_OPERAND (exp, 2) != 0
4021 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4022 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
4023
4024 /* If VARS have not yet been expanded, expand them now. */
4025 while (vars)
4026 {
4027 if (DECL_RTL (vars) == 0)
4028 {
4029 vars_need_expansion = 1;
4030 expand_decl (vars);
4031 }
4032 expand_decl_init (vars);
4033 vars = TREE_CHAIN (vars);
4034 }
4035
4036 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4037
4038 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4039
4040 return temp;
4041 }
4042
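    /* An RTL_EXPR carries a precomputed insn sequence.  It may be expanded
       only once: the sequence is cleared after it is emitted, and a second
       visit is a compiler error.  */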
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
        abort ();
      emit_insns (RTL_EXPR_SEQUENCE (exp));
      RTL_EXPR_SEQUENCE (exp) = const0_rtx;
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;
          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
          return const0_rtx;
        }
      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  If we are making an initializer and all operands are
         constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
                && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
               || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp);
          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM
              && !memory_address_p (GET_MODE (constructor),
                                    XEXP (constructor, 0)))
            constructor = change_address (constructor, VOIDmode,
                                          XEXP (constructor, 0));
          return constructor;
        }

      else
        {
          if (target == 0 || ! safe_from_p (target, exp))
            {
              if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
                target = gen_reg_rtx (mode);
              else
                {
                  enum tree_code c = TREE_CODE (type);
                  target
                    = assign_stack_temp (mode, int_size_in_bytes (type), 0);
                  if (c == RECORD_TYPE || c == UNION_TYPE
                      || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
                    MEM_IN_STRUCT_P (target) = 1;
                }
            }
          store_constructor (exp, target);
          return target;
        }

    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);
        tree exp2;

        /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
           for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
           This code has the same general effect as simply doing
           expand_expr on the save expr, except that the expression PTR
           is computed for use as a memory address.  This means different
           code, suitable for indexing, may be generated.  */
        if (TREE_CODE (exp1) == SAVE_EXPR
            && SAVE_EXPR_RTL (exp1) == 0
            && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
            && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
            && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
          {
            temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
                                VOIDmode, EXPAND_SUM);
            op0 = memory_address (mode, temp);
            op0 = copy_all_regs (op0);
            SAVE_EXPR_RTL (exp1) = op0;
          }
        else
          {
            op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
            op0 = memory_address (mode, op0);
          }

        temp = gen_rtx (MEM, mode, op0);
        /* If address was computed by addition,
           mark this as an element of an aggregate.  */
        if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
            || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
                && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
            || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
            || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
            || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
            || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
            || (TREE_CODE (exp1) == ADDR_EXPR
                && (exp2 = TREE_OPERAND (exp1, 0))
                && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
                    || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
                    || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
                    || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
          MEM_IN_STRUCT_P (temp) = 1;
        MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
         a location is accessed through a pointer to const does not mean
         that the value there can never change.  */
        RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
#endif
        return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();

      {
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = TREE_OPERAND (exp, 1);
        tree index_type = TREE_TYPE (index);
        int i;

        if (TREE_CODE (low_bound) != INTEGER_CST
            && contains_placeholder_p (low_bound))
          low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion, (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

           But sizetype isn't quite right either (especially if
           the lowbound is negative).  FIXME */

        if (! integer_zerop (low_bound))
          index = fold (build (MINUS_EXPR, index_type, index,
                               convert (sizetype, low_bound)));

        if (TREE_CODE (index) != INTEGER_CST
            || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
          {
            /* Nonconstant array index or nonconstant element size.
               Generate the tree for *(&array+index) and expand that,
               except do it in a language-independent way
               and don't complain about non-lvalue arrays.
               `mark_addressable' should already have been called
               for any array for which this case will be reached.  */

            /* Don't forget the const or volatile flag from the array
               element.  */
            tree variant_type = build_type_variant (type,
                                                    TREE_READONLY (exp),
                                                    TREE_THIS_VOLATILE (exp));
            tree array_adr = build1 (ADDR_EXPR,
                                     build_pointer_type (variant_type), array);
            tree elt;
            tree size = size_in_bytes (type);

            /* Convert the integer argument to a type the same size as a
               pointer so the multiply won't overflow spuriously.  */
            if (TYPE_PRECISION (index_type) != POINTER_SIZE)
              index = convert (type_for_size (POINTER_SIZE, 0), index);

            if (TREE_CODE (size) != INTEGER_CST
                && contains_placeholder_p (size))
              size = build (WITH_RECORD_EXPR, sizetype, size, exp);

            /* Don't think the address has side effects
               just because the array does.
               (In some cases the address might have side effects,
               and we fail to record that fact here.  However, it should not
               matter, since expand_expr should not care.)  */
            TREE_SIDE_EFFECTS (array_adr) = 0;

            elt = build1 (INDIRECT_REF, type,
                          fold (build (PLUS_EXPR,
                                       TYPE_POINTER_TO (variant_type),
                                       array_adr,
                                       fold (build (MULT_EXPR,
                                                    TYPE_POINTER_TO (variant_type),
                                                    index, size)))));

            /* Volatility, etc., of new expression is same as old
               expression.  */
            TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
            TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
            TREE_READONLY (elt) = TREE_READONLY (exp);

            return expand_expr (elt, target, tmode, modifier);
          }

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.  */

        if (TREE_CODE (array) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && !TREE_INT_CST_HIGH (index)
            && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
          {
            if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
              {
                exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
                TREE_TYPE (exp) = integer_type_node;
                return expand_expr (exp, target, tmode, modifier);
              }
            if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
              {
                exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
                TREE_TYPE (exp) = integer_type_node;
                return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
                                             exp),
                                    target, tmode, modifier);
              }
          }

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
          {
            if (TREE_CODE (index) == INTEGER_CST
                && TREE_INT_CST_HIGH (index) == 0)
              {
                tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

                i = TREE_INT_CST_LOW (index);
                while (elem && i--)
                  elem = TREE_CHAIN (elem);
                if (elem)
                  return expand_expr (fold (TREE_VALUE (elem)), target,
                                      tmode, modifier);
              }
          }

        else if (optimize >= 1
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
          {
            if (TREE_CODE (index) == INTEGER_CST
                && TREE_INT_CST_HIGH (index) == 0)
              {
                tree init = DECL_INITIAL (array);

                i = TREE_INT_CST_LOW (index);
                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem = CONSTRUCTOR_ELTS (init);

                    while (elem
                           && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
                      elem = TREE_CHAIN (elem);
                    if (elem)
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && i < TREE_STRING_LENGTH (init))
                  {
                    temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
                    return convert_to_mode (mode, temp, 0);
                  }
              }
          }
      }

      /* Treat array-ref with constant index as a component-ref.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  */
      if (code != ARRAY_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
              return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
        }

      {
        enum machine_mode mode1;
        int bitsize;
        int bitpos;
        tree offset;
        int volatilep = 0;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep);
        int alignment;

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        /* In some cases, we will be offsetting OP0's address by a constant.
           So get it as a sum, if possible.  If we will be using it
           directly in an insn, we validate it.  */
        op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);

        /* If this is a constant, put it into a register if it is a
           legitimate constant and memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

            if (GET_CODE (op0) != MEM)
              abort ();
            op0 = change_address (op0, VOIDmode,
                                  gen_rtx (PLUS, Pmode, XEXP (op0, 0),
                                           force_reg (Pmode, offset_rtx)));
            /* If we have a variable offset, the known alignment
               is only that of the innermost structure containing the field.
               (Actually, we could sometimes do better by using the
               size of an element of the innermost array, but no need.)  */
            if (TREE_CODE (exp) == COMPONENT_REF
                || TREE_CODE (exp) == BIT_FIELD_REF)
              alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
                           / BITS_PER_UNIT);
          }

        /* Don't forget about volatility even if this is a bitfield.  */
        if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
          {
            op0 = copy_rtx (op0);
            MEM_VOLATILE_P (op0) = 1;
          }

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
            || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (STRICT_ALIGNMENT
                && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
            || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode)
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              abort ();

            op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
                                     unsignedp, target, ext_mode, ext_mode,
                                     alignment,
                                     int_size_in_bytes (TREE_TYPE (tem)));
            if (mode == BLKmode)
              {
                rtx new = assign_stack_temp (ext_mode,
                                             bitsize / BITS_PER_UNIT, 0);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                MEM_IN_STRUCT_P (op0) = 1;
              }

            return op0;
          }

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
                                                    (bitpos / BITS_PER_UNIT)));
        else
          op0 = change_address (op0, mode1,
                                plus_constant (XEXP (op0, 0),
                                               (bitpos / BITS_PER_UNIT)));
        MEM_IN_STRUCT_P (op0) = 1;
        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
          return op0;
        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
        convert_move (target, op0, unsignedp);
        return target;
      }

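    /* An OFFSET_REF is expanded by forming the address <&op0 + op1> and
       then dereferencing it, so the result is always a MEM.  */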
    case OFFSET_REF:
      {
        tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
        tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
        op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
        temp = gen_rtx (MEM, mode, memory_address (mode, op0));
        MEM_IN_STRUCT_P (temp) = 1;
        MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
         a location is accessed through a pointer to const does not mean
         that the value there can never change.  */
        RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
#endif
        return temp;
      }

      /* Intended for a reference to a buffer of a file-object in Pascal.
         But it's not certain that a special tree code will really be
         necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

      /* IN_EXPR: Inlined pascal set IN expression.

         Algorithm:
           rlo       = set_low - (set_low % bits_per_word);
           the_word  = set [(index - rlo) / bits_per_word];
           bit_index = index % bits_per_word;
           bitmask   = 1 << bit_index;
           return !!(the_word & bitmask);  */
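      /* For instance, assuming bits_per_word == 8, set_low == 3 and
         index == 11: rlo = 3 - 3%8 = 0, the_word = set[(11-0)/8] = set[1],
         bit_index = 11%8 = 3, so the result is !!(set[1] & (1 << 3)).  */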
    case IN_EXPR:
      preexpand_calls (exp);
      {
        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        tree set_type = TREE_TYPE (set);

        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));

        rtx index_val;
        rtx lo_r;
        rtx hi_r;
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;
        rtx setval, setaddr;
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));

        if (target == 0)
          target = gen_reg_rtx (mode);

        /* If domain is empty, answer is no.  */
        if (tree_int_cst_lt (set_high_bound, set_low_bound))
          return const0_rtx;

        index_val = expand_expr (index, 0, VOIDmode, 0);
        lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        setval = expand_expr (set, 0, VOIDmode, 0);
        setaddr = XEXP (setval, 0);

        /* Compare index against bounds, if they are constant.  */
        if (GET_CODE (index_val) == CONST_INT
            && GET_CODE (lo_r) == CONST_INT
            && INTVAL (index_val) < INTVAL (lo_r))
          return const0_rtx;

        if (GET_CODE (index_val) == CONST_INT
            && GET_CODE (hi_r) == CONST_INT
            && INTVAL (hi_r) < INTVAL (index_val))
          return const0_rtx;

        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          {
            emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
                           GET_MODE (index_val), 0, 0);
            emit_jump_insn (gen_blt (op1));
          }

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          {
            emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
                           GET_MODE (index_val), 0, 0);
            emit_jump_insn (gen_bgt (op1));
          }

        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, 0, OPTAB_LIB_WIDEN);

        diff = expand_binop (index_mode, sub_optab,
                             index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab,
                                             diff, setaddr, NULL_RTX, 0,
                                             OPTAB_LIB_WIDEN));
        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx (MEM, byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }

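    /* The first expansion of a WITH_CLEANUP_EXPR computes the value and
       queues the cleanup (operand 2) on cleanups_this_call; any later
       visit just returns the rtl saved from that first expansion.  */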
    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
        {
          RTL_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
          cleanups_this_call
            = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 2) = 0;
        }
      return RTL_EXPR_RTL (exp);

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        return expand_builtin (exp, target, subtarget, tmode, ignore);
      /* If this call was expanded already by preexpand_calls,
         just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
        return CALL_EXPR_RTL (exp);
      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
          if (target == 0)
            {
              if (mode == BLKmode)
                {
                  if (TYPE_SIZE (type) == 0
                      || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
                    abort ();
                  target = assign_stack_temp (BLKmode,
                                              (TREE_INT_CST_LOW (TYPE_SIZE (type))
                                               + BITS_PER_UNIT - 1)
                                              / BITS_PER_UNIT, 0);
                }
              else
                target = gen_reg_rtx (mode);
            }
          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        change_address (target, TYPE_MODE (valtype), 0), 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
                         TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, 1,
                         int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
        return op0;
      /* If arg is a constant integer being extended from a narrower mode,
         we must really truncate to get the extended bits right.  Otherwise
         (unsigned long) (unsigned char) ("\377"[0])
         would come out as ffffffff.  */
      if (GET_MODE (op0) == VOIDmode
          && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
              < GET_MODE_BITSIZE (mode)))
        {
          /* MODE must be narrower than HOST_BITS_PER_INT.  */
          int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));

          if (width < HOST_BITS_PER_WIDE_INT)
            {
              HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
                                   : CONST_DOUBLE_LOW (op0));
              if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
                  || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
                val &= ((HOST_WIDE_INT) 1 << width) - 1;
              else
                val |= ~(((HOST_WIDE_INT) 1 << width) - 1);

              op0 = GEN_INT (val);
            }
          else
            {
              op0 = (simplify_unary_operation
                     ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
                       ? ZERO_EXTEND : SIGN_EXTEND),
                      mode, op0,
                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
              if (op0 == 0)
                abort ();
            }
        }
      if (GET_MODE (op0) == VOIDmode)
        return op0;
      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
      if (flag_force_mem && GET_CODE (op0) == MEM)
        op0 = copy_to_reg (op0);

      if (target == 0)
        return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }

      /* If the result is to be Pmode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || mode == Pmode)
        {
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 EXPAND_SUM);
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != Pmode)
        goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }
      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
        }

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
        temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx (PLUS, mode, op0, op1);

4841 case MINUS_EXPR:
4842 /* Handle difference of two symbolic constants,
4843 for the sake of an initializer. */
4844 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4845 && really_constant_p (TREE_OPERAND (exp, 0))
4846 && really_constant_p (TREE_OPERAND (exp, 1)))
4847 {
906c4e36
RK
4848 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4849 VOIDmode, modifier);
4850 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4851 VOIDmode, modifier);
bbf6f052
RK
4852 return gen_rtx (MINUS, mode, op0, op1);
4853 }
4854 /* Convert A - const to A + (-const). */
4855 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4856 {
4857 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4858 fold (build1 (NEGATE_EXPR, type,
4859 TREE_OPERAND (exp, 1))));
4860 goto plus_expr;
4861 }
4862 this_optab = sub_optab;
4863 goto binop;
4864
4865 case MULT_EXPR:
4866 preexpand_calls (exp);
4867 /* If first operand is constant, swap them.
4868 Thus the following special case checks need only
4869 check the second operand. */
4870 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4871 {
4872 register tree t1 = TREE_OPERAND (exp, 0);
4873 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4874 TREE_OPERAND (exp, 1) = t1;
4875 }
4876
4877 /* Attempt to return something suitable for generating an
4878 indexed address, for machines that support that. */
4879
4880 if (modifier == EXPAND_SUM && mode == Pmode
4881 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 4882 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
4883 {
4884 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4885
4886 /* Apply distributive law if OP0 is x+c. */
4887 if (GET_CODE (op0) == PLUS
4888 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4889 return gen_rtx (PLUS, mode,
4890 gen_rtx (MULT, mode, XEXP (op0, 0),
906c4e36
RK
4891 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4892 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4893 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
4894
4895 if (GET_CODE (op0) != REG)
906c4e36 4896 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
4897 if (GET_CODE (op0) != REG)
4898 op0 = copy_to_mode_reg (mode, op0);
4899
4900 return gen_rtx (MULT, mode, op0,
906c4e36 4901 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
4902 }

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
          this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                        ? umul_widen_optab : smul_widen_optab);
          if (mode == GET_MODE_WIDER_MODE (innermode)
              && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
            {
              op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                 NULL_RTX, VOIDmode, 0);
              if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                   VOIDmode, 0);
              else
                op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                   NULL_RTX, VOIDmode, 0);
              goto binop2;
            }
        }
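      /* E.g. on a hypothetical machine where SImode is twice as wide as
         HImode and a signed widening multiply exists, (int) h1 * (int) h2
         with h1, h2 of type short is emitted as one widening multiply
         instead of two sign extensions plus a full SImode multiply.  */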
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then if the divisor is constant can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();                 /* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      {
        enum machine_mode opmode
          = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
            || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
          return expand_complex_abs (opmode, op0, target, unsignedp);
      }

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
        return op0;

      /* First try to do it with a special abs instruction.  */
      temp = expand_unop (mode, abs_optab, op0, target, 0);
      if (temp != 0)
        return temp;

      /* If this machine has expensive jumps, we can do integer absolute
         value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
         where W is the width of MODE.  */

      if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
        {
          rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
                                       size_int (GET_MODE_BITSIZE (mode) - 1),
                                       NULL_RTX, 0);

          temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
                               OPTAB_LIB_WIDEN);
          if (temp != 0)
            temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
                                 OPTAB_LIB_WIDEN);

          if (temp != 0)
            return temp;
        }
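      /* Worked example of the identity above, with W == 32 and x == -5:
         x >> 31 == -1 (all one bits), (-1 ^ -5) == 4, and 4 - (-1) == 5.
         With x == 5: x >> 31 == 0, (0 ^ 5) == 5, and 5 - 0 == 5.  */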

      /* If that does not win, use conditional jump and negate.  */
      target = original_target;
      temp = gen_label_rtx ();
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
          || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      emit_move_insn (target, op0);
      emit_cmp_insn (target,
                     expand_expr (convert (type, integer_zero_node),
                                  NULL_RTX, VOIDmode, 0),
                     GE, NULL_RTX, mode, 0, 0);
      NO_DEFER_POP;
      emit_jump_insn (gen_bge (temp));
      op0 = expand_unop (mode, neg_optab, target, target, 0);
      if (op0 != target)
        emit_move_insn (target, op0);
      emit_label (temp);
      OK_DEFER_POP;
      return target;

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
          || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      if (target != op0)
        emit_move_insn (target, op0);
      op0 = gen_label_rtx ();
      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
          && !can_compare_p (mode))
        {
          if (code == MAX_EXPR)
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
          else
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
          emit_move_insn (target, op1);
        }
      else
        {
          if (code == MAX_EXPR)
            temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
                    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
                    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
          else
            temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
                    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
                    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
          if (temp == const0_rtx)
            emit_move_insn (target, op1);
          else if (temp != const_true_rtx)
            {
              if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
                emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
              else
                abort ();
              emit_move_insn (target, op1);
            }
        }
      emit_label (op0);
      return target;
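      /* So for signed MAX (a, b) the fallback emits, schematically:
             target = a;
             if (target >= b) goto L;
             target = b;
           L:
         leaving the larger value in TARGET on either path.  */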

/* ??? Can optimize when the operand of this is a bitwise operation,
   by using a different bitwise operation.  */
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

/* ??? Can optimize bitwise operations with one arg constant.
   Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
   and (a bitwise1 b) bitwise2 b (etc)
   but that is probably not worth while.  */

/* BIT_AND_EXPR is for bitwise anding.
   TRUTH_AND_EXPR is for anding two boolean values
   when we want in all cases to compute both of them.
   In general it is fastest to do TRUTH_AND_EXPR by
   computing both operands as actual zero-or-1 values
   and then bitwise anding.  In cases where there cannot
   be any side effects, better code would be made by
   treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
   but the question is how to recognize those cases.  */

      /* TRUTH_AND_EXPR can have a result whose mode doesn't match
         the operands.  If so, don't use our target.  */
    case TRUTH_AND_EXPR:
      if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        subtarget = 0;
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

/* See comment above about TRUTH_AND_EXPR; it applies here too.  */
    case TRUTH_OR_EXPR:
      if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        subtarget = 0;
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
      if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        subtarget = 0;
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);

/* Could determine the answer when only additive constants differ.
   Also, the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && GET_CODE (original_target) == REG
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
          if (temp != original_target)
            temp = copy_to_reg (temp);
          op1 = gen_label_rtx ();
          emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
                         GET_MODE (temp), unsignedp, 0);
          emit_jump_insn (gen_beq (op1));
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }
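      /* I.e. for foo != 0 the block above emits, schematically:
             temp = foo;
             if (temp == 0) goto L;
             temp = 1;
           L:
         avoiding any need for a set-flag instruction.  */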
      /* If no set-flag instruction, must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
          && (target == 0 || ! safe_from_p (target, exp)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && GET_CODE (target) == REG
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;
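      /* Since OP0 is known to be 0 or 1, XOR with 1 computes the logical
         negation directly: 0 ^ 1 == 1 and 1 ^ 1 == 0.  */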

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
                          (ignore ? const0_rtx : target),
                          VOIDmode, 0);

    case COND_EXPR:
      {
        /* Note that COND_EXPRs whose type is a structure or union
           are required to be constructed to contain assignments of
           a temporary variable, so that we can evaluate them here
           for side effect only.  If type is void, we must do likewise.  */

        /* If an arm of the branch requires a cleanup,
           only that cleanup is performed.  */

        tree singleton = 0;
        tree binary_op = 0, unary_op = 0;
        tree old_cleanups = cleanups_this_call;
        cleanups_this_call = 0;

        /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
           convert it to our mode, if necessary.  */
        if (integer_onep (TREE_OPERAND (exp, 1))
            && integer_zerop (TREE_OPERAND (exp, 2))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            if (ignore)
              {
                expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                             modifier);
                return const0_rtx;
              }

            op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
            if (GET_MODE (op0) == mode)
              return op0;
            if (target == 0)
              target = gen_reg_rtx (mode);
            convert_move (target, op0, unsignedp);
            return target;
          }

        /* If we are not to produce a result, we have no target.  Otherwise,
           if a target was specified use it; it will not be used as an
           intermediate target unless it is safe.  If no target, use a
           temporary.  */

        if (ignore)
          temp = 0;
        else if (original_target
                 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
          temp = original_target;
        else if (mode == BLKmode)
          {
            if (TYPE_SIZE (type) == 0
                || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
              abort ();

            temp = assign_stack_temp (BLKmode,
                                      (TREE_INT_CST_LOW (TYPE_SIZE (type))
                                       + BITS_PER_UNIT - 1)
                                      / BITS_PER_UNIT, 0);
            MEM_IN_STRUCT_P (temp)
              = (TREE_CODE (type) == RECORD_TYPE
                 || TREE_CODE (type) == UNION_TYPE
                 || TREE_CODE (type) == QUAL_UNION_TYPE
                 || TREE_CODE (type) == ARRAY_TYPE);
          }
        else
          temp = gen_reg_rtx (mode);

        /* Check for X ? A + B : A.  If we have this, we can copy
           A to the output and conditionally add B.  Similarly for unary
           operations.  Don't do this if X has side-effects because
           those side effects might affect A or B and the "?" operation is
           a sequence point in ANSI.  (We test for side effects later.)  */

        if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
            && operand_equal_p (TREE_OPERAND (exp, 2),
                                TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 2),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

        /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
           operation, do this as A + (X != 0).  Similarly for other simple
           binary operators.  */
        if (temp && singleton && binary_op
            && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
            && (TREE_CODE (binary_op) == PLUS_EXPR
                || TREE_CODE (binary_op) == MINUS_EXPR
                || TREE_CODE (binary_op) == BIT_IOR_EXPR
                || TREE_CODE (binary_op) == BIT_XOR_EXPR
                || TREE_CODE (binary_op) == BIT_AND_EXPR)
            && integer_onep (TREE_OPERAND (binary_op, 1))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            rtx result;
            optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
                            : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
                            : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
                            : and_optab);

            /* If we had X ? A : A + 1, do this as A + (X == 0).

               We have to invert the truth value here and then put it
               back later if do_store_flag fails.  We cannot simply copy
               TREE_OPERAND (exp, 0) to another variable and modify that
               because invert_truthvalue can modify the tree pointed to
               by its argument.  */
            if (singleton == TREE_OPERAND (exp, 1))
              TREE_OPERAND (exp, 0)
                = invert_truthvalue (TREE_OPERAND (exp, 0));

            result = do_store_flag (TREE_OPERAND (exp, 0),
                                    (safe_from_p (temp, singleton)
                                     ? temp : NULL_RTX),
                                    mode, BRANCH_COST <= 1);

            if (result)
              {
                op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
                return expand_binop (mode, boptab, op1, result, temp,
                                     unsignedp, OPTAB_LIB_WIDEN);
              }
            else if (singleton == TREE_OPERAND (exp, 1))
              TREE_OPERAND (exp, 0)
                = invert_truthvalue (TREE_OPERAND (exp, 0));
          }
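        /* E.g. x ? a + 1 : a becomes a + (x != 0) here whenever the
           comparison can be computed with a store-flag instruction,
           so no branch is emitted at all.  */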

        NO_DEFER_POP;
        op0 = gen_label_rtx ();

        if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
          {
            if (temp != 0)
              {
                /* If the target conflicts with the other operand of the
                   binary op, we can't use it.  Also, we can't use the target
                   if it is a hard register, because evaluating the condition
                   might clobber it.  */
                if ((binary_op
                     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
                    || (GET_CODE (temp) == REG
                        && REGNO (temp) < FIRST_PSEUDO_REGISTER))
                  temp = gen_reg_rtx (mode);
                store_expr (singleton, temp, 0);
              }
            else
              expand_expr (singleton,
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            if (cleanups_this_call)
              {
                sorry ("aggregate value in COND_EXPR");
                cleanups_this_call = 0;
              }
            if (singleton == TREE_OPERAND (exp, 1))
              jumpif (TREE_OPERAND (exp, 0), op0);
            else
              jumpifnot (TREE_OPERAND (exp, 0), op0);

            if (binary_op && temp == 0)
              /* Just touch the other operand.  */
              expand_expr (TREE_OPERAND (binary_op, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            else if (binary_op)
              store_expr (build (TREE_CODE (binary_op), type,
                                 make_tree (type, temp),
                                 TREE_OPERAND (binary_op, 1)),
                          temp, 0);
            else
              store_expr (build1 (TREE_CODE (unary_op), type,
                                  make_tree (type, temp)),
                          temp, 0);
            op1 = op0;
          }
#if 0
        /* This is now done in jump.c and is better done there because it
           produces shorter register lifetimes.  */

        /* Check for both possibilities either constants or variables
           in registers (but not the same as the target!).  If so, can
           save branches by assigning one, branching, and assigning the
           other.  */
        else if (temp && GET_MODE (temp) != BLKmode
                 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
                     || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
                          || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
                         && DECL_RTL (TREE_OPERAND (exp, 1))
                         && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
                         && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
                 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
                     || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
                          || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
                         && DECL_RTL (TREE_OPERAND (exp, 2))
                         && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
                         && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
          {
            if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            jumpifnot (TREE_OPERAND (exp, 0), op0);
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            op1 = op0;
          }
#endif
        /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
           comparison operator.  If we have one of these cases, set the
           output to A, branch on A (cse will merge these two references),
           then set the output to FOO.  */
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 1), 0)
                 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
          {
            if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            jumpif (TREE_OPERAND (exp, 0), op0);
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            op1 = op0;
          }
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 2), 0)
                 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
          {
            if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 2), temp, 0);
            jumpifnot (TREE_OPERAND (exp, 0), op0);
            store_expr (TREE_OPERAND (exp, 1), temp, 0);
            op1 = op0;
          }
        else
          {
            op1 = gen_label_rtx ();
            jumpifnot (TREE_OPERAND (exp, 0), op0);
            if (temp != 0)
              store_expr (TREE_OPERAND (exp, 1), temp, 0);
            else
              expand_expr (TREE_OPERAND (exp, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            if (cleanups_this_call)
              {
                sorry ("aggregate value in COND_EXPR");
                cleanups_this_call = 0;
              }

            emit_queue ();
            emit_jump_insn (gen_jump (op1));
            emit_barrier ();
            emit_label (op0);
            if (temp != 0)
              store_expr (TREE_OPERAND (exp, 2), temp, 0);
            else
              expand_expr (TREE_OPERAND (exp, 2),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
          }

        if (cleanups_this_call)
          {
            sorry ("aggregate value in COND_EXPR");
            cleanups_this_call = 0;
          }

        emit_queue ();
        emit_label (op1);
        OK_DEFER_POP;
        cleanups_this_call = old_cleanups;
        return temp;
      }

    case TARGET_EXPR:
      {
        /* Something needs to be initialized, but we didn't know
           where that thing was when building the tree.  For example,
           it could be the return value of a function, or a parameter
           to a function which lays down in the stack, or a temporary
           variable which must be passed by reference.

           We guarantee that the expression will either be constructed
           or copied into our original target.  */

        tree slot = TREE_OPERAND (exp, 0);
        tree exp1;

        if (TREE_CODE (slot) != VAR_DECL)
          abort ();

        if (target == 0)
          {
            if (DECL_RTL (slot) != 0)
              {
                target = DECL_RTL (slot);
                /* We have already expanded the slot, so don't do
                   it again.  (mrs)  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
                /* All temp slots at this level must not conflict.  */
                preserve_temp_slots (target);
                DECL_RTL (slot) = target;
              }

#if 0
            /* I bet this needs to be done, and I bet that it needs to
               be above, inside the else clause.  The reason is
               simple, how else is it going to get cleaned up? (mrs)

               The reason it probably did not work before, and was
               commented out, is that this was re-expanding already
               expanded target_exprs (target == 0 and DECL_RTL (slot)
               != 0), also cleaning them up many times as well.  :-(  */

            /* Since SLOT is not known to the called function
               to belong to its stack frame, we must build an explicit
               cleanup.  This case occurs when we must build up a reference
               to pass the reference as an argument.  In this case,
               it is very likely that such a reference need not be
               built here.  */

            if (TREE_OPERAND (exp, 2) == 0)
              TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
            if (TREE_OPERAND (exp, 2))
              cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
                                              cleanups_this_call);
#endif
          }
        else
          {
            /* This case does occur, when expanding a parameter which
               needs to be constructed on the stack.  The target
               is the actual stack address that we want to initialize.
               The function we call will perform the cleanup in this case.  */

            /* If we have already assigned it space, use that space,
               not the target that we were passed in, as our target
               parameter is only a hint.  */
            if (DECL_RTL (slot) != 0)
              {
                target = DECL_RTL (slot);
                /* We have already expanded the slot, so don't do
                   it again.  (mrs)  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }

            DECL_RTL (slot) = target;
          }

        exp1 = TREE_OPERAND (exp, 1);
        /* Mark it as expanded.  */
        TREE_OPERAND (exp, 1) = NULL_TREE;

        return expand_expr (exp1, target, tmode, modifier);
      }

    case INIT_EXPR:
      {
        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);
        tree noncopied_parts = 0;
        tree lhs_type = TREE_TYPE (lhs);

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
          noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
                                                  TYPE_NONCOPIED_PARTS (lhs_type));
        while (noncopied_parts != 0)
          {
            expand_assignment (TREE_VALUE (noncopied_parts),
                               TREE_PURPOSE (noncopied_parts), 0, 0);
            noncopied_parts = TREE_CHAIN (noncopied_parts);
          }
        return temp;
      }

    case MODIFY_EXPR:
      {
        /* If lhs is complex, expand calls in rhs before computing it.
           That's so we don't compute a pointer and save it over a call.
           If lhs is simple, compute it first so we can give it as a
           target if the rhs is just a call.  This avoids an extra temp and copy
           and that prevents a partial-subsumption which makes bad code.
           Actually we could treat component_ref's of vars like vars.  */

        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);
        tree noncopied_parts = 0;
        tree lhs_type = TREE_TYPE (lhs);

        temp = 0;

        if (TREE_CODE (lhs) != VAR_DECL
            && TREE_CODE (lhs) != RESULT_DECL
            && TREE_CODE (lhs) != PARM_DECL)
          preexpand_calls (exp);

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
        if (ignore
            && TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
            && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
          {
            rtx label = gen_label_rtx ();

            do_jump (TREE_OPERAND (rhs, 1),
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
                                              ? integer_one_node
                                              : integer_zero_node)),
                               0, 0);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }
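        /* E.g. for one-bit fields, x.a |= y.b is emitted as
               if (y.b == 0) goto L;
               x.a = 1;
             L:
           skipping the read-modify-write of x.a when y.b is zero.  */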

        if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
            && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
          noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
                                                  TYPE_NONCOPIED_PARTS (lhs_type));

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        while (noncopied_parts != 0)
          {
            expand_assignment (TREE_PURPOSE (noncopied_parts),
                               TREE_VALUE (noncopied_parts), 0, 0);
            noncopied_parts = TREE_CHAIN (noncopied_parts);
          }
        return temp;
      }

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore);

    case ADDR_EXPR:
      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
          && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
        {
          op0 = trampoline_address (TREE_OPERAND (exp, 0));
          op0 = force_operand (op0, target);
        }
      else
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
                             (modifier == EXPAND_INITIALIZER
                              ? modifier : EXPAND_CONST_ADDRESS));

          /* We would like the object in memory.  If it is a constant,
             we can have it be statically allocated into memory.  For
             a non-constant (REG or SUBREG), we need to allocate some
             memory and store the value into it.  */

          if (CONSTANT_P (op0))
            op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                   op0);

          /* These cases happen in Fortran.  Is that legitimate?
             Should Fortran work in another way?
             Do they happen in C?  */
          if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
              || GET_CODE (op0) == CONCAT)
            {
              /* If this object is in a register, it must not
                 be BLKmode.  */
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              enum machine_mode inner_mode = TYPE_MODE (inner_type);
              rtx memloc
                = assign_stack_temp (inner_mode,
                                     int_size_in_bytes (inner_type), 1);

              emit_move_insn (memloc, op0);
              op0 = memloc;
            }
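          /* I.e. taking the address of a value that lives in a register
             forces it out to a stack temporary and uses the temporary's
             address instead.  */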

          if (GET_CODE (op0) != MEM)
            abort ();

          if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
            return XEXP (op0, 0);
          op0 = force_operand (XEXP (op0, 0), target);
        }
      if (flag_force_addr && GET_CODE (op0) != REG)
        return force_reg (Pmode, op0);
      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();

      /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));

        rtx prev;

        /* Get the rtx code of the operands.  */
        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
        op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

        prev = get_last_insn ();

        /* Tell flow that the whole of the destination is being set.  */
        if (GET_CODE (target) == REG)
          emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

        /* Move the real (op0) and imaginary (op1) parts to their location.  */
        emit_move_insn (gen_realpart (mode, target), op0);
        emit_move_insn (gen_imagpart (mode, target), op1);

        /* Complex construction should appear as a single unit.  */
        if (GET_CODE (target) != CONCAT)
          /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
             each with a separate pseudo as destination.
             It's not correct for flow to treat them as a unit.  */
          group_insns (prev);

        return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx imag_t;
        rtx prev;

        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

        prev = get_last_insn ();

        /* Tell flow that the whole of the destination is being set.  */
        if (GET_CODE (target) == REG)
          emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

        /* Store the realpart and the negated imagpart to target.  */
        emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));

        imag_t = gen_imagpart (mode, target);
        temp = expand_unop (mode, neg_optab,
                            gen_imagpart (mode, op0), imag_t, 0);
        if (temp != imag_t)
          emit_move_insn (imag_t, temp);

        /* Conjugate should appear as a single unit.  */
        if (GET_CODE (target) != CONCAT)
          /* If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
             each with a separate pseudo as destination.
             It's not correct for flow to treat them as a unit.  */
          group_insns (prev);

        return target;
      }

    case ERROR_MARK:
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
        return op0;
      return const0_rtx;

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  preexpand_calls (exp);
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}


/* Emit bytecode to evaluate the given expression EXP to the stack.  */

void
bc_expand_expr (exp)
     tree exp;
{
  enum tree_code code;
  tree type, arg0;
  rtx r;
  struct binary_operator *binoptab;
  struct unary_operator *unoptab;
  struct increment_operator *incroptab;
  struct bc_label *lab, *lab1;
  enum bytecode_opcode opcode;


  code = TREE_CODE (exp);

  switch (code)
    {
    case PARM_DECL:

      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return;
        }

      bc_load_parmaddr (DECL_RTL (exp));
      bc_load_memory (TREE_TYPE (exp), exp);

      return;

    case VAR_DECL:

      if (DECL_RTL (exp) == 0)
        abort ();

#if 0
      if (BYTECODE_LABEL (DECL_RTL (exp)))
        bc_load_externaddr (DECL_RTL (exp));
      else
        bc_load_localaddr (DECL_RTL (exp));
#endif
      if (TREE_PUBLIC (exp))
        bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                               BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      else
        bc_load_localaddr (DECL_RTL (exp));

      bc_load_memory (TREE_TYPE (exp), exp);
      return;

    case INTEGER_CST:

#ifdef DEBUG_PRINT_CODE
      fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
#endif
      bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
                                                    ? SImode
                                                    : TYPE_MODE (TREE_TYPE (exp)))],
                           (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
      return;

    case REAL_CST:

#if 0
#ifdef DEBUG_PRINT_CODE
      fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
#endif
      /* FIX THIS: find a better way to pass real_cst's. -bson */
      bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
                           (double) TREE_REAL_CST (exp));
#else
      abort ();
#endif

      return;

    case CALL_EXPR:

      /* We build a call description vector describing the type of
         the return value and of the arguments; this call vector,
         together with a pointer to a location for the return value
         and the base of the argument list, is passed to the low
         level machine dependent call subroutine, which is responsible
         for putting the arguments wherever real functions expect
         them, as well as getting the return value back.  */
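      /* Sketch of the calldesc layout built below, inferred from the
         consing order: { nargs, return type code, return size,
         arg1 type code, arg1 size, ..., argN type code, argN size }.  */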
      {
        tree calldesc = 0, arg;
        int nargs = 0, i;
        rtx retval;

        /* Push the evaluated args on the evaluation stack in reverse
           order.  Also make an entry for each arg in the calldesc
           vector while we're at it.  */

        TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));

        for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
          {
            ++nargs;
            bc_expand_expr (TREE_VALUE (arg));

            calldesc = tree_cons ((tree) 0,
                                  size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
                                  calldesc);
            calldesc = tree_cons ((tree) 0,
                                  bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
                                  calldesc);
          }

        TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));

        /* Allocate a location for the return value and push its
           address on the evaluation stack.  Also make an entry
           at the front of the calldesc for the return value type.  */

        type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
        retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
        bc_load_localaddr (retval);

        calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
        calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);

        /* Prepend the argument count.  */
        calldesc = tree_cons ((tree) 0,
                              build_int_2 (nargs, 0),
                              calldesc);

        /* Push the address of the call description vector on the stack.  */
        calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
        TREE_TYPE (calldesc) = build_array_type (integer_type_node,
                                                 build_index_type (build_int_2 (nargs * 2, 0)));
        r = output_constant_def (calldesc);
        bc_load_externaddr (r);

        /* Push the address of the function to be called.  */
        bc_expand_expr (TREE_OPERAND (exp, 0));

        /* Call the function, popping its address and the calldesc vector
           address off the evaluation stack in the process.  */
        bc_emit_instruction (call);

        /* Pop the arguments off the stack.  */
        bc_adjust_stack (nargs);

        /* Load the return value onto the stack.  */
        bc_load_localaddr (retval);
        bc_load_memory (type, TREE_OPERAND (exp, 0));
      }
      return;

    case SAVE_EXPR:

      if (!SAVE_EXPR_RTL (exp))
        {
          /* First time around: copy to local variable.  */
          SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
                                                   TYPE_ALIGN (TREE_TYPE (exp)));
          bc_expand_expr (TREE_OPERAND (exp, 0));
          bc_emit_instruction (duplicate);

          bc_load_localaddr (SAVE_EXPR_RTL (exp));
          bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
        }
      else
        {
          /* Consecutive reference: use saved copy.  */
          bc_load_localaddr (SAVE_EXPR_RTL (exp));
          bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
        }
      return;

#if 0
      /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
         how are they handled instead?  */
    case LET_STMT:

      TREE_USED (exp) = 1;
      bc_expand_expr (STMT_BODY (exp));
      return;
#endif

    case NOP_EXPR:
    case CONVERT_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
      return;

    case MODIFY_EXPR:

      expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
      return;

    case ADDR_EXPR:

      bc_expand_address (TREE_OPERAND (exp, 0));
      return;

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
      return;

    case ARRAY_REF:

      bc_expand_expr (bc_canonicalize_array_ref (exp));
      return;

    case COMPONENT_REF:

      bc_expand_component_address (exp);

      /* If we have a bitfield, generate a proper load.  */
      bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
      return;

    case COMPOUND_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_emit_instruction (drop);
      bc_expand_expr (TREE_OPERAND (exp, 1));
      return;

    case COND_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
      lab = bc_get_bytecode_label ();
      bc_emit_bytecode (xjumpifnot);
      bc_emit_bytecode_labelref (lab);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
      bc_expand_expr (TREE_OPERAND (exp, 1));
      lab1 = bc_get_bytecode_label ();
      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (lab1);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif

      bc_emit_bytecode_labeldef (lab);
      bc_expand_expr (TREE_OPERAND (exp, 2));
      bc_emit_bytecode_labeldef (lab1);
      return;

    case TRUTH_ANDIF_EXPR:

      opcode = xjumpifnot;
      goto andorif;

    case TRUTH_ORIF_EXPR:

      opcode = xjumpif;
      goto andorif;

    case PLUS_EXPR:

      binoptab = optab_plus_expr;
      goto binop;

    case MINUS_EXPR:

      binoptab = optab_minus_expr;
      goto binop;

    case MULT_EXPR:

      binoptab = optab_mult_expr;
      goto binop;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:

      binoptab = optab_trunc_div_expr;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:

      binoptab = optab_trunc_mod_expr;
      goto binop;

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();                 /* Not used for C.  */

    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case MAX_EXPR:
    case MIN_EXPR:
    case FFS_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      abort ();                 /* FIXME */

    case RDIV_EXPR:

      binoptab = optab_rdiv_expr;
      goto binop;

    case BIT_AND_EXPR:

      binoptab = optab_bit_and_expr;
      goto binop;

    case BIT_IOR_EXPR:

      binoptab = optab_bit_ior_expr;
      goto binop;

    case BIT_XOR_EXPR:

      binoptab = optab_bit_xor_expr;
      goto binop;

    case LSHIFT_EXPR:

      binoptab = optab_lshift_expr;
      goto binop;

    case RSHIFT_EXPR:

      binoptab = optab_rshift_expr;
      goto binop;

    case TRUTH_AND_EXPR:

      binoptab = optab_truth_and_expr;
      goto binop;

    case TRUTH_OR_EXPR:

      binoptab = optab_truth_or_expr;
      goto binop;

    case LT_EXPR:

      binoptab = optab_lt_expr;
      goto binop;

    case LE_EXPR:

      binoptab = optab_le_expr;
      goto binop;

    case GE_EXPR:

      binoptab = optab_ge_expr;
      goto binop;

    case GT_EXPR:

      binoptab = optab_gt_expr;
      goto binop;

    case EQ_EXPR:

      binoptab = optab_eq_expr;
      goto binop;

    case NE_EXPR:

      binoptab = optab_ne_expr;
      goto binop;

    case NEGATE_EXPR:

      unoptab = optab_negate_expr;
      goto unop;

    case BIT_NOT_EXPR:

      unoptab = optab_bit_not_expr;
      goto unop;

    case TRUTH_NOT_EXPR:

      unoptab = optab_truth_not_expr;
      goto unop;

    case PREDECREMENT_EXPR:

      incroptab = optab_predecrement_expr;
      goto increment;

    case PREINCREMENT_EXPR:

      incroptab = optab_preincrement_expr;
      goto increment;

    case POSTDECREMENT_EXPR:

      incroptab = optab_postdecrement_expr;
      goto increment;

    case POSTINCREMENT_EXPR:

      incroptab = optab_postincrement_expr;
      goto increment;

    case CONSTRUCTOR:

      bc_expand_constructor (exp);
      return;

    case ERROR_MARK:
    case RTL_EXPR:

      return;

    case BIND_EXPR:
      {
        tree vars = TREE_OPERAND (exp, 0);
        int vars_need_expansion = 0;

        /* Need to open a binding contour here because
           if there are any cleanups they must be contained here.  */
        expand_start_bindings (0);

        /* Mark the corresponding BLOCK for output.  */
        if (TREE_OPERAND (exp, 2) != 0)
          TREE_USED (TREE_OPERAND (exp, 2)) = 1;

        /* If VARS have not yet been expanded, expand them now.  */
        while (vars)
          {
            if (DECL_RTL (vars) == 0)
              {
                vars_need_expansion = 1;
                bc_expand_decl (vars, 0);
              }
            bc_expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }

        bc_expand_expr (TREE_OPERAND (exp, 1));

        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

        return;
      }
    }

  abort ();

 binop:

  bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
                              TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
  return;


 unop:

  bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
  return;


 andorif:

  bc_expand_expr (TREE_OPERAND (exp, 0));
  bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
  lab = bc_get_bytecode_label ();

  bc_emit_instruction (duplicate);
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (lab);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  bc_emit_instruction (drop);

  bc_expand_expr (TREE_OPERAND (exp, 1));
  bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
  bc_emit_bytecode_labeldef (lab);
  return;
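  /* E.g. for a && b (opcode == xjumpifnot) the sequence above is,
     schematically:
         <push a, truth-convert>
         duplicate
         xjumpifnot L    (the jump consumes the duplicated copy,
                          leaving a's 0/1 value on the stack)
         drop
         <push b, truth-convert>
       L:
     so the value left on the stack is a's if it decided the result,
     and b's otherwise.  */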


 increment:

  type = TREE_TYPE (TREE_OPERAND (exp, 0));

  /* Push the quantum.  */
  bc_expand_expr (TREE_OPERAND (exp, 1));

  /* Convert it to the lvalue's type.  */
  bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);

  /* Push the address of the lvalue.  */
  bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));

  /* Perform the actual increment.  */
  bc_expand_increment (incroptab, type);
  return;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case NON_LVALUE_EXPR:
          exp = TREE_OPERAND (exp, 0);
          if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
            return align;
          inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
          inner = MIN (inner, max_align);
          align = MAX (align, inner);
          break;

        case PLUS_EXPR:
          /* If sum of pointer + int, restrict our maximum alignment to that
             imposed by the integer.  If not, we can't do any better than
             ALIGN.  */
          if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
            return align;

          while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
                  & (max_align - 1))
                 != 0)
            max_align >>= 1;

          exp = TREE_OPERAND (exp, 0);
          break;

        case ADDR_EXPR:
          /* See what we are pointing at and look at its alignment.  */
          exp = TREE_OPERAND (exp, 0);
          if (TREE_CODE (exp) == FUNCTION_DECL)
            align = MAX (align, FUNCTION_BOUNDARY);
          else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
            align = MAX (align, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
          else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
            align = CONSTANT_ALIGNMENT (exp, align);
#endif
          return MIN (align, max_align);

        default:
          return align;
        }
    }
}
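/* Worked example, assuming a 32-bit-aligned int X and MAX_ALIGN of 32:
   for (char *) &x + 2, the PLUS_EXPR's 2-byte constant offset shrinks
   max_align to 16 bits, the ADDR_EXPR supplies DECL_ALIGN (x) == 32,
   and the result is MIN (32, 16) == 16.  */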

/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = arg1;
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = arg0;
          return TREE_OPERAND (arg1, 0);
        }
    }

  return 0;
}
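/* E.g. given the argument "hello" + i, this returns the STRING_CST for
   "hello" and sets *PTR_OFFSET to the tree for i; for plain "hello",
   the offset returned is integer_zero_node.  */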

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;
      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return 0;
      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  */
      /* This would perhaps not be valid if we were dealing with named
         arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
        return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }
  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }
  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}
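/* E.g. for "hello" + 2 the offset is the constant 2, so the result is
   size_int (strlen ("hello" + 2)), i.e. 3.  With a non-constant offset
   it gives up (returns 0) as soon as it sees any zero byte, including
   the terminating null that build_string appends.  */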

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  optab builtin_optab;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
        break;

      if (arglist == 0
          /* Arg could be wrong type if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
        break;

      /* Stabilize and compute the argument.  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
          && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
        {
          exp = copy_node (exp);
          arglist = copy_node (arglist);
          TREE_OPERAND (exp, 1) = arglist;
          TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
        }
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_SIN:
          builtin_optab = sin_optab; break;
        case BUILT_IN_COS:
          builtin_optab = cos_optab; break;
        case BUILT_IN_FSQRT:
          builtin_optab = sqrt_optab; break;
        default:
          abort ();
        }

      /* Compute into TARGET.
         Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
                            builtin_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
         sequence (without outputting the insns) and break, causing
         a call to the library function.  */
      if (target == 0)
        {
          end_sequence ();
          break;
        }

      /* Check the results by default.  But if flag_fast_math is turned on,
         then assume sqrt will always be called with valid arguments.  */

      if (! flag_fast_math)
        {
          /* Don't define the builtin FP instructions
             if your machine is not IEEE.  */
          if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
            abort ();

          lab1 = gen_label_rtx ();

          /* Test the result; if it is NaN, set errno=EDOM because
             the argument was not in the domain.  */
          emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
          emit_jump_insn (gen_beq (lab1));

#if TARGET_EDOM
          {
#ifdef GEN_ERRNO_RTX
            rtx errno_rtx = GEN_ERRNO_RTX;
#else
            rtx errno_rtx
              = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
#endif

            emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
          }
#else
          /* We can't set errno=EDOM directly; let the library call do it.
             Pop the arguments right away in case the call gets deleted.  */
          NO_DEFER_POP;
          expand_call (exp, target, 0);
          OK_DEFER_POP;
#endif

          emit_label (lab1);
        }
6721
6722 /* Output the entire sequence. */
6723 insns = get_insns ();
6724 end_sequence ();
6725 emit_insns (insns);
6726
6727 return target;
6728
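/* Illustration only, not part of this file: the check emitted above
   relies on the IEEE rule that only a NaN compares unequal to itself,
   i.e. it behaves like

	double r = sqrt (x);
	if (r != r)		(true exactly when r is NaN)
	  errno = EDOM;

   which is why TARGET is compared against itself and the errno store
   is branched around when the result is well defined.  */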
6729 /* __builtin_apply_args returns block of memory allocated on
6730 the stack into which is stored the arg pointer, structure
6731 value address, static chain, and all the registers that might
6732 possibly be used in performing a function call. The code is
6733 moved to the start of the function so the incoming values are
6734 saved. */
6735 case BUILT_IN_APPLY_ARGS:
6736 /* Don't do __builtin_apply_args more than once in a function.
6737 Save the result of the first call and reuse it. */
6738 if (apply_args_value != 0)
6739 return apply_args_value;
6740 {
6741 /* When this function is called, it means that registers must be
6742 saved on entry to this function. So we migrate the
6743 call to the first insn of this function. */
6744 rtx temp;
6745 rtx seq;
6746
6747 start_sequence ();
6748 temp = expand_builtin_apply_args ();
6749 seq = get_insns ();
6750 end_sequence ();
6751
6752 apply_args_value = temp;
6753
6754 /* Put the sequence after the NOTE that starts the function.
6755 If this is inside a SEQUENCE, make the outer-level insn
6756 chain current, so the code is placed at the start of the
6757 function. */
6758 push_topmost_sequence ();
6759 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6760 pop_topmost_sequence ();
6761 return temp;
6762 }
6763
6764 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6765 FUNCTION with a copy of the parameters described by
6766 ARGUMENTS, and ARGSIZE. It returns a block of memory
6767 allocated on the stack into which is stored all the registers
6768 that might possibly be used for returning the result of a
6769 function. ARGUMENTS is the value returned by
6770 __builtin_apply_args. ARGSIZE is the number of bytes of
6771 arguments that must be copied. ??? How should this value be
6772 computed? We'll also need a safe worst case value for varargs
6773 functions. */
6774 case BUILT_IN_APPLY:
6775 if (arglist == 0
6776 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6777 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6778 || TREE_CHAIN (arglist) == 0
6779 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6780 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6781 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6782 return const0_rtx;
6783 else
6784 {
6785 int i;
6786 tree t;
6787 rtx ops[3];
6788
6789 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6790 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6791
6792 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6793 }
6794
6795 /* __builtin_return (RESULT) causes the function to return the
6796 value described by RESULT. RESULT is address of the block of
6797 memory returned by __builtin_apply. */
6798 case BUILT_IN_RETURN:
6799 if (arglist
6800 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6801 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
6802 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
6803 NULL_RTX, VOIDmode, 0));
6804 return const0_rtx;
6805
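/* Illustration only, not part of this file: a minimal sketch of the
   three builtins working together to forward a call.  The names
   `forward' and `target_fn' and the size 64 are hypothetical; 64 must
   be a safe upper bound on the bytes of stack arguments:

	void forward ()
	{
	  void *args   = __builtin_apply_args ();
	  void *result = __builtin_apply ((void (*) ()) target_fn,
					  args, 64);
	  __builtin_return (result);
	}
*/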
6806 case BUILT_IN_SAVEREGS:
6807 /* Don't do __builtin_saveregs more than once in a function.
6808 Save the result of the first call and reuse it. */
6809 if (saveregs_value != 0)
6810 return saveregs_value;
6811 {
6812 /* When this function is called, it means that registers must be
6813 saved on entry to this function. So we migrate the
6814 call to the first insn of this function. */
6815 rtx temp;
6816 rtx seq;
6817 rtx valreg, saved_valreg;
6818
6819 /* Now really call the function. `expand_call' does not call
6820 expand_builtin, so there is no danger of infinite recursion here. */
6821 start_sequence ();
6822
6823#ifdef EXPAND_BUILTIN_SAVEREGS
6824 /* Do whatever the machine needs done in this case. */
6825 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6826#else
6827 /* The register where the function returns its value
6828 is likely to have something else in it, such as an argument.
6829 So preserve that register around the call. */
6830 if (value_mode != VOIDmode)
6831 {
6832 valreg = hard_libcall_value (value_mode);
6833 saved_valreg = gen_reg_rtx (value_mode);
6834 emit_move_insn (saved_valreg, valreg);
6835 }
6836
6837 /* Generate the call, putting the value in a pseudo. */
6838 temp = expand_call (exp, target, ignore);
6839
6840 if (value_mode != VOIDmode)
6841 emit_move_insn (valreg, saved_valreg);
6842#endif
6843
6844 seq = get_insns ();
6845 end_sequence ();
6846
6847 saveregs_value = temp;
6848
6849 /* Put the sequence after the NOTE that starts the function.
6850 If this is inside a SEQUENCE, make the outer-level insn
6851 chain current, so the code is placed at the start of the
6852 function. */
6853 push_topmost_sequence ();
6854 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6855 pop_topmost_sequence ();
6856 return temp;
6857 }
6858
6859 /* __builtin_args_info (N) returns word N of the arg space info
6860 for the current function. The number and meanings of words
6861     are controlled by the definition of CUMULATIVE_ARGS.  */
6862 case BUILT_IN_ARGS_INFO:
6863 {
6864 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6865 int i;
6866 int *word_ptr = (int *) &current_function_args_info;
6867 tree type, elts, result;
6868
6869 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6870 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6871 __FILE__, __LINE__);
6872
6873 if (arglist != 0)
6874 {
6875 tree arg = TREE_VALUE (arglist);
6876 if (TREE_CODE (arg) != INTEGER_CST)
6877 error ("argument of `__builtin_args_info' must be constant");
6878 else
6879 {
6880 int wordnum = TREE_INT_CST_LOW (arg);
6881
6882 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6883 error ("argument of `__builtin_args_info' out of range");
6884 else
6885 return GEN_INT (word_ptr[wordnum]);
6886 }
6887 }
6888 else
6889 error ("missing argument in `__builtin_args_info'");
6890
6891 return const0_rtx;
6892
6893#if 0
6894 for (i = 0; i < nwords; i++)
6895 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6896
6897 type = build_array_type (integer_type_node,
6898 build_index_type (build_int_2 (nwords, 0)));
6899 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6900 TREE_CONSTANT (result) = 1;
6901 TREE_STATIC (result) = 1;
6902 result = build (INDIRECT_REF, build_pointer_type (type), result);
6903 TREE_CONSTANT (result) = 1;
6904 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6905#endif
6906 }
6907
6908 /* Return the address of the first anonymous stack arg. */
6909 case BUILT_IN_NEXT_ARG:
6910 {
6911 tree fntype = TREE_TYPE (current_function_decl);
6912 if (!(TYPE_ARG_TYPES (fntype) != 0
6913 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6914 != void_type_node)))
6915 {
6916 error ("`va_start' used in function with fixed args");
6917 return const0_rtx;
6918 }
6919 }
6920
6921 return expand_binop (Pmode, add_optab,
6922 current_function_internal_arg_pointer,
6923 current_function_arg_offset_rtx,
6924 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6925
6926 case BUILT_IN_CLASSIFY_TYPE:
6927 if (arglist != 0)
6928 {
6929 tree type = TREE_TYPE (TREE_VALUE (arglist));
6930 enum tree_code code = TREE_CODE (type);
6931 if (code == VOID_TYPE)
6932 return GEN_INT (void_type_class);
6933 if (code == INTEGER_TYPE)
6934 return GEN_INT (integer_type_class);
6935 if (code == CHAR_TYPE)
6936 return GEN_INT (char_type_class);
6937 if (code == ENUMERAL_TYPE)
6938 return GEN_INT (enumeral_type_class);
6939 if (code == BOOLEAN_TYPE)
6940 return GEN_INT (boolean_type_class);
6941 if (code == POINTER_TYPE)
6942 return GEN_INT (pointer_type_class);
6943 if (code == REFERENCE_TYPE)
6944 return GEN_INT (reference_type_class);
6945 if (code == OFFSET_TYPE)
6946 return GEN_INT (offset_type_class);
6947 if (code == REAL_TYPE)
6948 return GEN_INT (real_type_class);
6949 if (code == COMPLEX_TYPE)
6950 return GEN_INT (complex_type_class);
6951 if (code == FUNCTION_TYPE)
6952 return GEN_INT (function_type_class);
6953 if (code == METHOD_TYPE)
6954 return GEN_INT (method_type_class);
6955 if (code == RECORD_TYPE)
6956 return GEN_INT (record_type_class);
6957 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
6958 return GEN_INT (union_type_class);
6959 if (code == ARRAY_TYPE)
6960 return GEN_INT (array_type_class);
6961 if (code == STRING_TYPE)
6962 return GEN_INT (string_type_class);
6963 if (code == SET_TYPE)
6964 return GEN_INT (set_type_class);
6965 if (code == FILE_TYPE)
6966 return GEN_INT (file_type_class);
6967 if (code == LANG_TYPE)
6968 return GEN_INT (lang_type_class);
6969 }
6970 return GEN_INT (no_type_class);
6971
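/* Illustration only, not part of this file: the classification looks
   only at the argument's static type, e.g.

	__builtin_classify_type (0)	yields integer_type_class
	__builtin_classify_type (0.0)	yields real_type_class

   with the class values defined in typeclass.h.  */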
6972 case BUILT_IN_CONSTANT_P:
6973 if (arglist == 0)
6974 return const0_rtx;
6975 else
6976 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6977 ? const1_rtx : const0_rtx);
6978
6979 case BUILT_IN_FRAME_ADDRESS:
6980 /* The argument must be a nonnegative integer constant.
6981 It counts the number of frames to scan up the stack.
6982 The value is the address of that frame. */
6983 case BUILT_IN_RETURN_ADDRESS:
6984 /* The argument must be a nonnegative integer constant.
6985 It counts the number of frames to scan up the stack.
6986 The value is the return address saved in that frame. */
6987 if (arglist == 0)
6988 /* Warning about missing arg was already issued. */
6989 return const0_rtx;
6990 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6991 {
6992 error ("invalid arg to `__builtin_return_address'");
6993 return const0_rtx;
6994 }
6995 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6996 {
6997 error ("invalid arg to `__builtin_return_address'");
6998 return const0_rtx;
6999 }
7000 else
7001 {
7002 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7003 rtx tem = frame_pointer_rtx;
7004 int i;
7005
7006 /* Some machines need special handling before we can access arbitrary
7007 frames. For example, on the sparc, we must first flush all
7008 register windows to the stack. */
7009#ifdef SETUP_FRAME_ADDRESSES
7010 SETUP_FRAME_ADDRESSES ();
7011#endif
7012
7013 /* On the sparc, the return address is not in the frame, it is
7014 in a register. There is no way to access it off of the current
7015 frame pointer, but it can be accessed off the previous frame
7016 pointer by reading the value from the register window save
7017 area. */
7018#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7019 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7020 count--;
7021#endif
7022
7023 /* Scan back COUNT frames to the specified frame. */
7024 for (i = 0; i < count; i++)
7025 {
7026 /* Assume the dynamic chain pointer is in the word that
7027 the frame address points to, unless otherwise specified. */
7028#ifdef DYNAMIC_CHAIN_ADDRESS
7029 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7030#endif
7031 tem = memory_address (Pmode, tem);
7032 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7033 }
7034
7035 /* For __builtin_frame_address, return what we've got. */
7036 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7037 return tem;
7038
7039 /* For __builtin_return_address,
7040 Get the return address from that frame. */
7041#ifdef RETURN_ADDR_RTX
7042 return RETURN_ADDR_RTX (count, tem);
7043#else
7044 tem = memory_address (Pmode,
7045 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7046 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7047#endif
7048 }
7049
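/* Illustration only, not part of this file: at the source level the
   loop above implements

	void *fp = __builtin_frame_address (0);	(this frame)
	void *ra = __builtin_return_address (1);	(one frame up)

   each additional count loads one more link of the dynamic chain, so
   the argument must be a small constant and the walk is only as
   trustworthy as the target's frame layout.  */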
7050 case BUILT_IN_ALLOCA:
7051 if (arglist == 0
7052 /* Arg could be non-integer if user redeclared this fcn wrong. */
7053 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 7054 break;
7055 current_function_calls_alloca = 1;
7056 /* Compute the argument. */
7057 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7058
7059 /* Allocate the desired space. */
7060 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7061
7062 /* Record the new stack level for nonlocal gotos. */
7063 if (nonlocal_goto_handler_slot != 0)
7064 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
7065 return target;
7066
7067 case BUILT_IN_FFS:
7068 /* If not optimizing, call the library function. */
7069 if (!optimize)
7070 break;
7071
7072 if (arglist == 0
7073 /* Arg could be non-integer if user redeclared this fcn wrong. */
7074 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 7075 break;
7076
7077 /* Compute the argument. */
7078 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7079 /* Compute ffs, into TARGET if possible.
7080 Set TARGET to wherever the result comes back. */
7081 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7082 ffs_optab, op0, target, 1);
7083 if (target == 0)
7084 abort ();
7085 return target;
7086
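/* Illustration only, not part of this file: ffs returns one plus the
   index of the least significant set bit, or zero for zero:

	ffs (0) == 0,  ffs (1) == 1,  ffs (8) == 4,  ffs (12) == 3

   (12 has its lowest set bit at position 2, counting from zero).  */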
7087 case BUILT_IN_STRLEN:
7088 /* If not optimizing, call the library function. */
7089 if (!optimize)
7090 break;
7091
7092 if (arglist == 0
7093 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7094 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7b073ca6 7095 break;
7096 else
7097 {
7098 tree src = TREE_VALUE (arglist);
7099 tree len = c_strlen (src);
7100
7101 int align
7102 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7103
7104 rtx result, src_rtx, char_rtx;
7105 enum machine_mode insn_mode = value_mode, char_mode;
7106 enum insn_code icode;
7107
7108 /* If the length is known, just return it. */
7109 if (len != 0)
7110 return expand_expr (len, target, mode, 0);
7111
7112 /* If SRC is not a pointer type, don't do this operation inline. */
7113 if (align == 0)
7114 break;
7115
7116 /* Call a function if we can't compute strlen in the right mode. */
7117
7118 while (insn_mode != VOIDmode)
7119 {
7120 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7121 if (icode != CODE_FOR_nothing)
7122 break;
7123
7124 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7125 }
7126 if (insn_mode == VOIDmode)
7127 break;
7128
7129 /* Make a place to write the result of the instruction. */
7130 result = target;
7131 if (! (result != 0
7132 && GET_CODE (result) == REG
7133 && GET_MODE (result) == insn_mode
7134 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7135 result = gen_reg_rtx (insn_mode);
7136
7137 /* Make sure the operands are acceptable to the predicates. */
7138
7139 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7140 result = gen_reg_rtx (insn_mode);
7141
7142 src_rtx = memory_address (BLKmode,
7143 expand_expr (src, NULL_RTX, Pmode,
7144 EXPAND_NORMAL));
7145 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7146 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7147
7148 char_rtx = const0_rtx;
7149 char_mode = insn_operand_mode[(int)icode][2];
7150 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7151 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7152
7153 emit_insn (GEN_FCN (icode) (result,
7154 gen_rtx (MEM, BLKmode, src_rtx),
7155 char_rtx, GEN_INT (align)));
7156
7157 /* Return the value in the proper mode for this function. */
7158 if (GET_MODE (result) == value_mode)
7159 return result;
7160 else if (target != 0)
7161 {
7162 convert_move (target, result, 0);
7163 return target;
7164 }
7165 else
7166 return convert_to_mode (value_mode, result, 0);
7167 }
7168
7169 case BUILT_IN_STRCPY:
e87b4f3f 7170 /* If not optimizing, call the library function. */
ca695ac9 7171 if (!optimize)
7172 break;
7173
7174 if (arglist == 0
7175 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7176 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7177 || TREE_CHAIN (arglist) == 0
7178 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 7179 break;
ca695ac9 7180 else
db0e6d01 7181 {
ca695ac9 7182 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7183
7184 if (len == 0)
7185 break;
7186
ca695ac9 7187 len = size_binop (PLUS_EXPR, len, integer_one_node);
7188
ca695ac9 7189 chainon (arglist, build_tree_list (NULL_TREE, len));
7190 }
7191
7192      /* Falls through.  */
7193 case BUILT_IN_MEMCPY:
7194 /* If not optimizing, call the library function. */
7195 if (!optimize)
7196 break;
7197
7198 if (arglist == 0
7199 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7200 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7201 || TREE_CHAIN (arglist) == 0
7202 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7203 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7204 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 7205 break;
ca695ac9 7206 else
e7c33f54 7207 {
7208 tree dest = TREE_VALUE (arglist);
7209 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7210 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7211
7212 int src_align
7213 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7214 int dest_align
7215 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7216 rtx dest_rtx, dest_mem, src_mem;
7217
7218 /* If either SRC or DEST is not a pointer type, don't do
7219 this operation in-line. */
7220 if (src_align == 0 || dest_align == 0)
7221 {
7222 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7223 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7224 break;
7225 }
7226
7227 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7228 dest_mem = gen_rtx (MEM, BLKmode,
7229 memory_address (BLKmode, dest_rtx));
7230 src_mem = gen_rtx (MEM, BLKmode,
7231 memory_address (BLKmode,
7232 expand_expr (src, NULL_RTX,
7233 Pmode,
7234 EXPAND_NORMAL)));
7235
7236 /* Copy word part most expediently. */
7237 emit_block_move (dest_mem, src_mem,
7238 expand_expr (len, NULL_RTX, VOIDmode, 0),
7239 MIN (src_align, dest_align));
7240 return dest_rtx;
7241 }
7242
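/* Illustration only, not part of this file: the length appended above
   makes the strcpy case a plain block move,

	strcpy (dst, "abc");	becomes  memcpy (dst, "abc", 4);

   copying strlen ("abc") + 1 == 4 bytes so the terminating nul comes
   along, after which the memcpy expansion just above applies.  */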
7243/* These comparison functions need an instruction that returns an actual
7244 index. An ordinary compare that just sets the condition codes
7245 is not enough. */
7246#ifdef HAVE_cmpstrsi
7247 case BUILT_IN_STRCMP:
7248 /* If not optimizing, call the library function. */
7249 if (!optimize)
7250 break;
7251
7252 if (arglist == 0
7253 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7254 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7255 || TREE_CHAIN (arglist) == 0
7256 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 7257 break;
7258 else if (!HAVE_cmpstrsi)
7259 break;
7260 {
7261 tree arg1 = TREE_VALUE (arglist);
7262 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7263 tree offset;
7264 tree len, len2;
7265
7266 len = c_strlen (arg1);
7267 if (len)
7268 len = size_binop (PLUS_EXPR, integer_one_node, len);
7269 len2 = c_strlen (arg2);
7270 if (len2)
7271 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7272
7273 /* If we don't have a constant length for the first, use the length
7274 of the second, if we know it. We don't require a constant for
7275 this case; some cost analysis could be done if both are available
7276 but neither is constant. For now, assume they're equally cheap.
7277
7278 If both strings have constant lengths, use the smaller. This
7279	 could arise if optimization results in strcmp being called with
7280 two fixed strings, or if the code was machine-generated. We should
7281 add some code to the `memcmp' handler below to deal with such
7282 situations, someday. */
7283 if (!len || TREE_CODE (len) != INTEGER_CST)
7284 {
7285 if (len2)
7286 len = len2;
7287 else if (len == 0)
7288 break;
7289 }
7290 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7291 {
7292 if (tree_int_cst_lt (len2, len))
7293 len = len2;
7294 }
7295
7296 chainon (arglist, build_tree_list (NULL_TREE, len));
7297 }
7298
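/* Illustration only, not part of this file: the MIN bound above is
   safe because a string comparison is decided by the first nul, e.g.

	strcmp (s, "ab")	can be done as  memcmp (s, "ab", 3)

   if the shorter string has length N and both are equal through N
   bytes, byte N is nul in one and not the other, so comparing N + 1
   bytes always reaches the deciding position.  */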
7299      /* Falls through.  */
7300 case BUILT_IN_MEMCMP:
7301 /* If not optimizing, call the library function. */
7302 if (!optimize)
7303 break;
7304
7305 if (arglist == 0
7306 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7307 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7308 || TREE_CHAIN (arglist) == 0
7309 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7310 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7311 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 7312 break;
7313 else if (!HAVE_cmpstrsi)
7314 break;
7315 {
7316 tree arg1 = TREE_VALUE (arglist);
7317 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7318 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7319 rtx result;
7320
7321 int arg1_align
7322 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7323 int arg2_align
7324 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7325 enum machine_mode insn_mode
7326 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7327
7328	  /* If either arg is not a pointer type, don't do this comparison in-line. */
7329 if (arg1_align == 0 || arg2_align == 0)
7330 {
7331 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7332 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7333 break;
7334 }
7335
7336 /* Make a place to write the result of the instruction. */
7337 result = target;
7338 if (! (result != 0
7339 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7340 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7341 result = gen_reg_rtx (insn_mode);
7342
7343 emit_insn (gen_cmpstrsi (result,
7344 gen_rtx (MEM, BLKmode,
7345 expand_expr (arg1, NULL_RTX, Pmode,
7346 EXPAND_NORMAL)),
7347 gen_rtx (MEM, BLKmode,
7348 expand_expr (arg2, NULL_RTX, Pmode,
7349 EXPAND_NORMAL)),
7350 expand_expr (len, NULL_RTX, VOIDmode, 0),
7351 GEN_INT (MIN (arg1_align, arg2_align))));
7352
7353 /* Return the value in the proper mode for this function. */
7354 mode = TYPE_MODE (TREE_TYPE (exp));
7355 if (GET_MODE (result) == mode)
7356 return result;
7357 else if (target != 0)
7358 {
7359 convert_move (target, result, 0);
7360 return target;
60bac6ea 7361 }
7362 else
7363 return convert_to_mode (mode, result, 0);
7364 }
60bac6ea 7365#else
7366 case BUILT_IN_STRCMP:
7367 case BUILT_IN_MEMCMP:
7368 break;
7369#endif
7370
7371 default: /* just do library call, if unknown builtin */
7372 error ("built-in function `%s' not currently supported",
7373 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7374 }
7375
7376 /* The switch statement above can drop through to cause the function
7377 to be called normally. */
7378
7379 return expand_call (exp, target, ignore);
7380}
7381\f
7382/* Built-in functions to perform an untyped call and return. */
7383
7384/* For each register that may be used for calling a function, this
7385 gives a mode used to copy the register's value. VOIDmode indicates
7386 the register is not used for calling a function. If the machine
7387 has register windows, this gives only the outbound registers.
7388 INCOMING_REGNO gives the corresponding inbound register. */
7389static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7390
7391/* For each register that may be used for returning values, this gives
7392 a mode used to copy the register's value. VOIDmode indicates the
7393 register is not used for returning values. If the machine has
7394 register windows, this gives only the outbound registers.
7395 INCOMING_REGNO gives the corresponding inbound register. */
7396static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7397
7398/* For each register that may be used for calling a function, this
7399 gives the offset of that register into the block returned by
7400   __builtin_apply_args.  0 indicates that the register is not
7401 used for calling a function. */
7402static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7403
7404/* Return the offset of register REGNO into the block returned by
7405 __builtin_apply_args. This is not declared static, since it is
7406 needed in objc-act.c. */
7407
7408int
7409apply_args_register_offset (regno)
7410 int regno;
7411{
7412 apply_args_size ();
7413
7414 /* Arguments are always put in outgoing registers (in the argument
7415     block) when that makes sense. */
7416#ifdef OUTGOING_REGNO
7417 regno = OUTGOING_REGNO(regno);
7418#endif
7419 return apply_args_reg_offset[regno];
7420}
7421
7422/* Return the size required for the block returned by __builtin_apply_args,
7423 and initialize apply_args_mode. */
7424
7425static int
7426apply_args_size ()
7427{
7428 static int size = -1;
7429 int align, regno;
7430 enum machine_mode mode;
7431
7432 /* The values computed by this function never change. */
7433 if (size < 0)
7434 {
7435 /* The first value is the incoming arg-pointer. */
7436 size = GET_MODE_SIZE (Pmode);
7437
7438 /* The second value is the structure value address unless this is
7439 passed as an "invisible" first argument. */
7440 if (struct_value_rtx)
7441 size += GET_MODE_SIZE (Pmode);
7442
7443 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7444 if (FUNCTION_ARG_REGNO_P (regno))
bbf6f052 7445 {
7446 /* Search for the proper mode for copying this register's
7447 value. I'm not sure this is right, but it works so far. */
7448 enum machine_mode best_mode = VOIDmode;
7449
7450 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7451 mode != VOIDmode;
7452 mode = GET_MODE_WIDER_MODE (mode))
7453 if (HARD_REGNO_MODE_OK (regno, mode)
7454 && HARD_REGNO_NREGS (regno, mode) == 1)
7455 best_mode = mode;
7456
7457 if (best_mode == VOIDmode)
7458 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7459 mode != VOIDmode;
7460 mode = GET_MODE_WIDER_MODE (mode))
7461 if (HARD_REGNO_MODE_OK (regno, mode)
7462 && (mov_optab->handlers[(int) mode].insn_code
7463 != CODE_FOR_nothing))
7464 best_mode = mode;
7465
7466 mode = best_mode;
7467 if (mode == VOIDmode)
7468 abort ();
7469
7470 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7471 if (size % align != 0)
7472 size = CEIL (size, align) * align;
7473 apply_args_reg_offset[regno] = size;
7474 size += GET_MODE_SIZE (mode);
7475 apply_args_mode[regno] = mode;
7476 }
7477 else
7478 {
7479 apply_args_mode[regno] = VOIDmode;
7480 apply_args_reg_offset[regno] = 0;
bbf6f052 7481 }
7482 }
7483 return size;
7484}
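/* Illustration only, not part of this file: the recurring idiom

	if (size % align != 0)
	  size = CEIL (size, align) * align;

   rounds SIZE up to the next multiple of ALIGN; e.g. size 6 with
   align 4 gives CEIL (6, 4) * 4 = 2 * 4 = 8, so each saved register
   starts at a properly aligned offset in the block.  */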
7485
7486/* Return the size required for the block returned by __builtin_apply,
7487 and initialize apply_result_mode. */
7488
7489static int
7490apply_result_size ()
7491{
7492 static int size = -1;
7493 int align, regno;
7494 enum machine_mode mode;
7495
7496 /* The values computed by this function never change. */
7497 if (size < 0)
7498 {
7499 size = 0;
7500
7501 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7502 if (FUNCTION_VALUE_REGNO_P (regno))
7503 {
7504 /* Search for the proper mode for copying this register's
7505 value. I'm not sure this is right, but it works so far. */
7506 enum machine_mode best_mode = VOIDmode;
7507
7508 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7509 mode != TImode;
7510 mode = GET_MODE_WIDER_MODE (mode))
7511 if (HARD_REGNO_MODE_OK (regno, mode))
7512 best_mode = mode;
7513
7514 if (best_mode == VOIDmode)
7515 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7516 mode != VOIDmode;
7517 mode = GET_MODE_WIDER_MODE (mode))
7518 if (HARD_REGNO_MODE_OK (regno, mode)
7519 && (mov_optab->handlers[(int) mode].insn_code
7520 != CODE_FOR_nothing))
7521 best_mode = mode;
7522
7523 mode = best_mode;
7524 if (mode == VOIDmode)
7525 abort ();
7526
7527 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7528 if (size % align != 0)
7529 size = CEIL (size, align) * align;
7530 size += GET_MODE_SIZE (mode);
7531 apply_result_mode[regno] = mode;
7532 }
7533 else
ca695ac9 7534 apply_result_mode[regno] = VOIDmode;
7535
7536 /* Allow targets that use untyped_call and untyped_return to override
7537 the size so that machine-specific information can be stored here. */
7538#ifdef APPLY_RESULT_SIZE
7539 size = APPLY_RESULT_SIZE;
7540#endif
7541 }
7542 return size;
7543}
7544
7545#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7546/* Create a vector describing the result block RESULT. If SAVEP is true,
7547 the result block is used to save the values; otherwise it is used to
7548 restore the values. */
7549
7550static rtx
7551result_vector (savep, result)
7552 int savep;
7553 rtx result;
7554{
7555 int regno, size, align, nelts;
7556 enum machine_mode mode;
7557 rtx reg, mem;
7558 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7559
7560 size = nelts = 0;
7561 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7562 if ((mode = apply_result_mode[regno]) != VOIDmode)
7563 {
7564 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7565 if (size % align != 0)
7566 size = CEIL (size, align) * align;
7567 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7568 mem = change_address (result, mode,
7569 plus_constant (XEXP (result, 0), size));
7570 savevec[nelts++] = (savep
7571 ? gen_rtx (SET, VOIDmode, mem, reg)
7572 : gen_rtx (SET, VOIDmode, reg, mem));
7573 size += GET_MODE_SIZE (mode);
7574      }
7575 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7576}
7577#endif /* HAVE_untyped_call or HAVE_untyped_return */
7578
7579/* Save the state required to perform an untyped call with the same
7580 arguments as were passed to the current function. */
7581
7582static rtx
7583expand_builtin_apply_args ()
7584{
7585 rtx registers;
7586 int size, align, regno;
7587 enum machine_mode mode;
7588
7589 /* Create a block where the arg-pointer, structure value address,
7590 and argument registers can be saved. */
7591 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7592
7593 /* Walk past the arg-pointer and structure value address. */
7594 size = GET_MODE_SIZE (Pmode);
7595 if (struct_value_rtx)
7596 size += GET_MODE_SIZE (Pmode);
7597
7598 /* Save each register used in calling a function to the block. */
7599 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7600 if ((mode = apply_args_mode[regno]) != VOIDmode)
7601      {
7602 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7603 if (size % align != 0)
7604 size = CEIL (size, align) * align;
7605 emit_move_insn (change_address (registers, mode,
7606 plus_constant (XEXP (registers, 0),
7607 size)),
7608 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7609 size += GET_MODE_SIZE (mode);
7610 }
7611
7612 /* Save the arg pointer to the block. */
7613 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7614 copy_to_reg (virtual_incoming_args_rtx));
7615 size = GET_MODE_SIZE (Pmode);
7616
7617 /* Save the structure value address unless this is passed as an
7618 "invisible" first argument. */
7619 if (struct_value_incoming_rtx)
7620 {
7621 emit_move_insn (change_address (registers, Pmode,
7622 plus_constant (XEXP (registers, 0),
7623 size)),
7624 copy_to_reg (struct_value_incoming_rtx));
7625 size += GET_MODE_SIZE (Pmode);
7626 }
7627
7628 /* Return the address of the block. */
7629 return copy_addr_to_reg (XEXP (registers, 0));
7630}
7631
7632/* Perform an untyped call and save the state required to perform an
7633 untyped return of whatever value was returned by the given function. */
7634
7635static rtx
7636expand_builtin_apply (function, arguments, argsize)
7637 rtx function, arguments, argsize;
7638{
7639 int size, align, regno;
7640 enum machine_mode mode;
7641 rtx incoming_args, result, reg, dest, call_insn;
7642 rtx old_stack_level = 0;
7643 rtx use_insns = 0;
7644
7645 /* Create a block where the return registers can be saved. */
7646 result = assign_stack_local (BLKmode, apply_result_size (), -1);
7647
ca695ac9 7648 /* ??? The argsize value should be adjusted here. */
7649
7650 /* Fetch the arg pointer from the ARGUMENTS block. */
7651 incoming_args = gen_reg_rtx (Pmode);
7652 emit_move_insn (incoming_args,
7653 gen_rtx (MEM, Pmode, arguments));
7654#ifndef STACK_GROWS_DOWNWARD
7655 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7656 incoming_args, 0, OPTAB_LIB_WIDEN);
7657#endif
7658
7659 /* Perform postincrements before actually calling the function. */
7660 emit_queue ();
7661
7662 /* Push a new argument block and copy the arguments. */
7663 do_pending_stack_adjust ();
7664 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
7665
7666 /* Push a block of memory onto the stack to store the memory arguments.
7667 Save the address in a register, and copy the memory arguments. ??? I
7668     haven't figured out how the calling convention macros affect this,
7669 but it's likely that the source and/or destination addresses in
7670 the block copy will need updating in machine specific ways. */
7671 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7672 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7673 gen_rtx (MEM, BLKmode, incoming_args),
7674 argsize,
7675 PARM_BOUNDARY / BITS_PER_UNIT);
7676
7677 /* Refer to the argument block. */
7678 apply_args_size ();
7679 arguments = gen_rtx (MEM, BLKmode, arguments);
7680
7681 /* Walk past the arg-pointer and structure value address. */
7682 size = GET_MODE_SIZE (Pmode);
7683 if (struct_value_rtx)
7684 size += GET_MODE_SIZE (Pmode);
7685
7686 /* Restore each of the registers previously saved. Make USE insns
7687 for each of these registers for use in making the call. */
7688 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7689 if ((mode = apply_args_mode[regno]) != VOIDmode)
7690 {
7691 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7692 if (size % align != 0)
7693 size = CEIL (size, align) * align;
7694 reg = gen_rtx (REG, mode, regno);
7695 emit_move_insn (reg,
7696 change_address (arguments, mode,
7697 plus_constant (XEXP (arguments, 0),
7698 size)));
7699
7700 push_to_sequence (use_insns);
7701 emit_insn (gen_rtx (USE, VOIDmode, reg));
7702 use_insns = get_insns ();
7703 end_sequence ();
7704 size += GET_MODE_SIZE (mode);
7705 }
7706
7707 /* Restore the structure value address unless this is passed as an
7708 "invisible" first argument. */
7709 size = GET_MODE_SIZE (Pmode);
7710 if (struct_value_rtx)
7711 {
7712 rtx value = gen_reg_rtx (Pmode);
7713 emit_move_insn (value,
7714 change_address (arguments, Pmode,
7715 plus_constant (XEXP (arguments, 0),
7716 size)));
7717 emit_move_insn (struct_value_rtx, value);
7718 if (GET_CODE (struct_value_rtx) == REG)
7719 {
7720 push_to_sequence (use_insns);
7721 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
7722 use_insns = get_insns ();
7723 end_sequence ();
7724	}
7725 size += GET_MODE_SIZE (Pmode);
7726 }
7727
7728 /* All arguments and registers used for the call are set up by now! */
7729 function = prepare_call_address (function, NULL_TREE, &use_insns);
7730
7731 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
7732 and we don't want to load it into a register as an optimization,
7733 because prepare_call_address already did it if it should be done. */
7734 if (GET_CODE (function) != SYMBOL_REF)
7735 function = memory_address (FUNCTION_MODE, function);
7736
7737 /* Generate the actual call instruction and save the return value. */
7738#ifdef HAVE_untyped_call
7739 if (HAVE_untyped_call)
7740 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7741 result, result_vector (1, result)));
7742 else
7743#endif
7744#ifdef HAVE_call_value
7745 if (HAVE_call_value)
7746 {
7747 rtx valreg = 0;
7748
7749 /* Locate the unique return register. It is not possible to
7750 express a call that sets more than one return register using
7751 call_value; use untyped_call for that. In fact, untyped_call
7752 only needs to save the return registers in the given block. */
7753 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7754 if ((mode = apply_result_mode[regno]) != VOIDmode)
7755 {
7756 if (valreg)
7757 abort (); /* HAVE_untyped_call required. */
7758 valreg = gen_rtx (REG, mode, regno);
7759 }
7760
7761 emit_call_insn (gen_call_value (valreg,
7762 gen_rtx (MEM, FUNCTION_MODE, function),
7763 const0_rtx, NULL_RTX, const0_rtx));
7764
7765 emit_move_insn (change_address (result, GET_MODE (valreg),
7766 XEXP (result, 0)),
7767 valreg);
7768 }
7769 else
7770#endif
7771 abort ();
7772
7773 /* Find the CALL insn we just emitted and write the USE insns before it. */
7774 for (call_insn = get_last_insn ();
7775 call_insn && GET_CODE (call_insn) != CALL_INSN;
7776 call_insn = PREV_INSN (call_insn))
7777 ;
7778
7779 if (! call_insn)
7780 abort ();
7781
7782 /* Put the USE insns before the CALL. */
7783 emit_insns_before (use_insns, call_insn);
7784
7785 /* Restore the stack. */
7786 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
7787
7788 /* Return the address of the result block. */
7789 return copy_addr_to_reg (XEXP (result, 0));
7790}
7791
ca695ac9 7792/* Perform an untyped return. */
7793
7794static void
7795expand_builtin_return (result)
7796 rtx result;
7797{
7798 int size, align, regno;
7799 enum machine_mode mode;
7800 rtx reg;
7801 rtx use_insns = 0;
7802
7803 apply_result_size ();
7804 result = gen_rtx (MEM, BLKmode, result);
7805
7806#ifdef HAVE_untyped_return
7807 if (HAVE_untyped_return)
7808 {
7809 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
7810 emit_barrier ();
7811 return;
7812 }
7813#endif
7814
7815 /* Restore the return value and note that each value is used. */
7816 size = 0;
7817 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7818 if ((mode = apply_result_mode[regno]) != VOIDmode)
7819 {
7820 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7821 if (size % align != 0)
7822 size = CEIL (size, align) * align;
7823 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
7824 emit_move_insn (reg,
7825 change_address (result, mode,
7826 plus_constant (XEXP (result, 0),
7827 size)));
7828
7829 push_to_sequence (use_insns);
7830 emit_insn (gen_rtx (USE, VOIDmode, reg));
7831 use_insns = get_insns ();
7832 end_sequence ();
7833 size += GET_MODE_SIZE (mode);
7834 }
7835
7836 /* Put the USE insns before the return. */
7837 emit_insns (use_insns);
7838
7839  /* Return whatever values were restored by jumping directly to the end
7840 of the function. */
7841 expand_null_return ();
7842}
7843\f
7844/* Expand code for a post- or pre- increment or decrement
7845 and return the RTX for the result.
7846 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
7847
7848static rtx
7849expand_increment (exp, post)
7850 register tree exp;
7851 int post;
7852{
7853 register rtx op0, op1;
7854 register rtx temp, value;
7855 register tree incremented = TREE_OPERAND (exp, 0);
7856 optab this_optab = add_optab;
7857 int icode;
7858 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7859 int op0_is_copy = 0;
7860 int single_insn = 0;
7861 /* 1 means we can't store into OP0 directly,
7862 because it is a subreg narrower than a word,
7863 and we don't dare clobber the rest of the word. */
7864 int bad_subreg = 0;
7865
ca695ac9 7866 if (output_bytecode)
7867 {
7868 bc_expand_expr (exp);
7869 return NULL_RTX;
7870 }
7871
7872 /* Stabilize any component ref that might need to be
7873 evaluated more than once below. */
7874 if (!post
7875 || TREE_CODE (incremented) == BIT_FIELD_REF
7876 || (TREE_CODE (incremented) == COMPONENT_REF
7877 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
7878 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
7879 incremented = stabilize_reference (incremented);
7880 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
7881 ones into save exprs so that they don't accidentally get evaluated
7882 more than once by the code below. */
7883 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
7884 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
7885 incremented = save_expr (incremented);
7886
7887 /* Compute the operands as RTX.
7888 Note whether OP0 is the actual lvalue or a copy of it:
7889 I believe it is a copy iff it is a register or subreg
7890 and insns were generated in computing it. */
7891
7892 temp = get_last_insn ();
7893 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
7894
7895 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
7896     in place but instead must do sign- or zero-extension during assignment,
7897 so we copy it into a new register and let the code below use it as
7898 a copy.
7899
7900     Note that we can safely modify this SUBREG since it is known not to be
7901 shared (it was made by the expand_expr call above). */
7902
7903 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
7904 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
7905 else if (GET_CODE (op0) == SUBREG
7906 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
7907 bad_subreg = 1;
7908
7909 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
7910 && temp != get_last_insn ());
7911 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7912
7913 /* Decide whether incrementing or decrementing. */
7914 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
7915 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7916 this_optab = sub_optab;
7917
7918 /* Convert decrement by a constant into a negative increment. */
7919 if (this_optab == sub_optab
7920 && GET_CODE (op1) == CONST_INT)
7921 {
7922 op1 = GEN_INT (- INTVAL (op1));
7923 this_optab = add_optab;
7924 }
7925
7926 /* For a preincrement, see if we can do this with a single instruction. */
7927 if (!post)
7928 {
7929 icode = (int) this_optab->handlers[(int) mode].insn_code;
7930 if (icode != (int) CODE_FOR_nothing
7931 /* Make sure that OP0 is valid for operands 0 and 1
7932 of the insn we want to queue. */
7933 && (*insn_operand_predicate[icode][0]) (op0, mode)
7934 && (*insn_operand_predicate[icode][1]) (op0, mode)
7935 && (*insn_operand_predicate[icode][2]) (op1, mode))
7936 single_insn = 1;
7937 }
7938
7939 /* If OP0 is not the actual lvalue, but rather a copy in a register,
7940 then we cannot just increment OP0. We must therefore contrive to
7941 increment the original value. Then, for postincrement, we can return
7942 OP0 since it is a copy of the old value. For preincrement, expand here
7943 unless we can do it with a single insn.
7944
7945 Likewise if storing directly into OP0 would clobber high bits
7946 we need to preserve (bad_subreg). */
7947 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
7948 {
7949 /* This is the easiest way to increment the value wherever it is.
7950 Problems with multiple evaluation of INCREMENTED are prevented
7951 because either (1) it is a component_ref or preincrement,
7952 in which case it was stabilized above, or (2) it is an array_ref
7953 with constant index in an array in a register, which is
7954 safe to reevaluate. */
7955 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
7956 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7957 ? MINUS_EXPR : PLUS_EXPR),
7958 TREE_TYPE (exp),
7959 incremented,
7960 TREE_OPERAND (exp, 1));
7961 temp = expand_assignment (incremented, newexp, ! post, 0);
7962 return post ? op0 : temp;
7963 }
7964
7965 if (post)
7966 {
7967 /* We have a true reference to the value in OP0.
7968 If there is an insn to add or subtract in this mode, queue it.
7969 Queueing the increment insn avoids the register shuffling
7970 that often results if we must increment now and first save
7971 the old value for subsequent use. */
7972
7973#if 0 /* Turned off to avoid making extra insn for indexed memref. */
7974 op0 = stabilize (op0);
7975#endif
7976
7977 icode = (int) this_optab->handlers[(int) mode].insn_code;
7978 if (icode != (int) CODE_FOR_nothing
7979 /* Make sure that OP0 is valid for operands 0 and 1
7980 of the insn we want to queue. */
7981 && (*insn_operand_predicate[icode][0]) (op0, mode)
7982 && (*insn_operand_predicate[icode][1]) (op0, mode))
7983 {
7984 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
7985 op1 = force_reg (mode, op1);
7986
7987 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
7988 }
7989 }
7990
7991 /* Preincrement, or we can't increment with one simple insn. */
7992 if (post)
7993 /* Save a copy of the value before inc or dec, to return it later. */
7994 temp = value = copy_to_reg (op0);
7995 else
7996 /* Arrange to return the incremented value. */
7997 /* Copy the rtx because expand_binop will protect from the queue,
7998 and the results of that would be invalid for us to return
7999 if our caller does emit_queue before using our result. */
8000 temp = copy_rtx (value = op0);
8001
8002 /* Increment however we can. */
8003 op1 = expand_binop (mode, this_optab, value, op1, op0,
8004 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8005 /* Make sure the value is stored into OP0. */
8006 if (op1 != op0)
8007 emit_move_insn (op0, op1);
8008
8009 return temp;
8010}
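/* Illustration only, not part of this file: the distinction handled
   above is the usual C one,

	int i = 5;
	int a = i++;	(a == 5, i == 6: the saved copy is returned)
	int b = ++i;	(b == 7, i == 7: the updated value is returned)

   which is why the POST case copies OP0 into TEMP before adding.  */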
8011\f
8012/* Expand all function calls contained within EXP, innermost ones first.
8013 But don't look within expressions that have sequence points.
8014 For each CALL_EXPR, record the rtx for its value
8015 in the CALL_EXPR_RTL field. */
8016
8017static void
8018preexpand_calls (exp)
8019 tree exp;
8020{
8021 register int nops, i;
8022 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8023
8024 if (! do_preexpand_calls)
8025 return;
8026
ca695ac9 8027 /* Only expressions and references can contain calls. */
8028
8029 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8030 return;
8031
8032 switch (TREE_CODE (exp))
8033 {
8034 case CALL_EXPR:
8035 /* Do nothing if already expanded. */
8036 if (CALL_EXPR_RTL (exp) != 0)
8037 return;
8038
8039 /* Do nothing to built-in functions. */
8040 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8041 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8042 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8043 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8044 return;
8045
8046 case COMPOUND_EXPR:
8047 case COND_EXPR:
8048 case TRUTH_ANDIF_EXPR:
8049 case TRUTH_ORIF_EXPR:
8050 /* If we find one of these, then we can be sure
8051 the adjust will be done for it (since it makes jumps).
8052 Do it now, so that if this is inside an argument
8053 of a function, we don't get the stack adjustment
8054 after some other args have already been pushed. */
8055 do_pending_stack_adjust ();
8056 return;
8057
8058 case BLOCK:
8059 case RTL_EXPR:
8060 case WITH_CLEANUP_EXPR:
8061 return;
8062
8063 case SAVE_EXPR:
8064 if (SAVE_EXPR_RTL (exp) != 0)
8065 return;
8066 }
8067
8068 nops = tree_code_length[(int) TREE_CODE (exp)];
8069 for (i = 0; i < nops; i++)
8070 if (TREE_OPERAND (exp, i) != 0)
8071 {
8072 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8073 if (type == 'e' || type == '<' || type == '1' || type == '2'
8074 || type == 'r')
8075 preexpand_calls (TREE_OPERAND (exp, i));
8076 }
8077}
8078\f
8079/* At the start of a function, record that we have no previously-pushed
8080 arguments waiting to be popped. */
8081
8082void
8083init_pending_stack_adjust ()
8084{
8085 pending_stack_adjust = 0;
8086}
8087
8088/* When exiting from function, if safe, clear out any pending stack adjust
8089 so the adjustment won't get done. */
8090
8091void
8092clear_pending_stack_adjust ()
fb2ca25a 8093{
8094#ifdef EXIT_IGNORE_STACK
8095 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8096 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8097 && ! flag_inline_functions)
8098 pending_stack_adjust = 0;
fb2ca25a 8099#endif
8100}
8101
8102/* Pop any previously-pushed arguments that have not been popped yet. */
8103
8104void
8105do_pending_stack_adjust ()
8106{
8107 if (inhibit_defer_pop == 0)
8108 {
8109 if (pending_stack_adjust != 0)
8110 adjust_stack (GEN_INT (pending_stack_adjust));
8111 pending_stack_adjust = 0;
8112 }
8113}
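/* Illustration only, not part of this file: with deferred popping,

	f (1);
	g (2);

   neither call pops its own argument bytes; PENDING_STACK_ADJUST
   accumulates both sizes and the single adjust_stack above restores
   the stack pointer once, saving an instruction per call.  */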
8114
8115/* Expand all cleanups up to OLD_CLEANUPS.
8116 Needed here, and also for language-dependent calls. */
8117
8118void
8119expand_cleanups_to (old_cleanups)
8120 tree old_cleanups;
0006469d 8121{
ca695ac9 8122 while (cleanups_this_call != old_cleanups)
0006469d 8123 {
8124 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
8125 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8126 }
8127}
8128\f
8129/* Expand conditional expressions. */
8130
8131/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8132 LABEL is an rtx of code CODE_LABEL, in this function and all the
8133 functions here. */
8134
8135void
8136jumpifnot (exp, label)
8137 tree exp;
8138 rtx label;
8139{
8140 do_jump (exp, label, NULL_RTX);
8141}
8142
ca695ac9 8143/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8144
8145void
8146jumpif (exp, label)
8147 tree exp;
8148 rtx label;
8149{
8150 do_jump (exp, NULL_RTX, label);
8151}
8152
8153/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8154 the result is zero, or IF_TRUE_LABEL if the result is one.
8155 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8156 meaning fall through in that case.
8157
8158 do_jump always does any pending stack adjust except when it does not
8159 actually perform a jump. An example where there is no jump
8160 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8161
8162 This function is responsible for optimizing cases such as
8163 &&, || and comparison operators in EXP. */
8164
8165void
8166do_jump (exp, if_false_label, if_true_label)
8167 tree exp;
8168 rtx if_false_label, if_true_label;
0006469d 8169{
8170 register enum tree_code code = TREE_CODE (exp);
8171 /* Some cases need to create a label to jump to
8172 in order to properly fall through.
8173 These cases set DROP_THROUGH_LABEL nonzero. */
8174 rtx drop_through_label = 0;
8175 rtx temp;
8176 rtx comparison = 0;
8177 int i;
8178 tree type;
8179
ca695ac9 8180 emit_queue ();
8181
8182 switch (code)
8183 {
8184 case ERROR_MARK:
8185 break;
8186
8187 case INTEGER_CST:
8188 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8189 if (temp)
8190 emit_jump (temp);
8191 break;
8192
8193#if 0
8194 /* This is not true with #pragma weak */
8195 case ADDR_EXPR:
8196 /* The address of something can never be zero. */
8197 if (if_true_label)
8198 emit_jump (if_true_label);
8199 break;
8200#endif
8201
8202 case NOP_EXPR:
8203 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8204 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8205 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8206 goto normal;
8207 case CONVERT_EXPR:
8208 /* If we are narrowing the operand, we have to do the compare in the
8209 narrower mode. */
8210 if ((TYPE_PRECISION (TREE_TYPE (exp))
8211 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8212 goto normal;
8213 case NON_LVALUE_EXPR:
8214 case REFERENCE_EXPR:
8215 case ABS_EXPR:
8216 case NEGATE_EXPR:
8217 case LROTATE_EXPR:
8218 case RROTATE_EXPR:
8219 /* These cannot change zero->non-zero or vice versa. */
8220 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8221 break;
8222
8223#if 0
8224 /* This is never less insns than evaluating the PLUS_EXPR followed by
8225 a test and can be longer if the test is eliminated. */
8226 case PLUS_EXPR:
8227 /* Reduce to minus. */
8228 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8229 TREE_OPERAND (exp, 0),
8230 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8231 TREE_OPERAND (exp, 1))));
8232 /* Process as MINUS. */
0006469d 8233#endif
8234
8235 case MINUS_EXPR:
8236 /* Non-zero iff operands of minus differ. */
8237 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8238 TREE_OPERAND (exp, 0),
8239 TREE_OPERAND (exp, 1)),
8240 NE, NE);
8241 break;
8242
8243 case BIT_AND_EXPR:
8244 /* If we are AND'ing with a small constant, do this comparison in the
8245 smallest type that fits. If the machine doesn't have comparisons
8246 that small, it will be converted back to the wider comparison.
8247 This helps if we are testing the sign bit of a narrower object.
8248 combine can't do this for us because it can't know whether a
8249 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8250
8251 if (! SLOW_BYTE_ACCESS
8252 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8253 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8254 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8255 && (type = type_for_size (i + 1, 1)) != 0
8256 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8257 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8258 != CODE_FOR_nothing))
8259 {
8260 do_jump (convert (type, exp), if_false_label, if_true_label);
8261 break;
8262 }
8263 goto normal;
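/* Illustration only, not part of this file: for a test like

	if (x & 0x80) ...	(x an int)

   floor_log2 (0x80) is 7, so an 8-bit unsigned type fits the mask and
   the jump can be compiled as a QImode compare on machines that have
   byte comparisons, rather than an AND and compare in full SImode.  */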
8264
8265 case TRUTH_NOT_EXPR:
8266 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8267 break;
8268
8269 case TRUTH_ANDIF_EXPR:
8270 if (if_false_label == 0)
8271 if_false_label = drop_through_label = gen_label_rtx ();
8272 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8273 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8274 break;
8275
8276 case TRUTH_ORIF_EXPR:
8277 if (if_true_label == 0)
8278 if_true_label = drop_through_label = gen_label_rtx ();
8279 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8280 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8281 break;
8282
ca695ac9 8283 case COMPOUND_EXPR:
0088fcb1 8284 push_temp_slots ();
8285 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8286 free_temp_slots ();
0088fcb1 8287 pop_temp_slots ();
8288 emit_queue ();
8289 do_pending_stack_adjust ();
8290 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8291 break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
        int bitsize, bitpos, unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep);

        type = type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          register rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);
          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
        }
      break;
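
      /* In outline, the general COND_EXPR case above emits

             <test A; jump to LABEL1 if false>
             <jumps for B>
         LABEL1:
             <jumps for C>
         DROP_THROUGH_LABEL:

         so each arm jumps straight to the caller's labels when they
         exist and otherwise falls out at DROP_THROUGH_LABEL.  */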

    case EQ_EXPR:
      if (integer_zerop (TREE_OPERAND (exp, 1)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
                 == MODE_INT)
                && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
               || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
               || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
        do_jump_by_parts_equality (exp, if_false_label, if_true_label);
      else
        comparison = compare (exp, EQ, EQ);
      break;

    case NE_EXPR:
      if (integer_zerop (TREE_OPERAND (exp, 1)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
                 == MODE_INT)
                && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
               || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
               || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
        do_jump_by_parts_equality (exp, if_true_label, if_false_label);
      else
        comparison = compare (exp, NE, NE);
      break;

    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        comparison = compare (exp, GE, GEU);
      break;
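
      /* Note how all four orderings share one by-parts worker:
         "a < b" is tested as "b > a" (SWAP == 1), and "a <= b" is
         "a > b" (SWAP == 0) with the two labels exchanged, so only a
         greater-than routine is needed for wide values.  */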

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
        comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && !can_compare_p (GET_MODE (temp)))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                       GET_MODE (temp), NULL_RTX, 0);
      else
        abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
        emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

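/* As a sketch, comparing two signed DImode values on a 32-bit target
   (nwords == 2) yields roughly

        <compare high words, signed>    GT  -> if_true_label
                                        NE  -> if_false_label
        <compare low words, unsigned>   GTU -> if_true_label
        <unconditional>                     -> if_false_label

   where only the high-order word is compared signed.  */
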
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
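
/* This is the rtx-level twin of do_jump_by_parts_greater above: the
   word loop is identical, but the operands arrive already expanded
   with an explicit mode and signedness, so callers that never had a
   tree expression can use it too.  */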

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      /* A statically unequal pair of words makes the whole comparison
         statically false; a statically equal pair decides nothing, so
         only a non-constant comparison needs a runtime jump.  */
      if (comp == const0_rtx)
        emit_jump (if_false_label);
      else if (comp != const_true_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      /* A statically nonzero word decides the whole test; a statically
         zero word decides nothing.  */
      if (comp == const0_rtx)
        emit_jump (if_false_label);
      else if (comp != const_true_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      if (prev != 0)
        prev = PREV_INSN (prev);

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
        abort ();

      /* Here we get the insn before what was just emitted.
         On some machines, emitting the branch can discard
         the previous compare insn and emit a replacement.  */
      if (prev == 0)
        /* If there's only one preceding insn...  */
        insn = get_insns ();
      else
        insn = NEXT_INSN (prev);

      for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
\f
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
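
/* A typical caller therefore does something like

        comparison = compare_from_rtx (op0, const0_rtx, NE, 1,
                                       GET_MODE (op0), NULL_RTX, 0);

   and gets back either a folded const_true_rtx/const0_rtx or an rtx
   such as (ne (cc0) (const_int 0)) to hand to do_jump_for_compare,
   the cc0-setting compare insn having already been emitted.  */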
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
                                                    NULL_RTX, VOIDmode, 0)));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
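
  /* For instance, "(x & 8) != 0" becomes, in effect,

        op0 = x >> 3;
        op0 &= 1;

     while the EQ form xors with 1 before the final mask -- a few
     cheap insns and neither an scc insn nor a branch.  */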

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
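
/* The set/compare/jump/set fallback above materializes, e.g.,
   "r = (a < b)" as

        r = 1;  compare a, b;  branch-if-less L;  r = 0;  L:

   which is why do_store_flag only commits to it once the operands have
   already been expanded.  */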
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
                   gen_rtx (MULT, Pmode, index,
                            GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}
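
/* The address arithmetic above amounts, in effect, to

        goto *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE));

   and the single unsigned range check up front stands in for both
   bounds tests, since an index below the minimum wraps around to a
   huge unsigned value.  */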

#endif /* HAVE_tablejump */

/* Emit a suitable bytecode to load a value from memory, assuming a pointer
   to that value is on the top of the stack.  The resulting type is TYPE, and
   the source declaration is DECL.  */

void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  /* Bit fields are special.  We only know about signed and
     unsigned ints, and enums.  The latter are treated as
     signed integers.  */

  if (DECL_BIT_FIELD (decl))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE
          || TREE_CODE (type) == INTEGER_TYPE)
        opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
      else
        abort ();
    }
  else
    /* See corresponding comment in bc_store_memory().  */
    if (TYPE_MODE (type) == BLKmode
        || TYPE_MODE (type) == VOIDmode)
      return;
    else
      opcode = mode_to_load_map [(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Store the contents of the second stack slot to the address in the
   top stack slot.  DECL is the declaration of the destination and is used
   to determine whether we're dealing with a bitfield.  */

void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  if (DECL_BIT_FIELD (decl))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE
          || TREE_CODE (type) == INTEGER_TYPE)
        opcode = sstoreBI;
      else
        abort ();
    }
  else
    if (TYPE_MODE (type) == BLKmode)
      {
        /* Copy structure.  This expands to a block copy instruction, storeBLK.
           In addition to the arguments expected by the other store instructions,
           it also expects a type size (SImode) on top of the stack, which is the
           structure size in size units (usually bytes).  The two first arguments
           are already on the stack; so we just put the size on level 1.  For some
           other languages, the size may be variable, this is why we don't encode
           it as a storeBLK literal, but rather treat it as a full-fledged
           expression.  */

        bc_expand_expr (TYPE_SIZE (type));
        opcode = storeBLK;
      }
    else
      opcode = mode_to_store_map [(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Allocate local stack space sufficient to hold a value of the given
   SIZE at alignment boundary ALIGNMENT bits.  ALIGNMENT must be an
   integral power of 2.  A special case is locals of type VOID, which
   have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
   remapped into the corresponding attribute of SI.  */

rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;

  if (size < 0)
    abort ();

  /* Normalize size and alignment.  */
  if (!size)
    size = UNITS_PER_WORD;

  if (alignment < BITS_PER_UNIT)
    byte_alignment = 1 << (INT_ALIGN - 1);
  else
    byte_alignment = alignment / BITS_PER_UNIT;

  /* Round the frame offset up to the next multiple of the alignment.  */
  if (local_vars_size & (byte_alignment - 1))
    local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}
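
/* For example, with local_vars_size == 5, a request for a 4-byte local
   aligned to 32 bits first rounds the offset up to 8, returns an rtx
   naming offset 8, and leaves local_vars_size at 12.  */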

/* Allocate variable-sized local array.  Variable-sized arrays are
   actually pointers to the address in memory where they are stored.  */

rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));

  /* Align pointer.  */
  if (local_vars_size & ptralign)
    local_vars_size += ptralign - (local_vars_size & ptralign);

  /* Note down local space needed: pointer to block; also return
     dummy rtx.  */

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
  return retval;
}

/* Push the machine address for the given external variable offset.  */

void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
                         BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

static char *
bc_strdup (s)
     char *s;
{
  char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
  strcpy (new, s);
  return new;
}

/* Like above, but expects an IDENTIFIER.  */

void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Push the machine address for the given local variable offset.  */

void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}

/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */

void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
                              / BITS_PER_UNIT));
}

/* Convert a[i] into *(a + i).  */

tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
  tree type = TREE_TYPE (exp);
  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
                           TREE_OPERAND (exp, 0));
  tree index = TREE_OPERAND (exp, 1);

  /* Convert the integer argument to a type the same size as a pointer
     so the multiply won't overflow spuriously.  */

  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
    index = convert (type_for_size (POINTER_SIZE, 0), index);

  /* The array address isn't volatile even if the array is.
     (Of course this isn't terribly relevant since the bytecode
     translator treats nearly everything as volatile anyway.)  */
  TREE_THIS_VOLATILE (array_adr) = 0;

  return build1 (INDIRECT_REF, type,
                 fold (build (PLUS_EXPR,
                              TYPE_POINTER_TO (type),
                              array_adr,
                              fold (build (MULT_EXPR,
                                           TYPE_POINTER_TO (type),
                                           index,
                                           size_in_bytes (type))))));
}
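
/* So for "int a[10]; ... a[i]" (4-byte int), this builds roughly the
   tree for *(&a + (long) i * 4): the index is first widened to pointer
   precision, and the scaling is folded where possible.  */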

/* Load the address of the component referenced by the given
   COMPONENT_REF expression.

   Returns innermost lvalue.  */

tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem, chain;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;

  tem = TREE_OPERAND (exp, 1);
  mode = DECL_MODE (tem);

  /* Compute cumulative bit offset for nested component refs
     and array refs, and find the ultimate containing object.  */

  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
        bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else if (TREE_CODE (tem) == ARRAY_REF
               && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
               && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
        bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
                   * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
                   /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
      else
        break;
    }

  bc_expand_expr (tem);

  /* For bitfields also push their offset and size.  */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
  else if ((SIval = bitpos / BITS_PER_UNIT))
    bc_emit_instruction (addconstPSI, SIval);

  return (TREE_OPERAND (exp, 1));
}

/* Emit code to push two SI constants.  */

void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}

/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard.  */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);

  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:

      return (bc_expand_component_address (exp));

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
         TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
         also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
          && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
                                 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:

      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                             BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer.  */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case RESULT_DECL:

      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

#if 0
      if (BYTECODE_LABEL (DECL_RTL (exp)))
        bc_load_externaddr (DECL_RTL (exp));
#endif

      if (DECL_EXTERNAL (exp))
        bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                               (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
      else
        bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer.  */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case STRING_CST:
      {
        rtx r;

        bc_emit_bytecode (constP);
        r = output_constant_def (exp);
        bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
        fputc ('\n', stderr);
#endif
      }
      break;

    default:

      abort ();
      break;
    }

  /* Most lvalues don't have components.  */
  return (exp);
}

/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   plus the minimal alignment shifted left 8 bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:

      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    case ERROR_MARK:

      val = 0;
      break;

    default:

      abort ();
    }
  return build_int_2 (val, 0);
}
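
/* For instance, a 32-bit int parameter in SImode with 32-bit alignment
   encodes as (int) SImode | (32 << 8), so the low byte carries the
   mode and the rest the alignment.  */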

/* Generate constructor label.  */

char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}

/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;
  rtx constr_rtx;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* Constructor type is structure.  */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
         clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
          != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
        {
          bc_emit_instruction (duplicate);
          bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
          bc_emit_instruction (clearBLK);
        }

      /* Store each element of the constructor into the corresponding
         field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
          mode = DECL_MODE (field);
          unsignedp = TREE_UNSIGNED (field);

          bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

          bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                          /* The alignment of TARGET is
                             at least what its type requires.  */
                          VOIDmode, 0,
                          TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                          int_size_in_bytes (TREE_TYPE (constr)));
        }
    }
  else

    /* Constructor type is array.  */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
        register tree elt;
        register int i;
        tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
        int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
        int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
        tree elttype = TREE_TYPE (TREE_TYPE (constr));

        /* If the constructor has fewer elements than the array,
           clear the whole array first.  */

        if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
          {
            bc_emit_instruction (duplicate);
            bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
            bc_emit_instruction (clearBLK);
          }

        /* Store each element of the constructor into the corresponding
           element of TARGET, determined by counting the elements.  */

        for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
             elt;
             elt = TREE_CHAIN (elt), i++)
          {
            register enum machine_mode mode;
            int bitsize;
            int bitpos;
            int unsignedp;

            mode = TYPE_MODE (elttype);
            bitsize = GET_MODE_BITSIZE (mode);
            unsignedp = TREE_UNSIGNED (elttype);

            bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
                      /* * TYPE_SIZE_UNIT (elttype) */ );

            bc_store_field (elt, bitsize, bitpos, mode,
                            TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                            /* The alignment of TARGET is
                               at least what its type requires.  */
                            VOIDmode, 0,
                            TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                            int_size_in_bytes (TREE_TYPE (constr)));
          }
      }
}

/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
                value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer.  */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    {
      bc_store_bit_field (bitpos, bitsize, unsignedp);
      return;
    }
  else
    /* Not bit field.  */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member.  */
      if (offset)
        bc_emit_instruction (addconstPSI, offset);

      /* Store.  */
      bc_store_memory (type, field);
    }
}

/* Store SI/SU in bitfield.  */

void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Store.  */
  bc_emit_instruction (sstoreBI);
}

/* Load SI/SU from bitfield.  */

void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend.  */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}

/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);
      /* Fall through to drop the second level as well.  */

    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}