]> gcc.gnu.org Git - gcc.git/blame - gcc/expr.c
cse.c (cse_insn): Set "cse_jumps_altered" when collapsing a switch statement into...
[gcc.git] / gcc / expr.c
CommitLineData
bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
4be204f0 2 Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.
bbf6f052
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21#include "config.h"
ca695ac9 22#include "machmode.h"
bbf6f052
RK
23#include "rtl.h"
24#include "tree.h"
ca695ac9 25#include "obstack.h"
bbf6f052
RK
26#include "flags.h"
27#include "function.h"
28#include "insn-flags.h"
29#include "insn-codes.h"
30#include "expr.h"
31#include "insn-config.h"
32#include "recog.h"
33#include "output.h"
bbf6f052
RK
34#include "typeclass.h"
35
ca695ac9
JB
36#include "bytecode.h"
37#include "bc-opcode.h"
38#include "bc-typecd.h"
39#include "bc-optab.h"
40#include "bc-emit.h"
41
42
bbf6f052
RK
43#define CEIL(x,y) (((x) + (y) - 1) / (y))
44
45/* Decide whether a function's arguments should be processed
bbc8a071
RK
46 from first to last or from last to first.
47
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
bbf6f052 50
bbf6f052 51#ifdef PUSH_ROUNDING
bbc8a071 52
3319a347 53#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
bbf6f052
RK
54#define PUSH_ARGS_REVERSED /* If it's last to first */
55#endif
bbc8a071 56
bbf6f052
RK
57#endif
58
59#ifndef STACK_PUSH_CODE
60#ifdef STACK_GROWS_DOWNWARD
61#define STACK_PUSH_CODE PRE_DEC
62#else
63#define STACK_PUSH_CODE PRE_INC
64#endif
65#endif
66
67/* Like STACK_BOUNDARY but in units of bytes, not bits. */
68#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
69
70/* If this is nonzero, we do not bother generating VOLATILE
71 around volatile memory references, and we are willing to
72 output indirect addresses. If cse is to follow, we reject
73 indirect addresses so a useful potential cse is generated;
74 if it is used only once, instruction combination will produce
75 the same indirect address eventually. */
76int cse_not_expected;
77
78/* Nonzero to generate code for all the subroutines within an
79 expression before generating the upper levels of the expression.
80 Nowadays this is never zero. */
81int do_preexpand_calls = 1;
82
83/* Number of units that we should eventually pop off the stack.
84 These are the arguments to function calls that have already returned. */
85int pending_stack_adjust;
86
87/* Nonzero means stack pops must not be deferred, and deferred stack
88 pops must not be output. It is nonzero inside a function call,
89 inside a conditional expression, inside a statement expression,
90 and in other cases as well. */
91int inhibit_defer_pop;
92
93/* A list of all cleanups which belong to the arguments of
94 function calls being expanded by expand_call. */
95tree cleanups_this_call;
96
97/* Nonzero means __builtin_saveregs has already been done in this function.
98 The value is the pseudoreg containing the value __builtin_saveregs
99 returned. */
100static rtx saveregs_value;
101
dcf76fff
TW
102/* Similarly for __builtin_apply_args. */
103static rtx apply_args_value;
104
4969d05d
RK
105/* This structure is used by move_by_pieces to describe the move to
106 be performed. */
107
108struct move_by_pieces
109{
110 rtx to;
111 rtx to_addr;
112 int autinc_to;
113 int explicit_inc_to;
114 rtx from;
115 rtx from_addr;
116 int autinc_from;
117 int explicit_inc_from;
118 int len;
119 int offset;
120 int reverse;
121};
122
c02bd5d9
JB
123/* Used to generate bytecodes: keep track of size of local variables,
124 as well as depth of arithmetic stack. (Notice that variables are
125 stored on the machine's stack, not the arithmetic stack.) */
126
127int local_vars_size;
128extern int stack_depth;
129extern int max_stack_depth;
292b1216 130extern struct obstack permanent_obstack;
c02bd5d9
JB
131
132
4969d05d
RK
133static rtx enqueue_insn PROTO((rtx, rtx));
134static int queued_subexp_p PROTO((rtx));
135static void init_queue PROTO((void));
136static void move_by_pieces PROTO((rtx, rtx, int, int));
137static int move_by_pieces_ninsns PROTO((unsigned int, int));
138static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
139 struct move_by_pieces *));
140static void group_insns PROTO((rtx));
141static void store_constructor PROTO((tree, rtx));
142static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
143 enum machine_mode, int, int, int));
144static tree save_noncopied_parts PROTO((tree, tree));
145static tree init_noncopied_parts PROTO((tree, tree));
146static int safe_from_p PROTO((rtx, tree));
147static int fixed_type_p PROTO((tree));
148static int get_pointer_alignment PROTO((tree, unsigned));
149static tree string_constant PROTO((tree, tree *));
150static tree c_strlen PROTO((tree));
151static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
0006469d
TW
152static int apply_args_size PROTO((void));
153static int apply_result_size PROTO((void));
154static rtx result_vector PROTO((int, rtx));
155static rtx expand_builtin_apply_args PROTO((void));
156static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
157static void expand_builtin_return PROTO((rtx));
4969d05d 158static rtx expand_increment PROTO((tree, int));
ca695ac9
JB
159rtx bc_expand_increment PROTO((struct increment_operator *, tree));
160tree bc_runtime_type_code PROTO((tree));
161rtx bc_allocate_local PROTO((int, int));
162void bc_store_memory PROTO((tree, tree));
163tree bc_expand_component_address PROTO((tree));
164tree bc_expand_address PROTO((tree));
165void bc_expand_constructor PROTO((tree));
166void bc_adjust_stack PROTO((int));
167tree bc_canonicalize_array_ref PROTO((tree));
168void bc_load_memory PROTO((tree, tree));
169void bc_load_externaddr PROTO((rtx));
170void bc_load_externaddr_id PROTO((tree, int));
171void bc_load_localaddr PROTO((rtx));
172void bc_load_parmaddr PROTO((rtx));
4969d05d
RK
173static void preexpand_calls PROTO((tree));
174static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
f81497d9 175static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
4969d05d
RK
176static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
177static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
178static void do_jump_for_compare PROTO((rtx, rtx, rtx));
179static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
180static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
bbf6f052 181
4fa52007
RK
182/* Record for each mode whether we can move a register directly to or
183 from an object of that mode in memory. If we can't, we won't try
184 to use that mode directly when accessing a field of that mode. */
185
186static char direct_load[NUM_MACHINE_MODES];
187static char direct_store[NUM_MACHINE_MODES];
188
bbf6f052
RK
189/* MOVE_RATIO is the number of move instructions that is better than
190 a block move. */
191
192#ifndef MOVE_RATIO
266007a7 193#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
bbf6f052
RK
194#define MOVE_RATIO 2
195#else
196/* A value of around 6 would minimize code size; infinity would minimize
197 execution time. */
198#define MOVE_RATIO 15
199#endif
200#endif
e87b4f3f 201
266007a7 202/* This array records the insn_code of insns to perform block moves. */
e6677db3 203enum insn_code movstr_optab[NUM_MACHINE_MODES];
266007a7 204
e87b4f3f
RS
205/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
206
207#ifndef SLOW_UNALIGNED_ACCESS
208#define SLOW_UNALIGNED_ACCESS 0
209#endif
0006469d
TW
210
211/* Register mappings for target machines without register windows. */
212#ifndef INCOMING_REGNO
213#define INCOMING_REGNO(OUT) (OUT)
214#endif
215#ifndef OUTGOING_REGNO
216#define OUTGOING_REGNO(IN) (IN)
217#endif
bbf6f052 218\f
ca695ac9
JB
219/* Maps used to convert modes to const, load, and store bytecodes. */
220enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
221enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
222enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
223
224/* Initialize maps used to convert modes to const, load, and store
225 bytecodes. */
226void
227bc_init_mode_to_opcode_maps ()
228{
229 int mode;
230
6bd6178d 231 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
ca695ac9
JB
232 mode_to_const_map[mode] =
233 mode_to_load_map[mode] =
234 mode_to_store_map[mode] = neverneverland;
235
236#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
6bd6178d
RK
237 mode_to_const_map[(int) SYM] = CONST; \
238 mode_to_load_map[(int) SYM] = LOAD; \
239 mode_to_store_map[(int) SYM] = STORE;
ca695ac9
JB
240
241#include "modemap.def"
242#undef DEF_MODEMAP
243}
244\f
4fa52007 245/* This is run once per compilation to set up which modes can be used
266007a7 246 directly in memory and to initialize the block move optab. */
4fa52007
RK
247
248void
249init_expr_once ()
250{
251 rtx insn, pat;
252 enum machine_mode mode;
e2549997
RS
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
4fa52007 256 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
e2549997 257 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
4fa52007
RK
258
259 start_sequence ();
260 insn = emit_insn (gen_rtx (SET, 0, 0));
261 pat = PATTERN (insn);
262
263 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
264 mode = (enum machine_mode) ((int) mode + 1))
265 {
266 int regno;
267 rtx reg;
268 int num_clobbers;
269
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
e2549997 272 PUT_MODE (mem1, mode);
4fa52007 273
e6fe56a4
RK
274 /* See if there is some register that can be used in this mode and
275 directly loaded or stored from memory. */
276
7308a047
RS
277 if (mode != VOIDmode && mode != BLKmode)
278 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
279 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
280 regno++)
281 {
282 if (! HARD_REGNO_MODE_OK (regno, mode))
283 continue;
e6fe56a4 284
7308a047 285 reg = gen_rtx (REG, mode, regno);
e6fe56a4 286
7308a047
RS
287 SET_SRC (pat) = mem;
288 SET_DEST (pat) = reg;
289 if (recog (pat, insn, &num_clobbers) >= 0)
290 direct_load[(int) mode] = 1;
e6fe56a4 291
e2549997
RS
292 SET_SRC (pat) = mem1;
293 SET_DEST (pat) = reg;
294 if (recog (pat, insn, &num_clobbers) >= 0)
295 direct_load[(int) mode] = 1;
296
7308a047
RS
297 SET_SRC (pat) = reg;
298 SET_DEST (pat) = mem;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_store[(int) mode] = 1;
e2549997
RS
301
302 SET_SRC (pat) = reg;
303 SET_DEST (pat) = mem1;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_store[(int) mode] = 1;
7308a047 306 }
4fa52007
RK
307 }
308
309 end_sequence ();
310}
311
bbf6f052
RK
312/* This is run at the start of compiling a function. */
313
314void
315init_expr ()
316{
317 init_queue ();
318
319 pending_stack_adjust = 0;
320 inhibit_defer_pop = 0;
321 cleanups_this_call = 0;
322 saveregs_value = 0;
0006469d 323 apply_args_value = 0;
e87b4f3f 324 forced_labels = 0;
bbf6f052
RK
325}
326
327/* Save all variables describing the current status into the structure *P.
328 This is used before starting a nested function. */
329
330void
331save_expr_status (p)
332 struct function *p;
333{
334 /* Instead of saving the postincrement queue, empty it. */
335 emit_queue ();
336
337 p->pending_stack_adjust = pending_stack_adjust;
338 p->inhibit_defer_pop = inhibit_defer_pop;
339 p->cleanups_this_call = cleanups_this_call;
340 p->saveregs_value = saveregs_value;
0006469d 341 p->apply_args_value = apply_args_value;
e87b4f3f 342 p->forced_labels = forced_labels;
bbf6f052
RK
343
344 pending_stack_adjust = 0;
345 inhibit_defer_pop = 0;
346 cleanups_this_call = 0;
347 saveregs_value = 0;
0006469d 348 apply_args_value = 0;
e87b4f3f 349 forced_labels = 0;
bbf6f052
RK
350}
351
352/* Restore all variables describing the current status from the structure *P.
353 This is used after a nested function. */
354
355void
356restore_expr_status (p)
357 struct function *p;
358{
359 pending_stack_adjust = p->pending_stack_adjust;
360 inhibit_defer_pop = p->inhibit_defer_pop;
361 cleanups_this_call = p->cleanups_this_call;
362 saveregs_value = p->saveregs_value;
0006469d 363 apply_args_value = p->apply_args_value;
e87b4f3f 364 forced_labels = p->forced_labels;
bbf6f052
RK
365}
366\f
367/* Manage the queue of increment instructions to be output
368 for POSTINCREMENT_EXPR expressions, etc. */
369
370static rtx pending_chain;
371
372/* Queue up to increment (or change) VAR later. BODY says how:
373 BODY should be the same thing you would pass to emit_insn
374 to increment right away. It will go to emit_insn later on.
375
376 The value is a QUEUED expression to be used in place of VAR
377 where you want to guarantee the pre-incrementation value of VAR. */
378
379static rtx
380enqueue_insn (var, body)
381 rtx var, body;
382{
383 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
906c4e36 384 var, NULL_RTX, NULL_RTX, body, pending_chain);
bbf6f052
RK
385 return pending_chain;
386}
387
388/* Use protect_from_queue to convert a QUEUED expression
389 into something that you can put immediately into an instruction.
390 If the queued incrementation has not happened yet,
391 protect_from_queue returns the variable itself.
392 If the incrementation has happened, protect_from_queue returns a temp
393 that contains a copy of the old value of the variable.
394
395 Any time an rtx which might possibly be a QUEUED is to be put
396 into an instruction, it must be passed through protect_from_queue first.
397 QUEUED expressions are not meaningful in instructions.
398
399 Do not pass a value through protect_from_queue and then hold
400 on to it for a while before putting it in an instruction!
401 If the queue is flushed in between, incorrect code will result. */
402
403rtx
404protect_from_queue (x, modify)
405 register rtx x;
406 int modify;
407{
408 register RTX_CODE code = GET_CODE (x);
409
410#if 0 /* A QUEUED can hang around after the queue is forced out. */
411 /* Shortcut for most common case. */
412 if (pending_chain == 0)
413 return x;
414#endif
415
416 if (code != QUEUED)
417 {
418 /* A special hack for read access to (MEM (QUEUED ...))
419 to facilitate use of autoincrement.
420 Make a copy of the contents of the memory location
421 rather than a copy of the address, but not
422 if the value is of mode BLKmode. */
423 if (code == MEM && GET_MODE (x) != BLKmode
424 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
425 {
426 register rtx y = XEXP (x, 0);
427 XEXP (x, 0) = QUEUED_VAR (y);
428 if (QUEUED_INSN (y))
429 {
430 register rtx temp = gen_reg_rtx (GET_MODE (x));
431 emit_insn_before (gen_move_insn (temp, x),
432 QUEUED_INSN (y));
433 return temp;
434 }
435 return x;
436 }
437 /* Otherwise, recursively protect the subexpressions of all
438 the kinds of rtx's that can contain a QUEUED. */
439 if (code == MEM)
3f15938e
RS
440 {
441 rtx tem = protect_from_queue (XEXP (x, 0), 0);
442 if (tem != XEXP (x, 0))
443 {
444 x = copy_rtx (x);
445 XEXP (x, 0) = tem;
446 }
447 }
bbf6f052
RK
448 else if (code == PLUS || code == MULT)
449 {
3f15938e
RS
450 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
451 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
452 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
453 {
454 x = copy_rtx (x);
455 XEXP (x, 0) = new0;
456 XEXP (x, 1) = new1;
457 }
bbf6f052
RK
458 }
459 return x;
460 }
461 /* If the increment has not happened, use the variable itself. */
462 if (QUEUED_INSN (x) == 0)
463 return QUEUED_VAR (x);
464 /* If the increment has happened and a pre-increment copy exists,
465 use that copy. */
466 if (QUEUED_COPY (x) != 0)
467 return QUEUED_COPY (x);
468 /* The increment has happened but we haven't set up a pre-increment copy.
469 Set one up now, and use it. */
470 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
471 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
472 QUEUED_INSN (x));
473 return QUEUED_COPY (x);
474}
475
476/* Return nonzero if X contains a QUEUED expression:
477 if it contains anything that will be altered by a queued increment.
478 We handle only combinations of MEM, PLUS, MINUS and MULT operators
479 since memory addresses generally contain only those. */
480
481static int
482queued_subexp_p (x)
483 rtx x;
484{
485 register enum rtx_code code = GET_CODE (x);
486 switch (code)
487 {
488 case QUEUED:
489 return 1;
490 case MEM:
491 return queued_subexp_p (XEXP (x, 0));
492 case MULT:
493 case PLUS:
494 case MINUS:
495 return queued_subexp_p (XEXP (x, 0))
496 || queued_subexp_p (XEXP (x, 1));
497 }
498 return 0;
499}
500
501/* Perform all the pending incrementations. */
502
503void
504emit_queue ()
505{
506 register rtx p;
507 while (p = pending_chain)
508 {
509 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
510 pending_chain = QUEUED_NEXT (p);
511 }
512}
513
514static void
515init_queue ()
516{
517 if (pending_chain)
518 abort ();
519}
520\f
521/* Copy data from FROM to TO, where the machine modes are not the same.
522 Both modes may be integer, or both may be floating.
523 UNSIGNEDP should be nonzero if FROM is an unsigned type.
524 This causes zero-extension instead of sign-extension. */
525
526void
527convert_move (to, from, unsignedp)
528 register rtx to, from;
529 int unsignedp;
530{
531 enum machine_mode to_mode = GET_MODE (to);
532 enum machine_mode from_mode = GET_MODE (from);
533 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
534 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
535 enum insn_code code;
536 rtx libcall;
537
538 /* rtx code for making an equivalent value. */
539 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
540
541 to = protect_from_queue (to, 1);
542 from = protect_from_queue (from, 0);
543
544 if (to_real != from_real)
545 abort ();
546
1499e0a8
RK
547 /* If FROM is a SUBREG that indicates that we have already done at least
548 the required extension, strip it. We don't handle such SUBREGs as
549 TO here. */
550
551 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
552 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
553 >= GET_MODE_SIZE (to_mode))
554 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
555 from = gen_lowpart (to_mode, from), from_mode = to_mode;
556
557 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
558 abort ();
559
bbf6f052
RK
560 if (to_mode == from_mode
561 || (from_mode == VOIDmode && CONSTANT_P (from)))
562 {
563 emit_move_insn (to, from);
564 return;
565 }
566
567 if (to_real)
568 {
81d79e2c
RS
569 rtx value;
570
b424402e
RS
571#ifdef HAVE_extendqfhf2
572 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
573 {
574 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
575 return;
576 }
577#endif
578#ifdef HAVE_extendqfsf2
579 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
580 {
581 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
582 return;
583 }
584#endif
585#ifdef HAVE_extendqfdf2
586 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
587 {
588 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
589 return;
590 }
591#endif
592#ifdef HAVE_extendqfxf2
593 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
594 {
595 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
596 return;
597 }
598#endif
599#ifdef HAVE_extendqftf2
600 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
601 {
602 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
603 return;
604 }
605#endif
606
607#ifdef HAVE_extendhfsf2
608 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
609 {
610 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
611 return;
612 }
613#endif
614#ifdef HAVE_extendhfdf2
615 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
616 {
617 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
618 return;
619 }
620#endif
621#ifdef HAVE_extendhfxf2
622 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
623 {
624 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
625 return;
626 }
627#endif
628#ifdef HAVE_extendhftf2
629 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
630 {
631 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
632 return;
633 }
634#endif
635
bbf6f052
RK
636#ifdef HAVE_extendsfdf2
637 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
638 {
639 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
640 return;
641 }
642#endif
b092b471
JW
643#ifdef HAVE_extendsfxf2
644 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
645 {
646 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
647 return;
648 }
649#endif
bbf6f052
RK
650#ifdef HAVE_extendsftf2
651 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
652 {
653 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
654 return;
655 }
656#endif
b092b471
JW
657#ifdef HAVE_extenddfxf2
658 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
659 {
660 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
661 return;
662 }
663#endif
bbf6f052
RK
664#ifdef HAVE_extenddftf2
665 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
666 {
667 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
668 return;
669 }
670#endif
b424402e
RS
671
672#ifdef HAVE_trunchfqf2
673 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
674 {
675 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
676 return;
677 }
678#endif
679#ifdef HAVE_truncsfqf2
680 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
681 {
682 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
683 return;
684 }
685#endif
686#ifdef HAVE_truncdfqf2
687 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
688 {
689 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
690 return;
691 }
692#endif
693#ifdef HAVE_truncxfqf2
694 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
695 {
696 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
697 return;
698 }
699#endif
700#ifdef HAVE_trunctfqf2
701 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
702 {
703 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
704 return;
705 }
706#endif
707#ifdef HAVE_truncsfhf2
708 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
709 {
710 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
711 return;
712 }
713#endif
714#ifdef HAVE_truncdfhf2
715 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
716 {
717 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
718 return;
719 }
720#endif
721#ifdef HAVE_truncxfhf2
722 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
723 {
724 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
725 return;
726 }
727#endif
728#ifdef HAVE_trunctfhf2
729 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
730 {
731 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
732 return;
733 }
734#endif
bbf6f052
RK
735#ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
737 {
738 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
739 return;
740 }
741#endif
b092b471
JW
742#ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
744 {
745 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
746 return;
747 }
748#endif
bbf6f052
RK
749#ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
751 {
752 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
753 return;
754 }
755#endif
b092b471
JW
756#ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
758 {
759 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
760 return;
761 }
762#endif
bbf6f052
RK
763#ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
765 {
766 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
767 return;
768 }
769#endif
770
b092b471
JW
771 libcall = (rtx) 0;
772 switch (from_mode)
773 {
774 case SFmode:
775 switch (to_mode)
776 {
777 case DFmode:
778 libcall = extendsfdf2_libfunc;
779 break;
780
781 case XFmode:
782 libcall = extendsfxf2_libfunc;
783 break;
784
785 case TFmode:
786 libcall = extendsftf2_libfunc;
787 break;
788 }
789 break;
790
791 case DFmode:
792 switch (to_mode)
793 {
794 case SFmode:
795 libcall = truncdfsf2_libfunc;
796 break;
797
798 case XFmode:
799 libcall = extenddfxf2_libfunc;
800 break;
801
802 case TFmode:
803 libcall = extenddftf2_libfunc;
804 break;
805 }
806 break;
807
808 case XFmode:
809 switch (to_mode)
810 {
811 case SFmode:
812 libcall = truncxfsf2_libfunc;
813 break;
814
815 case DFmode:
816 libcall = truncxfdf2_libfunc;
817 break;
818 }
819 break;
820
821 case TFmode:
822 switch (to_mode)
823 {
824 case SFmode:
825 libcall = trunctfsf2_libfunc;
826 break;
827
828 case DFmode:
829 libcall = trunctfdf2_libfunc;
830 break;
831 }
832 break;
833 }
834
835 if (libcall == (rtx) 0)
836 /* This conversion is not implemented yet. */
bbf6f052
RK
837 abort ();
838
81d79e2c
RS
839 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
840 1, from, from_mode);
841 emit_move_insn (to, value);
bbf6f052
RK
842 return;
843 }
844
845 /* Now both modes are integers. */
846
847 /* Handle expanding beyond a word. */
848 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
849 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
850 {
851 rtx insns;
852 rtx lowpart;
853 rtx fill_value;
854 rtx lowfrom;
855 int i;
856 enum machine_mode lowpart_mode;
857 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
858
859 /* Try converting directly if the insn is supported. */
860 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
861 != CODE_FOR_nothing)
862 {
cd1b4b44
RK
863 /* If FROM is a SUBREG, put it into a register. Do this
864 so that we always generate the same set of insns for
865 better cse'ing; if an intermediate assignment occurred,
866 we won't be doing the operation directly on the SUBREG. */
867 if (optimize > 0 && GET_CODE (from) == SUBREG)
868 from = force_reg (from_mode, from);
bbf6f052
RK
869 emit_unop_insn (code, to, from, equiv_code);
870 return;
871 }
872 /* Next, try converting via full word. */
873 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
874 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
875 != CODE_FOR_nothing))
876 {
a81fee56
RS
877 if (GET_CODE (to) == REG)
878 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
bbf6f052
RK
879 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
880 emit_unop_insn (code, to,
881 gen_lowpart (word_mode, to), equiv_code);
882 return;
883 }
884
885 /* No special multiword conversion insn; do it by hand. */
886 start_sequence ();
887
888 /* Get a copy of FROM widened to a word, if necessary. */
889 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
890 lowpart_mode = word_mode;
891 else
892 lowpart_mode = from_mode;
893
894 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
895
896 lowpart = gen_lowpart (lowpart_mode, to);
897 emit_move_insn (lowpart, lowfrom);
898
899 /* Compute the value to put in each remaining word. */
900 if (unsignedp)
901 fill_value = const0_rtx;
902 else
903 {
904#ifdef HAVE_slt
905 if (HAVE_slt
906 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
907 && STORE_FLAG_VALUE == -1)
908 {
906c4e36
RK
909 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
910 lowpart_mode, 0, 0);
bbf6f052
RK
911 fill_value = gen_reg_rtx (word_mode);
912 emit_insn (gen_slt (fill_value));
913 }
914 else
915#endif
916 {
917 fill_value
918 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
919 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 920 NULL_RTX, 0);
bbf6f052
RK
921 fill_value = convert_to_mode (word_mode, fill_value, 1);
922 }
923 }
924
925 /* Fill the remaining words. */
926 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
927 {
928 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
929 rtx subword = operand_subword (to, index, 1, to_mode);
930
931 if (subword == 0)
932 abort ();
933
934 if (fill_value != subword)
935 emit_move_insn (subword, fill_value);
936 }
937
938 insns = get_insns ();
939 end_sequence ();
940
906c4e36 941 emit_no_conflict_block (insns, to, from, NULL_RTX,
2abec1b7 942 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
943 return;
944 }
945
d3c64ee3
RS
946 /* Truncating multi-word to a word or less. */
947 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
948 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 949 {
431a6eca
JW
950 if (!((GET_CODE (from) == MEM
951 && ! MEM_VOLATILE_P (from)
952 && direct_load[(int) to_mode]
953 && ! mode_dependent_address_p (XEXP (from, 0)))
954 || GET_CODE (from) == REG
955 || GET_CODE (from) == SUBREG))
956 from = force_reg (from_mode, from);
bbf6f052
RK
957 convert_move (to, gen_lowpart (word_mode, from), 0);
958 return;
959 }
960
961 /* Handle pointer conversion */ /* SPEE 900220 */
962 if (to_mode == PSImode)
963 {
964 if (from_mode != SImode)
965 from = convert_to_mode (SImode, from, unsignedp);
966
967#ifdef HAVE_truncsipsi
968 if (HAVE_truncsipsi)
969 {
970 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
971 return;
972 }
973#endif /* HAVE_truncsipsi */
974 abort ();
975 }
976
977 if (from_mode == PSImode)
978 {
979 if (to_mode != SImode)
980 {
981 from = convert_to_mode (SImode, from, unsignedp);
982 from_mode = SImode;
983 }
984 else
985 {
986#ifdef HAVE_extendpsisi
987 if (HAVE_extendpsisi)
988 {
989 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
990 return;
991 }
992#endif /* HAVE_extendpsisi */
993 abort ();
994 }
995 }
996
997 /* Now follow all the conversions between integers
998 no more than a word long. */
999
1000 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1001 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1002 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 1003 GET_MODE_BITSIZE (from_mode)))
bbf6f052 1004 {
d3c64ee3
RS
1005 if (!((GET_CODE (from) == MEM
1006 && ! MEM_VOLATILE_P (from)
1007 && direct_load[(int) to_mode]
1008 && ! mode_dependent_address_p (XEXP (from, 0)))
1009 || GET_CODE (from) == REG
1010 || GET_CODE (from) == SUBREG))
1011 from = force_reg (from_mode, from);
bbf6f052
RK
1012 emit_move_insn (to, gen_lowpart (to_mode, from));
1013 return;
1014 }
1015
d3c64ee3 1016 /* Handle extension. */
bbf6f052
RK
1017 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1018 {
1019 /* Convert directly if that works. */
1020 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1021 != CODE_FOR_nothing)
1022 {
3dc4195c
RK
1023 /* If FROM is a SUBREG, put it into a register. Do this
1024 so that we always generate the same set of insns for
1025 better cse'ing; if an intermediate assignment occurred,
1026 we won't be doing the operation directly on the SUBREG. */
1027 if (optimize > 0 && GET_CODE (from) == SUBREG)
1028 from = force_reg (from_mode, from);
bbf6f052
RK
1029 emit_unop_insn (code, to, from, equiv_code);
1030 return;
1031 }
1032 else
1033 {
1034 enum machine_mode intermediate;
1035
1036 /* Search for a mode to convert via. */
1037 for (intermediate = from_mode; intermediate != VOIDmode;
1038 intermediate = GET_MODE_WIDER_MODE (intermediate))
1039 if ((can_extend_p (to_mode, intermediate, unsignedp)
1040 != CODE_FOR_nothing)
1041 && (can_extend_p (intermediate, from_mode, unsignedp)
1042 != CODE_FOR_nothing))
1043 {
1044 convert_move (to, convert_to_mode (intermediate, from,
1045 unsignedp), unsignedp);
1046 return;
1047 }
1048
1049 /* No suitable intermediate mode. */
1050 abort ();
1051 }
1052 }
1053
1054 /* Support special truncate insns for certain modes. */
1055
1056 if (from_mode == DImode && to_mode == SImode)
1057 {
1058#ifdef HAVE_truncdisi2
1059 if (HAVE_truncdisi2)
1060 {
1061 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1062 return;
1063 }
1064#endif
1065 convert_move (to, force_reg (from_mode, from), unsignedp);
1066 return;
1067 }
1068
1069 if (from_mode == DImode && to_mode == HImode)
1070 {
1071#ifdef HAVE_truncdihi2
1072 if (HAVE_truncdihi2)
1073 {
1074 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1075 return;
1076 }
1077#endif
1078 convert_move (to, force_reg (from_mode, from), unsignedp);
1079 return;
1080 }
1081
1082 if (from_mode == DImode && to_mode == QImode)
1083 {
1084#ifdef HAVE_truncdiqi2
1085 if (HAVE_truncdiqi2)
1086 {
1087 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1088 return;
1089 }
1090#endif
1091 convert_move (to, force_reg (from_mode, from), unsignedp);
1092 return;
1093 }
1094
1095 if (from_mode == SImode && to_mode == HImode)
1096 {
1097#ifdef HAVE_truncsihi2
1098 if (HAVE_truncsihi2)
1099 {
1100 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1101 return;
1102 }
1103#endif
1104 convert_move (to, force_reg (from_mode, from), unsignedp);
1105 return;
1106 }
1107
1108 if (from_mode == SImode && to_mode == QImode)
1109 {
1110#ifdef HAVE_truncsiqi2
1111 if (HAVE_truncsiqi2)
1112 {
1113 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1114 return;
1115 }
1116#endif
1117 convert_move (to, force_reg (from_mode, from), unsignedp);
1118 return;
1119 }
1120
1121 if (from_mode == HImode && to_mode == QImode)
1122 {
1123#ifdef HAVE_trunchiqi2
1124 if (HAVE_trunchiqi2)
1125 {
1126 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1127 return;
1128 }
1129#endif
1130 convert_move (to, force_reg (from_mode, from), unsignedp);
1131 return;
1132 }
1133
1134 /* Handle truncation of volatile memrefs, and so on;
1135 the things that couldn't be truncated directly,
1136 and for which there was no special instruction. */
1137 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1138 {
1139 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1140 emit_move_insn (to, temp);
1141 return;
1142 }
1143
1144 /* Mode combination is not recognized. */
1145 abort ();
1146}
1147
1148/* Return an rtx for a value that would result
1149 from converting X to mode MODE.
1150 Both X and MODE may be floating, or both integer.
1151 UNSIGNEDP is nonzero if X is an unsigned value.
1152 This can be done by referring to a part of X in place
5d901c31
RS
1153 or by copying to a new temporary with conversion.
1154
1155 This function *must not* call protect_from_queue
1156 except when putting X into an insn (in which case convert_move does it). */
bbf6f052
RK
1157
1158rtx
1159convert_to_mode (mode, x, unsignedp)
1160 enum machine_mode mode;
1161 rtx x;
1162 int unsignedp;
5ffe63ed
RS
1163{
1164 return convert_modes (mode, VOIDmode, x, unsignedp);
1165}
1166
1167/* Return an rtx for a value that would result
1168 from converting X from mode OLDMODE to mode MODE.
1169 Both modes may be floating, or both integer.
1170 UNSIGNEDP is nonzero if X is an unsigned value.
1171
1172 This can be done by referring to a part of X in place
1173 or by copying to a new temporary with conversion.
1174
1175 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1176
1177 This function *must not* call protect_from_queue
1178 except when putting X into an insn (in which case convert_move does it). */
1179
1180rtx
1181convert_modes (mode, oldmode, x, unsignedp)
1182 enum machine_mode mode, oldmode;
1183 rtx x;
1184 int unsignedp;
bbf6f052
RK
1185{
1186 register rtx temp;
5ffe63ed 1187
1499e0a8
RK
1188 /* If FROM is a SUBREG that indicates that we have already done at least
1189 the required extension, strip it. */
1190
1191 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1192 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1193 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1194 x = gen_lowpart (mode, x);
bbf6f052 1195
64791b18
RK
1196 if (GET_MODE (x) != VOIDmode)
1197 oldmode = GET_MODE (x);
1198
5ffe63ed 1199 if (mode == oldmode)
bbf6f052
RK
1200 return x;
1201
1202 /* There is one case that we must handle specially: If we are converting
906c4e36 1203 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
1204 we are to interpret the constant as unsigned, gen_lowpart will do
1205 the wrong if the constant appears negative. What we want to do is
1206 make the high-order word of the constant zero, not all ones. */
1207
1208 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 1209 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 1210 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
906c4e36 1211 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
bbf6f052
RK
1212
1213 /* We can do this with a gen_lowpart if both desired and current modes
1214 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
1215 non-volatile MEM. Except for the constant case where MODE is no
1216 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 1217
ba2e110c
RK
1218 if ((GET_CODE (x) == CONST_INT
1219 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 1220 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 1221 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 1222 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 1223 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
d57c66da
JW
1224 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1225 && direct_load[(int) mode])
bbf6f052 1226 || GET_CODE (x) == REG)))))
ba2e110c
RK
1227 {
1228 /* ?? If we don't know OLDMODE, we have to assume here that
1229 X does not need sign- or zero-extension. This may not be
1230 the case, but it's the best we can do. */
1231 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1232 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1233 {
1234 HOST_WIDE_INT val = INTVAL (x);
1235 int width = GET_MODE_BITSIZE (oldmode);
1236
1237 /* We must sign or zero-extend in this case. Start by
1238 zero-extending, then sign extend if we need to. */
1239 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1240 if (! unsignedp
1241 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1242 val |= (HOST_WIDE_INT) (-1) << width;
1243
1244 return GEN_INT (val);
1245 }
1246
1247 return gen_lowpart (mode, x);
1248 }
bbf6f052
RK
1249
1250 temp = gen_reg_rtx (mode);
1251 convert_move (temp, x, unsignedp);
1252 return temp;
1253}
1254\f
1255/* Generate several move instructions to copy LEN bytes
1256 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1257 The caller must pass FROM and TO
1258 through protect_from_queue before calling.
1259 ALIGN (in bytes) is maximum alignment we can assume. */
1260
bbf6f052
RK
1261static void
1262move_by_pieces (to, from, len, align)
1263 rtx to, from;
1264 int len, align;
1265{
1266 struct move_by_pieces data;
1267 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
e87b4f3f 1268 int max_size = MOVE_MAX + 1;
bbf6f052
RK
1269
1270 data.offset = 0;
1271 data.to_addr = to_addr;
1272 data.from_addr = from_addr;
1273 data.to = to;
1274 data.from = from;
1275 data.autinc_to
1276 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1277 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1278 data.autinc_from
1279 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1280 || GET_CODE (from_addr) == POST_INC
1281 || GET_CODE (from_addr) == POST_DEC);
1282
1283 data.explicit_inc_from = 0;
1284 data.explicit_inc_to = 0;
1285 data.reverse
1286 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1287 if (data.reverse) data.offset = len;
1288 data.len = len;
1289
1290 /* If copying requires more than two move insns,
1291 copy addresses to registers (to make displacements shorter)
1292 and use post-increment if available. */
1293 if (!(data.autinc_from && data.autinc_to)
1294 && move_by_pieces_ninsns (len, align) > 2)
1295 {
1296#ifdef HAVE_PRE_DECREMENT
1297 if (data.reverse && ! data.autinc_from)
1298 {
1299 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1300 data.autinc_from = 1;
1301 data.explicit_inc_from = -1;
1302 }
1303#endif
1304#ifdef HAVE_POST_INCREMENT
1305 if (! data.autinc_from)
1306 {
1307 data.from_addr = copy_addr_to_reg (from_addr);
1308 data.autinc_from = 1;
1309 data.explicit_inc_from = 1;
1310 }
1311#endif
1312 if (!data.autinc_from && CONSTANT_P (from_addr))
1313 data.from_addr = copy_addr_to_reg (from_addr);
1314#ifdef HAVE_PRE_DECREMENT
1315 if (data.reverse && ! data.autinc_to)
1316 {
1317 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1318 data.autinc_to = 1;
1319 data.explicit_inc_to = -1;
1320 }
1321#endif
1322#ifdef HAVE_POST_INCREMENT
1323 if (! data.reverse && ! data.autinc_to)
1324 {
1325 data.to_addr = copy_addr_to_reg (to_addr);
1326 data.autinc_to = 1;
1327 data.explicit_inc_to = 1;
1328 }
1329#endif
1330 if (!data.autinc_to && CONSTANT_P (to_addr))
1331 data.to_addr = copy_addr_to_reg (to_addr);
1332 }
1333
e87b4f3f
RS
1334 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1335 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1336 align = MOVE_MAX;
bbf6f052
RK
1337
1338 /* First move what we can in the largest integer mode, then go to
1339 successively smaller modes. */
1340
1341 while (max_size > 1)
1342 {
1343 enum machine_mode mode = VOIDmode, tmode;
1344 enum insn_code icode;
1345
e7c33f54
RK
1346 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1347 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1348 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1349 mode = tmode;
1350
1351 if (mode == VOIDmode)
1352 break;
1353
1354 icode = mov_optab->handlers[(int) mode].insn_code;
1355 if (icode != CODE_FOR_nothing
1356 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1357 GET_MODE_SIZE (mode)))
1358 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1359
1360 max_size = GET_MODE_SIZE (mode);
1361 }
1362
1363 /* The code above should have handled everything. */
1364 if (data.len != 0)
1365 abort ();
1366}
1367
1368/* Return number of insns required to move L bytes by pieces.
1369 ALIGN (in bytes) is maximum alignment we can assume. */
1370
1371static int
1372move_by_pieces_ninsns (l, align)
1373 unsigned int l;
1374 int align;
1375{
1376 register int n_insns = 0;
e87b4f3f 1377 int max_size = MOVE_MAX + 1;
bbf6f052 1378
e87b4f3f
RS
1379 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1380 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1381 align = MOVE_MAX;
bbf6f052
RK
1382
1383 while (max_size > 1)
1384 {
1385 enum machine_mode mode = VOIDmode, tmode;
1386 enum insn_code icode;
1387
e7c33f54
RK
1388 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1389 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1390 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1391 mode = tmode;
1392
1393 if (mode == VOIDmode)
1394 break;
1395
1396 icode = mov_optab->handlers[(int) mode].insn_code;
1397 if (icode != CODE_FOR_nothing
1398 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1399 GET_MODE_SIZE (mode)))
1400 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1401
1402 max_size = GET_MODE_SIZE (mode);
1403 }
1404
1405 return n_insns;
1406}
1407
1408/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1409 with move instructions for mode MODE. GENFUN is the gen_... function
1410 to make a move insn for that mode. DATA has all the other info. */
1411
1412static void
1413move_by_pieces_1 (genfun, mode, data)
1414 rtx (*genfun) ();
1415 enum machine_mode mode;
1416 struct move_by_pieces *data;
1417{
1418 register int size = GET_MODE_SIZE (mode);
1419 register rtx to1, from1;
1420
1421 while (data->len >= size)
1422 {
1423 if (data->reverse) data->offset -= size;
1424
1425 to1 = (data->autinc_to
1426 ? gen_rtx (MEM, mode, data->to_addr)
1427 : change_address (data->to, mode,
1428 plus_constant (data->to_addr, data->offset)));
1429 from1 =
1430 (data->autinc_from
1431 ? gen_rtx (MEM, mode, data->from_addr)
1432 : change_address (data->from, mode,
1433 plus_constant (data->from_addr, data->offset)));
1434
1435#ifdef HAVE_PRE_DECREMENT
1436 if (data->explicit_inc_to < 0)
906c4e36 1437 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
bbf6f052 1438 if (data->explicit_inc_from < 0)
906c4e36 1439 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
bbf6f052
RK
1440#endif
1441
1442 emit_insn ((*genfun) (to1, from1));
1443#ifdef HAVE_POST_INCREMENT
1444 if (data->explicit_inc_to > 0)
906c4e36 1445 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
bbf6f052 1446 if (data->explicit_inc_from > 0)
906c4e36 1447 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052
RK
1448#endif
1449
1450 if (! data->reverse) data->offset += size;
1451
1452 data->len -= size;
1453 }
1454}
1455\f
1456/* Emit code to move a block Y to a block X.
1457 This may be done with string-move instructions,
1458 with multiple scalar move instructions, or with a library call.
1459
1460 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1461 with mode BLKmode.
1462 SIZE is an rtx that says how long they are.
1463 ALIGN is the maximum alignment we can assume they have,
1464 measured in bytes. */
1465
1466void
1467emit_block_move (x, y, size, align)
1468 rtx x, y;
1469 rtx size;
1470 int align;
1471{
1472 if (GET_MODE (x) != BLKmode)
1473 abort ();
1474
1475 if (GET_MODE (y) != BLKmode)
1476 abort ();
1477
1478 x = protect_from_queue (x, 1);
1479 y = protect_from_queue (y, 0);
5d901c31 1480 size = protect_from_queue (size, 0);
bbf6f052
RK
1481
1482 if (GET_CODE (x) != MEM)
1483 abort ();
1484 if (GET_CODE (y) != MEM)
1485 abort ();
1486 if (size == 0)
1487 abort ();
1488
1489 if (GET_CODE (size) == CONST_INT
906c4e36 1490 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
bbf6f052
RK
1491 move_by_pieces (x, y, INTVAL (size), align);
1492 else
1493 {
1494 /* Try the most limited insn first, because there's no point
1495 including more than one in the machine description unless
1496 the more limited one has some advantage. */
266007a7 1497
0bba3f6f 1498 rtx opalign = GEN_INT (align);
266007a7
RK
1499 enum machine_mode mode;
1500
1501 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1502 mode = GET_MODE_WIDER_MODE (mode))
bbf6f052 1503 {
266007a7 1504 enum insn_code code = movstr_optab[(int) mode];
266007a7
RK
1505
1506 if (code != CODE_FOR_nothing
803090c4
RK
1507 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1508 here because if SIZE is less than the mode mask, as it is
8008b228 1509 returned by the macro, it will definitely be less than the
803090c4 1510 actual mode mask. */
f85b95d1 1511 && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
0bba3f6f
RK
1512 && (insn_operand_predicate[(int) code][0] == 0
1513 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1514 && (insn_operand_predicate[(int) code][1] == 0
1515 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1516 && (insn_operand_predicate[(int) code][3] == 0
1517 || (*insn_operand_predicate[(int) code][3]) (opalign,
1518 VOIDmode)))
bbf6f052 1519 {
1ba1e2a8 1520 rtx op2;
266007a7
RK
1521 rtx last = get_last_insn ();
1522 rtx pat;
1523
1ba1e2a8 1524 op2 = convert_to_mode (mode, size, 1);
0bba3f6f
RK
1525 if (insn_operand_predicate[(int) code][2] != 0
1526 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
266007a7
RK
1527 op2 = copy_to_mode_reg (mode, op2);
1528
1529 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1530 if (pat)
1531 {
1532 emit_insn (pat);
1533 return;
1534 }
1535 else
1536 delete_insns_since (last);
bbf6f052
RK
1537 }
1538 }
bbf6f052
RK
1539
1540#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1541 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
1542 VOIDmode, 3, XEXP (x, 0), Pmode,
1543 XEXP (y, 0), Pmode,
0fa83258
RK
1544 convert_to_mode (TYPE_MODE (sizetype), size,
1545 TREE_UNSIGNED (sizetype)),
1546 TYPE_MODE (sizetype));
bbf6f052 1547#else
d562e42e 1548 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
1549 VOIDmode, 3, XEXP (y, 0), Pmode,
1550 XEXP (x, 0), Pmode,
0fa83258
RK
1551 convert_to_mode (TYPE_MODE (sizetype), size,
1552 TREE_UNSIGNED (sizetype)),
1553 TYPE_MODE (sizetype));
bbf6f052
RK
1554#endif
1555 }
1556}
1557\f
1558/* Copy all or part of a value X into registers starting at REGNO.
1559 The number of registers to be filled is NREGS. */
1560
1561void
1562move_block_to_reg (regno, x, nregs, mode)
1563 int regno;
1564 rtx x;
1565 int nregs;
1566 enum machine_mode mode;
1567{
1568 int i;
1569 rtx pat, last;
1570
1571 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1572 x = validize_mem (force_const_mem (mode, x));
1573
1574 /* See if the machine can do this with a load multiple insn. */
1575#ifdef HAVE_load_multiple
c3a02afe 1576 if (HAVE_load_multiple)
bbf6f052 1577 {
c3a02afe
RK
1578 last = get_last_insn ();
1579 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1580 GEN_INT (nregs));
1581 if (pat)
1582 {
1583 emit_insn (pat);
1584 return;
1585 }
1586 else
1587 delete_insns_since (last);
bbf6f052 1588 }
bbf6f052
RK
1589#endif
1590
1591 for (i = 0; i < nregs; i++)
1592 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1593 operand_subword_force (x, i, mode));
1594}
1595
1596/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1597 The number of registers to be filled is NREGS. SIZE indicates the number
1598 of bytes in the object X. */
1599
bbf6f052
RK
1600
1601void
0040593d 1602move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1603 int regno;
1604 rtx x;
1605 int nregs;
0040593d 1606 int size;
bbf6f052
RK
1607{
1608 int i;
1609 rtx pat, last;
1610
0040593d
JW
1611 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1612 to the left before storing to memory. */
1613 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1614 {
1615 rtx tem = operand_subword (x, 0, 1, BLKmode);
1616 rtx shift;
1617
1618 if (tem == 0)
1619 abort ();
1620
1621 shift = expand_shift (LSHIFT_EXPR, word_mode,
1622 gen_rtx (REG, word_mode, regno),
1623 build_int_2 ((UNITS_PER_WORD - size)
1624 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1625 emit_move_insn (tem, shift);
1626 return;
1627 }
1628
bbf6f052
RK
1629 /* See if the machine can do this with a store multiple insn. */
1630#ifdef HAVE_store_multiple
c3a02afe 1631 if (HAVE_store_multiple)
bbf6f052 1632 {
c3a02afe
RK
1633 last = get_last_insn ();
1634 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1635 GEN_INT (nregs));
1636 if (pat)
1637 {
1638 emit_insn (pat);
1639 return;
1640 }
1641 else
1642 delete_insns_since (last);
bbf6f052 1643 }
bbf6f052
RK
1644#endif
1645
1646 for (i = 0; i < nregs; i++)
1647 {
1648 rtx tem = operand_subword (x, i, 1, BLKmode);
1649
1650 if (tem == 0)
1651 abort ();
1652
1653 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1654 }
1655}
1656
1657/* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1658
1659void
1660use_regs (regno, nregs)
1661 int regno;
1662 int nregs;
1663{
1664 int i;
1665
1666 for (i = 0; i < nregs; i++)
1667 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1668}
7308a047
RS
1669
1670/* Mark the instructions since PREV as a libcall block.
1671 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1672
f76a70d5 1673static void
7308a047
RS
1674group_insns (prev)
1675 rtx prev;
1676{
1677 rtx insn_first;
1678 rtx insn_last;
1679
1680 /* Find the instructions to mark */
1681 if (prev)
1682 insn_first = NEXT_INSN (prev);
1683 else
1684 insn_first = get_insns ();
1685
1686 insn_last = get_last_insn ();
1687
1688 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1689 REG_NOTES (insn_last));
1690
1691 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1692 REG_NOTES (insn_first));
1693}
bbf6f052
RK
1694\f
1695/* Write zeros through the storage of OBJECT.
1696 If OBJECT has BLKmode, SIZE is its length in bytes. */
1697
1698void
1699clear_storage (object, size)
1700 rtx object;
1701 int size;
1702{
1703 if (GET_MODE (object) == BLKmode)
1704 {
1705#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1706 emit_library_call (memset_libfunc, 0,
bbf6f052
RK
1707 VOIDmode, 3,
1708 XEXP (object, 0), Pmode, const0_rtx, Pmode,
906c4e36 1709 GEN_INT (size), Pmode);
bbf6f052 1710#else
d562e42e 1711 emit_library_call (bzero_libfunc, 0,
bbf6f052
RK
1712 VOIDmode, 2,
1713 XEXP (object, 0), Pmode,
906c4e36 1714 GEN_INT (size), Pmode);
bbf6f052
RK
1715#endif
1716 }
1717 else
1718 emit_move_insn (object, const0_rtx);
1719}
1720
1721/* Generate code to copy Y into X.
1722 Both Y and X must have the same mode, except that
1723 Y can be a constant with VOIDmode.
1724 This mode cannot be BLKmode; use emit_block_move for that.
1725
1726 Return the last instruction emitted. */
1727
1728rtx
1729emit_move_insn (x, y)
1730 rtx x, y;
1731{
1732 enum machine_mode mode = GET_MODE (x);
7308a047
RS
1733 enum machine_mode submode;
1734 enum mode_class class = GET_MODE_CLASS (mode);
bbf6f052
RK
1735 int i;
1736
1737 x = protect_from_queue (x, 1);
1738 y = protect_from_queue (y, 0);
1739
1740 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1741 abort ();
1742
1743 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1744 y = force_const_mem (mode, y);
1745
1746 /* If X or Y are memory references, verify that their addresses are valid
1747 for the machine. */
1748 if (GET_CODE (x) == MEM
1749 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1750 && ! push_operand (x, GET_MODE (x)))
1751 || (flag_force_addr
1752 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1753 x = change_address (x, VOIDmode, XEXP (x, 0));
1754
1755 if (GET_CODE (y) == MEM
1756 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1757 || (flag_force_addr
1758 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1759 y = change_address (y, VOIDmode, XEXP (y, 0));
1760
1761 if (mode == BLKmode)
1762 abort ();
1763
261c4230
RS
1764 return emit_move_insn_1 (x, y);
1765}
1766
1767/* Low level part of emit_move_insn.
1768 Called just like emit_move_insn, but assumes X and Y
1769 are basically valid. */
1770
1771rtx
1772emit_move_insn_1 (x, y)
1773 rtx x, y;
1774{
1775 enum machine_mode mode = GET_MODE (x);
1776 enum machine_mode submode;
1777 enum mode_class class = GET_MODE_CLASS (mode);
1778 int i;
1779
7308a047
RS
1780 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1781 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1782 (class == MODE_COMPLEX_INT
1783 ? MODE_INT : MODE_FLOAT),
1784 0);
1785
bbf6f052
RK
1786 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1787 return
1788 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1789
89742723 1790 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047
RS
1791 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1792 && submode != BLKmode
1793 && (mov_optab->handlers[(int) submode].insn_code
1794 != CODE_FOR_nothing))
1795 {
1796 /* Don't split destination if it is a stack push. */
1797 int stack = push_operand (x, GET_MODE (x));
1798 rtx prev = get_last_insn ();
1799
1800 /* Tell flow that the whole of the destination is being set. */
1801 if (GET_CODE (x) == REG)
1802 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1803
1804 /* If this is a stack, push the highpart first, so it
1805 will be in the argument order.
1806
1807 In that case, change_address is used only to convert
1808 the mode, not to change the address. */
c937357e
RS
1809 if (stack)
1810 {
e33c0d66
RS
1811 /* Note that the real part always precedes the imag part in memory
1812 regardless of machine's endianness. */
c937357e
RS
1813#ifdef STACK_GROWS_DOWNWARD
1814 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1815 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1816 gen_imagpart (submode, y)));
c937357e
RS
1817 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1818 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1819 gen_realpart (submode, y)));
c937357e
RS
1820#else
1821 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1822 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1823 gen_realpart (submode, y)));
c937357e
RS
1824 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1825 (gen_rtx (MEM, submode, (XEXP (x, 0))),
e33c0d66 1826 gen_imagpart (submode, y)));
c937357e
RS
1827#endif
1828 }
1829 else
1830 {
1831 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1832 (gen_highpart (submode, x), gen_highpart (submode, y)));
1833 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1834 (gen_lowpart (submode, x), gen_lowpart (submode, y)));
1835 }
7308a047 1836
6d6e61ce
RS
1837 if (GET_CODE (x) != CONCAT)
1838 /* If X is a CONCAT, we got insns like RD = RS, ID = IS,
1839 each with a separate pseudo as destination.
1840 It's not correct for flow to treat them as a unit. */
1841 group_insns (prev);
7a1ab50a
RS
1842
1843 return get_last_insn ();
7308a047
RS
1844 }
1845
bbf6f052
RK
1846 /* This will handle any multi-word mode that lacks a move_insn pattern.
1847 However, you will get better code if you define such patterns,
1848 even if they must turn into multiple assembler instructions. */
a4320483 1849 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
1850 {
1851 rtx last_insn = 0;
7308a047 1852 rtx prev_insn = get_last_insn ();
bbf6f052
RK
1853
1854 for (i = 0;
1855 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1856 i++)
1857 {
1858 rtx xpart = operand_subword (x, i, 1, mode);
1859 rtx ypart = operand_subword (y, i, 1, mode);
1860
1861 /* If we can't get a part of Y, put Y into memory if it is a
1862 constant. Otherwise, force it into a register. If we still
1863 can't get a part of Y, abort. */
1864 if (ypart == 0 && CONSTANT_P (y))
1865 {
1866 y = force_const_mem (mode, y);
1867 ypart = operand_subword (y, i, 1, mode);
1868 }
1869 else if (ypart == 0)
1870 ypart = operand_subword_force (y, i, mode);
1871
1872 if (xpart == 0 || ypart == 0)
1873 abort ();
1874
1875 last_insn = emit_move_insn (xpart, ypart);
1876 }
7308a047
RS
1877 /* Mark these insns as a libcall block. */
1878 group_insns (prev_insn);
1879
bbf6f052
RK
1880 return last_insn;
1881 }
1882 else
1883 abort ();
1884}
1885\f
1886/* Pushing data onto the stack. */
1887
1888/* Push a block of length SIZE (perhaps variable)
1889 and return an rtx to address the beginning of the block.
1890 Note that it is not possible for the value returned to be a QUEUED.
1891 The value may be virtual_outgoing_args_rtx.
1892
1893 EXTRA is the number of bytes of padding to push in addition to SIZE.
1894 BELOW nonzero means this padding comes at low addresses;
1895 otherwise, the padding comes at high addresses. */
1896
1897rtx
1898push_block (size, extra, below)
1899 rtx size;
1900 int extra, below;
1901{
1902 register rtx temp;
1903 if (CONSTANT_P (size))
1904 anti_adjust_stack (plus_constant (size, extra));
1905 else if (GET_CODE (size) == REG && extra == 0)
1906 anti_adjust_stack (size);
1907 else
1908 {
1909 rtx temp = copy_to_mode_reg (Pmode, size);
1910 if (extra != 0)
906c4e36 1911 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
1912 temp, 0, OPTAB_LIB_WIDEN);
1913 anti_adjust_stack (temp);
1914 }
1915
1916#ifdef STACK_GROWS_DOWNWARD
1917 temp = virtual_outgoing_args_rtx;
1918 if (extra != 0 && below)
1919 temp = plus_constant (temp, extra);
1920#else
1921 if (GET_CODE (size) == CONST_INT)
1922 temp = plus_constant (virtual_outgoing_args_rtx,
1923 - INTVAL (size) - (below ? 0 : extra));
1924 else if (extra != 0 && !below)
1925 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1926 negate_rtx (Pmode, plus_constant (size, extra)));
1927 else
1928 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1929 negate_rtx (Pmode, size));
1930#endif
1931
1932 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1933}
1934
87e38d84 1935rtx
bbf6f052
RK
1936gen_push_operand ()
1937{
1938 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1939}
1940
1941/* Generate code to push X onto the stack, assuming it has mode MODE and
1942 type TYPE.
1943 MODE is redundant except when X is a CONST_INT (since they don't
1944 carry mode info).
1945 SIZE is an rtx for the size of data to be copied (in bytes),
1946 needed only if X is BLKmode.
1947
1948 ALIGN (in bytes) is maximum alignment we can assume.
1949
cd048831
RK
1950 If PARTIAL and REG are both nonzero, then copy that many of the first
1951 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
1952 The amount of space pushed is decreased by PARTIAL words,
1953 rounded *down* to a multiple of PARM_BOUNDARY.
1954 REG must be a hard register in this case.
cd048831
RK
1955 If REG is zero but PARTIAL is not, take any all others actions for an
1956 argument partially in registers, but do not actually load any
1957 registers.
bbf6f052
RK
1958
1959 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 1960 This is ignored if an argument block has already been allocated.
bbf6f052
RK
1961
1962 On a machine that lacks real push insns, ARGS_ADDR is the address of
1963 the bottom of the argument block for this call. We use indexing off there
1964 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1965 argument block has not been preallocated.
1966
1967 ARGS_SO_FAR is the size of args previously pushed for this call. */
1968
1969void
1970emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1971 args_addr, args_so_far)
1972 register rtx x;
1973 enum machine_mode mode;
1974 tree type;
1975 rtx size;
1976 int align;
1977 int partial;
1978 rtx reg;
1979 int extra;
1980 rtx args_addr;
1981 rtx args_so_far;
1982{
1983 rtx xinner;
1984 enum direction stack_direction
1985#ifdef STACK_GROWS_DOWNWARD
1986 = downward;
1987#else
1988 = upward;
1989#endif
1990
1991 /* Decide where to pad the argument: `downward' for below,
1992 `upward' for above, or `none' for don't pad it.
1993 Default is below for small data on big-endian machines; else above. */
1994 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1995
1996 /* Invert direction if stack is post-update. */
1997 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1998 if (where_pad != none)
1999 where_pad = (where_pad == downward ? upward : downward);
2000
2001 xinner = x = protect_from_queue (x, 0);
2002
2003 if (mode == BLKmode)
2004 {
2005 /* Copy a block into the stack, entirely or partially. */
2006
2007 register rtx temp;
2008 int used = partial * UNITS_PER_WORD;
2009 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2010 int skip;
2011
2012 if (size == 0)
2013 abort ();
2014
2015 used -= offset;
2016
2017 /* USED is now the # of bytes we need not copy to the stack
2018 because registers will take care of them. */
2019
2020 if (partial != 0)
2021 xinner = change_address (xinner, BLKmode,
2022 plus_constant (XEXP (xinner, 0), used));
2023
2024 /* If the partial register-part of the arg counts in its stack size,
2025 skip the part of stack space corresponding to the registers.
2026 Otherwise, start copying to the beginning of the stack space,
2027 by setting SKIP to 0. */
2028#ifndef REG_PARM_STACK_SPACE
2029 skip = 0;
2030#else
2031 skip = used;
2032#endif
2033
2034#ifdef PUSH_ROUNDING
2035 /* Do it with several push insns if that doesn't take lots of insns
2036 and if there is no difficulty with push insns that skip bytes
2037 on the stack for alignment purposes. */
2038 if (args_addr == 0
2039 && GET_CODE (size) == CONST_INT
2040 && skip == 0
2041 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2042 < MOVE_RATIO)
bbf6f052
RK
2043 /* Here we avoid the case of a structure whose weak alignment
2044 forces many pushes of a small amount of data,
2045 and such small pushes do rounding that causes trouble. */
e87b4f3f
RS
2046 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
2047 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2048 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
2049 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2050 {
2051 /* Push padding now if padding above and stack grows down,
2052 or if padding below and stack grows up.
2053 But if space already allocated, this has already been done. */
2054 if (extra && args_addr == 0
2055 && where_pad != none && where_pad != stack_direction)
906c4e36 2056 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2057
2058 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2059 INTVAL (size) - used, align);
2060 }
2061 else
2062#endif /* PUSH_ROUNDING */
2063 {
2064 /* Otherwise make space on the stack and copy the data
2065 to the address of that space. */
2066
2067 /* Deduct words put into registers from the size we must copy. */
2068 if (partial != 0)
2069 {
2070 if (GET_CODE (size) == CONST_INT)
906c4e36 2071 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
2072 else
2073 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
2074 GEN_INT (used), NULL_RTX, 0,
2075 OPTAB_LIB_WIDEN);
bbf6f052
RK
2076 }
2077
2078 /* Get the address of the stack space.
2079 In this case, we do not deal with EXTRA separately.
2080 A single stack adjust will do. */
2081 if (! args_addr)
2082 {
2083 temp = push_block (size, extra, where_pad == downward);
2084 extra = 0;
2085 }
2086 else if (GET_CODE (args_so_far) == CONST_INT)
2087 temp = memory_address (BLKmode,
2088 plus_constant (args_addr,
2089 skip + INTVAL (args_so_far)));
2090 else
2091 temp = memory_address (BLKmode,
2092 plus_constant (gen_rtx (PLUS, Pmode,
2093 args_addr, args_so_far),
2094 skip));
2095
2096 /* TEMP is the address of the block. Copy the data there. */
2097 if (GET_CODE (size) == CONST_INT
2098 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2099 < MOVE_RATIO))
2100 {
2101 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2102 INTVAL (size), align);
2103 goto ret;
2104 }
2105 /* Try the most limited insn first, because there's no point
2106 including more than one in the machine description unless
2107 the more limited one has some advantage. */
2108#ifdef HAVE_movstrqi
2109 if (HAVE_movstrqi
2110 && GET_CODE (size) == CONST_INT
2111 && ((unsigned) INTVAL (size)
2112 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2113 {
c841050e
RS
2114 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2115 xinner, size, GEN_INT (align));
2116 if (pat != 0)
2117 {
2118 emit_insn (pat);
2119 goto ret;
2120 }
bbf6f052
RK
2121 }
2122#endif
2123#ifdef HAVE_movstrhi
2124 if (HAVE_movstrhi
2125 && GET_CODE (size) == CONST_INT
2126 && ((unsigned) INTVAL (size)
2127 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2128 {
c841050e
RS
2129 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2130 xinner, size, GEN_INT (align));
2131 if (pat != 0)
2132 {
2133 emit_insn (pat);
2134 goto ret;
2135 }
bbf6f052
RK
2136 }
2137#endif
2138#ifdef HAVE_movstrsi
2139 if (HAVE_movstrsi)
2140 {
c841050e
RS
2141 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2142 xinner, size, GEN_INT (align));
2143 if (pat != 0)
2144 {
2145 emit_insn (pat);
2146 goto ret;
2147 }
bbf6f052
RK
2148 }
2149#endif
2150#ifdef HAVE_movstrdi
2151 if (HAVE_movstrdi)
2152 {
c841050e
RS
2153 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2154 xinner, size, GEN_INT (align));
2155 if (pat != 0)
2156 {
2157 emit_insn (pat);
2158 goto ret;
2159 }
bbf6f052
RK
2160 }
2161#endif
2162
2163#ifndef ACCUMULATE_OUTGOING_ARGS
2164 /* If the source is referenced relative to the stack pointer,
2165 copy it to another register to stabilize it. We do not need
2166 to do this if we know that we won't be changing sp. */
2167
2168 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2169 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2170 temp = copy_to_reg (temp);
2171#endif
2172
2173 /* Make inhibit_defer_pop nonzero around the library call
2174 to force it to pop the bcopy-arguments right away. */
2175 NO_DEFER_POP;
2176#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2177 emit_library_call (memcpy_libfunc, 0,
bbf6f052 2178 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
2179 convert_to_mode (TYPE_MODE (sizetype),
2180 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2181 TYPE_MODE (sizetype));
bbf6f052 2182#else
d562e42e 2183 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2184 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
0fa83258
RK
2185 convert_to_mode (TYPE_MODE (sizetype),
2186 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2187 TYPE_MODE (sizetype));
bbf6f052
RK
2188#endif
2189 OK_DEFER_POP;
2190 }
2191 }
2192 else if (partial > 0)
2193 {
2194 /* Scalar partly in registers. */
2195
2196 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2197 int i;
2198 int not_stack;
2199 /* # words of start of argument
2200 that we must make space for but need not store. */
2201 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2202 int args_offset = INTVAL (args_so_far);
2203 int skip;
2204
2205 /* Push padding now if padding above and stack grows down,
2206 or if padding below and stack grows up.
2207 But if space already allocated, this has already been done. */
2208 if (extra && args_addr == 0
2209 && where_pad != none && where_pad != stack_direction)
906c4e36 2210 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2211
2212 /* If we make space by pushing it, we might as well push
2213 the real data. Otherwise, we can leave OFFSET nonzero
2214 and leave the space uninitialized. */
2215 if (args_addr == 0)
2216 offset = 0;
2217
2218 /* Now NOT_STACK gets the number of words that we don't need to
2219 allocate on the stack. */
2220 not_stack = partial - offset;
2221
2222 /* If the partial register-part of the arg counts in its stack size,
2223 skip the part of stack space corresponding to the registers.
2224 Otherwise, start copying to the beginning of the stack space,
2225 by setting SKIP to 0. */
2226#ifndef REG_PARM_STACK_SPACE
2227 skip = 0;
2228#else
2229 skip = not_stack;
2230#endif
2231
2232 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2233 x = validize_mem (force_const_mem (mode, x));
2234
2235 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2236 SUBREGs of such registers are not allowed. */
2237 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2238 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2239 x = copy_to_reg (x);
2240
2241 /* Loop over all the words allocated on the stack for this arg. */
2242 /* We can do it by words, because any scalar bigger than a word
2243 has a size a multiple of a word. */
2244#ifndef PUSH_ARGS_REVERSED
2245 for (i = not_stack; i < size; i++)
2246#else
2247 for (i = size - 1; i >= not_stack; i--)
2248#endif
2249 if (i >= not_stack + offset)
2250 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
2251 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2252 0, args_addr,
2253 GEN_INT (args_offset + ((i - not_stack + skip)
bbf6f052
RK
2254 * UNITS_PER_WORD)));
2255 }
2256 else
2257 {
2258 rtx addr;
2259
2260 /* Push padding now if padding above and stack grows down,
2261 or if padding below and stack grows up.
2262 But if space already allocated, this has already been done. */
2263 if (extra && args_addr == 0
2264 && where_pad != none && where_pad != stack_direction)
906c4e36 2265 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2266
2267#ifdef PUSH_ROUNDING
2268 if (args_addr == 0)
2269 addr = gen_push_operand ();
2270 else
2271#endif
2272 if (GET_CODE (args_so_far) == CONST_INT)
2273 addr
2274 = memory_address (mode,
2275 plus_constant (args_addr, INTVAL (args_so_far)));
2276 else
2277 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2278 args_so_far));
2279
2280 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2281 }
2282
2283 ret:
2284 /* If part should go in registers, copy that part
2285 into the appropriate registers. Do this now, at the end,
2286 since mem-to-mem copies above may do function calls. */
cd048831 2287 if (partial > 0 && reg != 0)
bbf6f052
RK
2288 move_block_to_reg (REGNO (reg), x, partial, mode);
2289
2290 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 2291 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2292}
2293\f
bbf6f052
RK
2294/* Expand an assignment that stores the value of FROM into TO.
2295 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
2296 (This may contain a QUEUED rtx;
2297 if the value is constant, this rtx is a constant.)
2298 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
2299
2300 SUGGEST_REG is no longer actually used.
2301 It used to mean, copy the value through a register
2302 and return that register, if that is possible.
709f5be1 2303 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
2304
2305rtx
2306expand_assignment (to, from, want_value, suggest_reg)
2307 tree to, from;
2308 int want_value;
2309 int suggest_reg;
2310{
2311 register rtx to_rtx = 0;
2312 rtx result;
2313
2314 /* Don't crash if the lhs of the assignment was erroneous. */
2315
2316 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
2317 {
2318 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2319 return want_value ? result : NULL_RTX;
2320 }
bbf6f052 2321
ca695ac9
JB
2322 if (output_bytecode)
2323 {
2324 tree dest_innermost;
2325
2326 bc_expand_expr (from);
6d6e61ce 2327 bc_emit_instruction (duplicate);
ca695ac9
JB
2328
2329 dest_innermost = bc_expand_address (to);
2330
2331 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2332 take care of it here. */
2333
2334 bc_store_memory (TREE_TYPE (to), dest_innermost);
2335 return NULL;
2336 }
2337
bbf6f052
RK
2338 /* Assignment of a structure component needs special treatment
2339 if the structure component's rtx is not simply a MEM.
2340 Assignment of an array element at a constant index
2341 has the same problem. */
2342
2343 if (TREE_CODE (to) == COMPONENT_REF
2344 || TREE_CODE (to) == BIT_FIELD_REF
2345 || (TREE_CODE (to) == ARRAY_REF
2346 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2347 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2348 {
2349 enum machine_mode mode1;
2350 int bitsize;
2351 int bitpos;
7bb0943f 2352 tree offset;
bbf6f052
RK
2353 int unsignedp;
2354 int volatilep = 0;
0088fcb1 2355 tree tem;
d78d243c 2356 int alignment;
0088fcb1
RK
2357
2358 push_temp_slots ();
2359 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
bbf6f052
RK
2360 &mode1, &unsignedp, &volatilep);
2361
2362 /* If we are going to use store_bit_field and extract_bit_field,
2363 make sure to_rtx will be safe for multiple use. */
2364
2365 if (mode1 == VOIDmode && want_value)
2366 tem = stabilize_reference (tem);
2367
d78d243c 2368 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
906c4e36 2369 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2370 if (offset != 0)
2371 {
906c4e36 2372 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2373
2374 if (GET_CODE (to_rtx) != MEM)
2375 abort ();
2376 to_rtx = change_address (to_rtx, VOIDmode,
2377 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2378 force_reg (Pmode, offset_rtx)));
d78d243c
RS
2379 /* If we have a variable offset, the known alignment
2380 is only that of the innermost structure containing the field.
2381 (Actually, we could sometimes do better by using the
2382 align of an element of the innermost array, but no need.) */
2383 if (TREE_CODE (to) == COMPONENT_REF
2384 || TREE_CODE (to) == BIT_FIELD_REF)
2385 alignment
2386 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
7bb0943f 2387 }
bbf6f052
RK
2388 if (volatilep)
2389 {
2390 if (GET_CODE (to_rtx) == MEM)
2391 MEM_VOLATILE_P (to_rtx) = 1;
2392#if 0 /* This was turned off because, when a field is volatile
2393 in an object which is not volatile, the object may be in a register,
2394 and then we would abort over here. */
2395 else
2396 abort ();
2397#endif
2398 }
2399
2400 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2401 (want_value
2402 /* Spurious cast makes HPUX compiler happy. */
2403 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2404 : VOIDmode),
2405 unsignedp,
2406 /* Required alignment of containing datum. */
d78d243c 2407 alignment,
bbf6f052
RK
2408 int_size_in_bytes (TREE_TYPE (tem)));
2409 preserve_temp_slots (result);
2410 free_temp_slots ();
0088fcb1 2411 pop_temp_slots ();
bbf6f052 2412
709f5be1
RS
2413 /* If the value is meaningful, convert RESULT to the proper mode.
2414 Otherwise, return nothing. */
5ffe63ed
RS
2415 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2416 TYPE_MODE (TREE_TYPE (from)),
2417 result,
2418 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 2419 : NULL_RTX);
bbf6f052
RK
2420 }
2421
cd1db108
RS
2422 /* If the rhs is a function call and its value is not an aggregate,
2423 call the function before we start to compute the lhs.
2424 This is needed for correct code for cases such as
2425 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
2426 requires loading up part of an address in a separate insn.
2427
2428 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2429 a promoted variable where the zero- or sign- extension needs to be done.
2430 Handling this in the normal way is safe because no computation is done
2431 before the call. */
2432 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2433 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 2434 {
0088fcb1
RK
2435 rtx value;
2436
2437 push_temp_slots ();
2438 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108
RS
2439 if (to_rtx == 0)
2440 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2441 emit_move_insn (to_rtx, value);
2442 preserve_temp_slots (to_rtx);
2443 free_temp_slots ();
0088fcb1 2444 pop_temp_slots ();
709f5be1 2445 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
2446 }
2447
bbf6f052
RK
2448 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2449 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2450
2451 if (to_rtx == 0)
906c4e36 2452 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
bbf6f052 2453
86d38d25
RS
2454 /* Don't move directly into a return register. */
2455 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2456 {
0088fcb1
RK
2457 rtx temp;
2458
2459 push_temp_slots ();
2460 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
2461 emit_move_insn (to_rtx, temp);
2462 preserve_temp_slots (to_rtx);
2463 free_temp_slots ();
0088fcb1 2464 pop_temp_slots ();
709f5be1 2465 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
2466 }
2467
bbf6f052
RK
2468 /* In case we are returning the contents of an object which overlaps
2469 the place the value is being stored, use a safe function when copying
2470 a value through a pointer into a structure value return block. */
2471 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2472 && current_function_returns_struct
2473 && !current_function_returns_pcc_struct)
2474 {
0088fcb1
RK
2475 rtx from_rtx, size;
2476
2477 push_temp_slots ();
33a20d10
RK
2478 size = expr_size (from);
2479 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
2480
2481#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2482 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
2483 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2484 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
2485 convert_to_mode (TYPE_MODE (sizetype),
2486 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2487 TYPE_MODE (sizetype));
bbf6f052 2488#else
d562e42e 2489 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
2490 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2491 XEXP (to_rtx, 0), Pmode,
0fa83258
RK
2492 convert_to_mode (TYPE_MODE (sizetype),
2493 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2494 TYPE_MODE (sizetype));
bbf6f052
RK
2495#endif
2496
2497 preserve_temp_slots (to_rtx);
2498 free_temp_slots ();
0088fcb1 2499 pop_temp_slots ();
709f5be1 2500 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
2501 }
2502
2503 /* Compute FROM and store the value in the rtx we got. */
2504
0088fcb1 2505 push_temp_slots ();
bbf6f052
RK
2506 result = store_expr (from, to_rtx, want_value);
2507 preserve_temp_slots (result);
2508 free_temp_slots ();
0088fcb1 2509 pop_temp_slots ();
709f5be1 2510 return want_value ? result : NULL_RTX;
bbf6f052
RK
2511}
2512
2513/* Generate code for computing expression EXP,
2514 and storing the value into TARGET.
bbf6f052
RK
2515 TARGET may contain a QUEUED rtx.
2516
709f5be1
RS
2517 If WANT_VALUE is nonzero, return a copy of the value
2518 not in TARGET, so that we can be sure to use the proper
2519 value in a containing expression even if TARGET has something
2520 else stored in it. If possible, we copy the value through a pseudo
2521 and return that pseudo. Or, if the value is constant, we try to
2522 return the constant. In some cases, we return a pseudo
2523 copied *from* TARGET.
2524
2525 If the mode is BLKmode then we may return TARGET itself.
2526 It turns out that in BLKmode it doesn't cause a problem.
2527 because C has no operators that could combine two different
2528 assignments into the same BLKmode object with different values
2529 with no sequence point. Will other languages need this to
2530 be more thorough?
2531
2532 If WANT_VALUE is 0, we return NULL, to make sure
2533 to catch quickly any cases where the caller uses the value
2534 and fails to set WANT_VALUE. */
bbf6f052
RK
2535
2536rtx
709f5be1 2537store_expr (exp, target, want_value)
bbf6f052
RK
2538 register tree exp;
2539 register rtx target;
709f5be1 2540 int want_value;
bbf6f052
RK
2541{
2542 register rtx temp;
2543 int dont_return_target = 0;
2544
2545 if (TREE_CODE (exp) == COMPOUND_EXPR)
2546 {
2547 /* Perform first part of compound expression, then assign from second
2548 part. */
2549 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2550 emit_queue ();
709f5be1 2551 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
2552 }
2553 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2554 {
2555 /* For conditional expression, get safe form of the target. Then
2556 test the condition, doing the appropriate assignment on either
2557 side. This avoids the creation of unnecessary temporaries.
2558 For non-BLKmode, it is more efficient not to do this. */
2559
2560 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2561
2562 emit_queue ();
2563 target = protect_from_queue (target, 1);
2564
2565 NO_DEFER_POP;
2566 jumpifnot (TREE_OPERAND (exp, 0), lab1);
709f5be1 2567 store_expr (TREE_OPERAND (exp, 1), target, 0);
bbf6f052
RK
2568 emit_queue ();
2569 emit_jump_insn (gen_jump (lab2));
2570 emit_barrier ();
2571 emit_label (lab1);
709f5be1 2572 store_expr (TREE_OPERAND (exp, 2), target, 0);
bbf6f052
RK
2573 emit_queue ();
2574 emit_label (lab2);
2575 OK_DEFER_POP;
709f5be1 2576 return want_value ? target : NULL_RTX;
bbf6f052 2577 }
709f5be1 2578 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
bbf6f052
RK
2579 && GET_MODE (target) != BLKmode)
2580 /* If target is in memory and caller wants value in a register instead,
2581 arrange that. Pass TARGET as target for expand_expr so that,
709f5be1 2582 if EXP is another assignment, WANT_VALUE will be nonzero for it.
c2e6aff6
RS
2583 We know expand_expr will not use the target in that case.
2584 Don't do this if TARGET is volatile because we are supposed
2585 to write it and then read it. */
bbf6f052 2586 {
906c4e36 2587 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
2588 GET_MODE (target), 0);
2589 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2590 temp = copy_to_reg (temp);
2591 dont_return_target = 1;
2592 }
2593 else if (queued_subexp_p (target))
709f5be1
RS
2594 /* If target contains a postincrement, let's not risk
2595 using it as the place to generate the rhs. */
bbf6f052
RK
2596 {
2597 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2598 {
2599 /* Expand EXP into a new pseudo. */
2600 temp = gen_reg_rtx (GET_MODE (target));
2601 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2602 }
2603 else
906c4e36 2604 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
2605
2606 /* If target is volatile, ANSI requires accessing the value
2607 *from* the target, if it is accessed. So make that happen.
2608 In no case return the target itself. */
2609 if (! MEM_VOLATILE_P (target) && want_value)
2610 dont_return_target = 1;
bbf6f052 2611 }
1499e0a8
RK
2612 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2613 /* If this is an scalar in a register that is stored in a wider mode
2614 than the declared mode, compute the result into its declared mode
2615 and then convert to the wider mode. Our value is the computed
2616 expression. */
2617 {
2618 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c
RS
2619
2620 /* If TEMP is a VOIDmode constant, use convert_modes to make
2621 sure that we properly convert it. */
2622 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2623 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2624 TYPE_MODE (TREE_TYPE (exp)), temp,
2625 SUBREG_PROMOTED_UNSIGNED_P (target));
2626
1499e0a8
RK
2627 convert_move (SUBREG_REG (target), temp,
2628 SUBREG_PROMOTED_UNSIGNED_P (target));
709f5be1 2629 return want_value ? temp : NULL_RTX;
1499e0a8 2630 }
bbf6f052
RK
2631 else
2632 {
2633 temp = expand_expr (exp, target, GET_MODE (target), 0);
2634 /* DO return TARGET if it's a specified hardware register.
c2e6aff6 2635 expand_return relies on this.
709f5be1
RS
2636 If TARGET is a volatile mem ref, either return TARGET
2637 or return a reg copied *from* TARGET; ANSI requires this.
2638
2639 Otherwise, if TEMP is not TARGET, return TEMP
2640 if it is constant (for efficiency),
2641 or if we really want the correct value. */
bbf6f052
RK
2642 if (!(target && GET_CODE (target) == REG
2643 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1
RS
2644 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2645 && temp != target
2646 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
2647 dont_return_target = 1;
2648 }
2649
b258707c
RS
2650 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2651 the same as that of TARGET, adjust the constant. This is needed, for
2652 example, in case it is a CONST_DOUBLE and we want only a word-sized
2653 value. */
2654 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2655 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2656 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2657 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2658
bbf6f052
RK
2659 /* If value was not generated in the target, store it there.
2660 Convert the value to TARGET's type first if nec. */
2661
2662 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2663 {
2664 target = protect_from_queue (target, 1);
2665 if (GET_MODE (temp) != GET_MODE (target)
2666 && GET_MODE (temp) != VOIDmode)
2667 {
2668 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2669 if (dont_return_target)
2670 {
2671 /* In this case, we will return TEMP,
2672 so make sure it has the proper mode.
2673 But don't forget to store the value into TARGET. */
2674 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2675 emit_move_insn (target, temp);
2676 }
2677 else
2678 convert_move (target, temp, unsignedp);
2679 }
2680
2681 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2682 {
2683 /* Handle copying a string constant into an array.
2684 The string constant may be shorter than the array.
2685 So copy just the string's actual length, and clear the rest. */
2686 rtx size;
2687
e87b4f3f
RS
2688 /* Get the size of the data type of the string,
2689 which is actually the size of the target. */
2690 size = expr_size (exp);
2691 if (GET_CODE (size) == CONST_INT
2692 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2693 emit_block_move (target, temp, size,
2694 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2695 else
bbf6f052 2696 {
e87b4f3f
RS
2697 /* Compute the size of the data to copy from the string. */
2698 tree copy_size
c03b7665 2699 = size_binop (MIN_EXPR,
b50d17a1 2700 make_tree (sizetype, size),
c03b7665
RK
2701 convert (sizetype,
2702 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
2703 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2704 VOIDmode, 0);
e87b4f3f
RS
2705 rtx label = 0;
2706
2707 /* Copy that much. */
2708 emit_block_move (target, temp, copy_size_rtx,
2709 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2710
2711 /* Figure out how much is left in TARGET
2712 that we have to clear. */
2713 if (GET_CODE (copy_size_rtx) == CONST_INT)
2714 {
2715 temp = plus_constant (XEXP (target, 0),
2716 TREE_STRING_LENGTH (exp));
2717 size = plus_constant (size,
2718 - TREE_STRING_LENGTH (exp));
2719 }
2720 else
2721 {
2722 enum machine_mode size_mode = Pmode;
2723
2724 temp = force_reg (Pmode, XEXP (target, 0));
2725 temp = expand_binop (size_mode, add_optab, temp,
906c4e36
RK
2726 copy_size_rtx, NULL_RTX, 0,
2727 OPTAB_LIB_WIDEN);
e87b4f3f
RS
2728
2729 size = expand_binop (size_mode, sub_optab, size,
906c4e36
RK
2730 copy_size_rtx, NULL_RTX, 0,
2731 OPTAB_LIB_WIDEN);
e87b4f3f 2732
906c4e36 2733 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
2734 GET_MODE (size), 0, 0);
2735 label = gen_label_rtx ();
2736 emit_jump_insn (gen_blt (label));
2737 }
2738
2739 if (size != const0_rtx)
2740 {
bbf6f052 2741#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2742 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
e87b4f3f 2743 temp, Pmode, const0_rtx, Pmode, size, Pmode);
bbf6f052 2744#else
d562e42e 2745 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
e87b4f3f 2746 temp, Pmode, size, Pmode);
bbf6f052 2747#endif
e87b4f3f
RS
2748 }
2749 if (label)
2750 emit_label (label);
bbf6f052
RK
2751 }
2752 }
2753 else if (GET_MODE (temp) == BLKmode)
2754 emit_block_move (target, temp, expr_size (exp),
2755 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2756 else
2757 emit_move_insn (target, temp);
2758 }
709f5be1 2759
7d26fec6 2760 if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 2761 return temp;
709f5be1
RS
2762 if (want_value && GET_MODE (target) != BLKmode)
2763 return copy_to_reg (target);
2764 if (want_value)
2765 return target;
2766 return NULL_RTX;
bbf6f052
RK
2767}
2768\f
2769/* Store the value of constructor EXP into the rtx TARGET.
2770 TARGET is either a REG or a MEM. */
2771
2772static void
2773store_constructor (exp, target)
2774 tree exp;
2775 rtx target;
2776{
4af3895e
JVA
2777 tree type = TREE_TYPE (exp);
2778
bbf6f052
RK
2779 /* We know our target cannot conflict, since safe_from_p has been called. */
2780#if 0
2781 /* Don't try copying piece by piece into a hard register
2782 since that is vulnerable to being clobbered by EXP.
2783 Instead, construct in a pseudo register and then copy it all. */
2784 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2785 {
2786 rtx temp = gen_reg_rtx (GET_MODE (target));
2787 store_constructor (exp, temp);
2788 emit_move_insn (target, temp);
2789 return;
2790 }
2791#endif
2792
e44842fe
RK
2793 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2794 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
2795 {
2796 register tree elt;
2797
4af3895e 2798 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
2799 if (TREE_CODE (type) == UNION_TYPE
2800 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 2801 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
4af3895e
JVA
2802
2803 /* If we are building a static constructor into a register,
2804 set the initial value as zero so we can fold the value into
2805 a constant. */
2806 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2807 emit_move_insn (target, const0_rtx);
2808
bbf6f052
RK
2809 /* If the constructor has fewer fields than the structure,
2810 clear the whole structure first. */
2811 else if (list_length (CONSTRUCTOR_ELTS (exp))
4af3895e
JVA
2812 != list_length (TYPE_FIELDS (type)))
2813 clear_storage (target, int_size_in_bytes (type));
bbf6f052
RK
2814 else
2815 /* Inform later passes that the old value is dead. */
2816 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2817
2818 /* Store each element of the constructor into
2819 the corresponding field of TARGET. */
2820
2821 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2822 {
2823 register tree field = TREE_PURPOSE (elt);
2824 register enum machine_mode mode;
2825 int bitsize;
b50d17a1 2826 int bitpos = 0;
bbf6f052 2827 int unsignedp;
b50d17a1
RK
2828 tree pos, constant = 0, offset = 0;
2829 rtx to_rtx = target;
bbf6f052 2830
f32fd778
RS
2831 /* Just ignore missing fields.
2832 We cleared the whole structure, above,
2833 if any fields are missing. */
2834 if (field == 0)
2835 continue;
2836
bbf6f052
RK
2837 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2838 unsignedp = TREE_UNSIGNED (field);
2839 mode = DECL_MODE (field);
2840 if (DECL_BIT_FIELD (field))
2841 mode = VOIDmode;
2842
b50d17a1
RK
2843 pos = DECL_FIELD_BITPOS (field);
2844 if (TREE_CODE (pos) == INTEGER_CST)
2845 constant = pos;
2846 else if (TREE_CODE (pos) == PLUS_EXPR
2847 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2848 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
2849 else
2850 offset = pos;
2851
2852 if (constant)
2853 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2854
2855 if (offset)
2856 {
2857 rtx offset_rtx;
2858
2859 if (contains_placeholder_p (offset))
2860 offset = build (WITH_RECORD_EXPR, sizetype,
2861 offset, exp);
bbf6f052 2862
b50d17a1
RK
2863 offset = size_binop (FLOOR_DIV_EXPR, offset,
2864 size_int (BITS_PER_UNIT));
bbf6f052 2865
b50d17a1
RK
2866 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2867 if (GET_CODE (to_rtx) != MEM)
2868 abort ();
2869
2870 to_rtx
2871 = change_address (to_rtx, VOIDmode,
2872 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2873 force_reg (Pmode, offset_rtx)));
2874 }
2875
2876 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
bbf6f052
RK
2877 /* The alignment of TARGET is
2878 at least what its type requires. */
2879 VOIDmode, 0,
4af3895e
JVA
2880 TYPE_ALIGN (type) / BITS_PER_UNIT,
2881 int_size_in_bytes (type));
bbf6f052
RK
2882 }
2883 }
4af3895e 2884 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
2885 {
2886 register tree elt;
2887 register int i;
4af3895e 2888 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
2889 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2890 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 2891 tree elttype = TREE_TYPE (type);
bbf6f052
RK
2892
2893 /* If the constructor has fewer fields than the structure,
4af3895e
JVA
2894 clear the whole structure first. Similarly if this this is
2895 static constructor of a non-BLKmode object. */
bbf6f052 2896
4af3895e
JVA
2897 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2898 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
597bb7f1 2899 clear_storage (target, int_size_in_bytes (type));
bbf6f052
RK
2900 else
2901 /* Inform later passes that the old value is dead. */
2902 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2903
2904 /* Store each element of the constructor into
2905 the corresponding element of TARGET, determined
2906 by counting the elements. */
2907 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2908 elt;
2909 elt = TREE_CHAIN (elt), i++)
2910 {
2911 register enum machine_mode mode;
2912 int bitsize;
2913 int bitpos;
2914 int unsignedp;
03dc44a6
RS
2915 tree index = TREE_PURPOSE (elt);
2916 rtx xtarget = target;
bbf6f052
RK
2917
2918 mode = TYPE_MODE (elttype);
2919 bitsize = GET_MODE_BITSIZE (mode);
2920 unsignedp = TREE_UNSIGNED (elttype);
2921
03dc44a6
RS
2922 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
2923 {
2924 /* We don't currently allow variable indices in a
2925 C initializer, but let's try here to support them. */
2926 rtx pos_rtx, addr, xtarget;
2927 tree position;
2928
2929 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
2930 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
2931 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
2932 xtarget = change_address (target, mode, addr);
2933 store_expr (TREE_VALUE (elt), xtarget, 0);
2934 }
2935 else
2936 {
2937 if (index != 0)
7c314719 2938 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
2939 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2940 else
2941 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2942
2943 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
2944 /* The alignment of TARGET is
2945 at least what its type requires. */
2946 VOIDmode, 0,
2947 TYPE_ALIGN (type) / BITS_PER_UNIT,
2948 int_size_in_bytes (type));
2949 }
bbf6f052
RK
2950 }
2951 }
2952
2953 else
2954 abort ();
2955}
2956
2957/* Store the value of EXP (an expression tree)
2958 into a subfield of TARGET which has mode MODE and occupies
2959 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2960 If MODE is VOIDmode, it means that we are storing into a bit-field.
2961
2962 If VALUE_MODE is VOIDmode, return nothing in particular.
2963 UNSIGNEDP is not used in this case.
2964
2965 Otherwise, return an rtx for the value stored. This rtx
2966 has mode VALUE_MODE if that is convenient to do.
2967 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2968
2969 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2970 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2971
2972static rtx
2973store_field (target, bitsize, bitpos, mode, exp, value_mode,
2974 unsignedp, align, total_size)
2975 rtx target;
2976 int bitsize, bitpos;
2977 enum machine_mode mode;
2978 tree exp;
2979 enum machine_mode value_mode;
2980 int unsignedp;
2981 int align;
2982 int total_size;
2983{
906c4e36 2984 HOST_WIDE_INT width_mask = 0;
bbf6f052 2985
906c4e36
RK
2986 if (bitsize < HOST_BITS_PER_WIDE_INT)
2987 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
2988
2989 /* If we are storing into an unaligned field of an aligned union that is
2990 in a register, we may have the mode of TARGET being an integer mode but
2991 MODE == BLKmode. In that case, get an aligned object whose size and
2992 alignment are the same as TARGET and store TARGET into it (we can avoid
2993 the store if the field being stored is the entire width of TARGET). Then
2994 call ourselves recursively to store the field into a BLKmode version of
2995 that object. Finally, load from the object into TARGET. This is not
2996 very efficient in general, but should only be slightly more expensive
2997 than the otherwise-required unaligned accesses. Perhaps this can be
2998 cleaned up later. */
2999
3000 if (mode == BLKmode
3001 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3002 {
3003 rtx object = assign_stack_temp (GET_MODE (target),
3004 GET_MODE_SIZE (GET_MODE (target)), 0);
3005 rtx blk_object = copy_rtx (object);
3006
3007 PUT_MODE (blk_object, BLKmode);
3008
3009 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3010 emit_move_insn (object, target);
3011
3012 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3013 align, total_size);
3014
46093b97
RS
3015 /* Even though we aren't returning target, we need to
3016 give it the updated value. */
bbf6f052
RK
3017 emit_move_insn (target, object);
3018
46093b97 3019 return blk_object;
bbf6f052
RK
3020 }
3021
3022 /* If the structure is in a register or if the component
3023 is a bit field, we cannot use addressing to access it.
3024 Use bit-field techniques or SUBREG to store in it. */
3025
4fa52007
RK
3026 if (mode == VOIDmode
3027 || (mode != BLKmode && ! direct_store[(int) mode])
3028 || GET_CODE (target) == REG
c980ac49 3029 || GET_CODE (target) == SUBREG
ccc98036
RS
3030 /* If the field isn't aligned enough to store as an ordinary memref,
3031 store it as a bit field. */
3032 || (STRICT_ALIGNMENT
3033 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3034 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 3035 {
906c4e36 3036 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73
RK
3037
3038 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3039 MODE. */
3040 if (mode != VOIDmode && mode != BLKmode
3041 && mode != TYPE_MODE (TREE_TYPE (exp)))
3042 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3043
bbf6f052
RK
3044 /* Store the value in the bitfield. */
3045 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3046 if (value_mode != VOIDmode)
3047 {
3048 /* The caller wants an rtx for the value. */
3049 /* If possible, avoid refetching from the bitfield itself. */
3050 if (width_mask != 0
3051 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 3052 {
9074de27 3053 tree count;
5c4d7cfb 3054 enum machine_mode tmode;
86a2c12a 3055
5c4d7cfb
RS
3056 if (unsignedp)
3057 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3058 tmode = GET_MODE (temp);
86a2c12a
RS
3059 if (tmode == VOIDmode)
3060 tmode = value_mode;
5c4d7cfb
RS
3061 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3062 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3063 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3064 }
bbf6f052 3065 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
3066 NULL_RTX, value_mode, 0, align,
3067 total_size);
bbf6f052
RK
3068 }
3069 return const0_rtx;
3070 }
3071 else
3072 {
3073 rtx addr = XEXP (target, 0);
3074 rtx to_rtx;
3075
3076 /* If a value is wanted, it must be the lhs;
3077 so make the address stable for multiple use. */
3078
3079 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3080 && ! CONSTANT_ADDRESS_P (addr)
3081 /* A frame-pointer reference is already stable. */
3082 && ! (GET_CODE (addr) == PLUS
3083 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3084 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3085 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3086 addr = copy_to_reg (addr);
3087
3088 /* Now build a reference to just the desired component. */
3089
3090 to_rtx = change_address (target, mode,
3091 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3092 MEM_IN_STRUCT_P (to_rtx) = 1;
3093
3094 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3095 }
3096}
3097\f
3098/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3099 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 3100 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
3101
3102 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3103 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
3104 If the position of the field is variable, we store a tree
3105 giving the variable offset (in units) in *POFFSET.
3106 This offset is in addition to the bit position.
3107 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
3108
3109 If any of the extraction expressions is volatile,
3110 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3111
3112 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3113 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
3114 is redundant.
3115
3116 If the field describes a variable-sized object, *PMODE is set to
3117 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3118 this case, but the address of the object can be found. */
bbf6f052
RK
3119
3120tree
4969d05d
RK
3121get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3122 punsignedp, pvolatilep)
bbf6f052
RK
3123 tree exp;
3124 int *pbitsize;
3125 int *pbitpos;
7bb0943f 3126 tree *poffset;
bbf6f052
RK
3127 enum machine_mode *pmode;
3128 int *punsignedp;
3129 int *pvolatilep;
3130{
b50d17a1 3131 tree orig_exp = exp;
bbf6f052
RK
3132 tree size_tree = 0;
3133 enum machine_mode mode = VOIDmode;
742920c7 3134 tree offset = integer_zero_node;
bbf6f052
RK
3135
3136 if (TREE_CODE (exp) == COMPONENT_REF)
3137 {
3138 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3139 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3140 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3141 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3142 }
3143 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3144 {
3145 size_tree = TREE_OPERAND (exp, 1);
3146 *punsignedp = TREE_UNSIGNED (exp);
3147 }
3148 else
3149 {
3150 mode = TYPE_MODE (TREE_TYPE (exp));
3151 *pbitsize = GET_MODE_BITSIZE (mode);
3152 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3153 }
3154
3155 if (size_tree)
3156 {
3157 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
3158 mode = BLKmode, *pbitsize = -1;
3159 else
3160 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
3161 }
3162
3163 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3164 and find the ultimate containing object. */
3165
3166 *pbitpos = 0;
3167
3168 while (1)
3169 {
7bb0943f 3170 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 3171 {
7bb0943f
RS
3172 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3173 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3174 : TREE_OPERAND (exp, 2));
bbf6f052 3175
e7f3c83f
RK
3176 /* If this field hasn't been filled in yet, don't go
3177 past it. This should only happen when folding expressions
3178 made during type construction. */
3179 if (pos == 0)
3180 break;
3181
7bb0943f
RS
3182 if (TREE_CODE (pos) == PLUS_EXPR)
3183 {
3184 tree constant, var;
3185 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3186 {
3187 constant = TREE_OPERAND (pos, 0);
3188 var = TREE_OPERAND (pos, 1);
3189 }
3190 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3191 {
3192 constant = TREE_OPERAND (pos, 1);
3193 var = TREE_OPERAND (pos, 0);
3194 }
3195 else
3196 abort ();
742920c7 3197
7bb0943f 3198 *pbitpos += TREE_INT_CST_LOW (constant);
742920c7
RK
3199 offset = size_binop (PLUS_EXPR, offset,
3200 size_binop (FLOOR_DIV_EXPR, var,
3201 size_int (BITS_PER_UNIT)));
7bb0943f
RS
3202 }
3203 else if (TREE_CODE (pos) == INTEGER_CST)
3204 *pbitpos += TREE_INT_CST_LOW (pos);
3205 else
3206 {
3207 /* Assume here that the offset is a multiple of a unit.
3208 If not, there should be an explicitly added constant. */
742920c7
RK
3209 offset = size_binop (PLUS_EXPR, offset,
3210 size_binop (FLOOR_DIV_EXPR, pos,
3211 size_int (BITS_PER_UNIT)));
7bb0943f 3212 }
bbf6f052 3213 }
bbf6f052 3214
742920c7 3215 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 3216 {
742920c7
RK
3217 /* This code is based on the code in case ARRAY_REF in expand_expr
3218 below. We assume here that the size of an array element is
3219 always an integral multiple of BITS_PER_UNIT. */
3220
3221 tree index = TREE_OPERAND (exp, 1);
3222 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3223 tree low_bound
3224 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3225 tree index_type = TREE_TYPE (index);
3226
3227 if (! integer_zerop (low_bound))
3228 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3229
3230 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3231 {
3232 index = convert (type_for_size (POINTER_SIZE, 0), index);
3233 index_type = TREE_TYPE (index);
3234 }
3235
3236 index = fold (build (MULT_EXPR, index_type, index,
3237 TYPE_SIZE (TREE_TYPE (exp))));
3238
3239 if (TREE_CODE (index) == INTEGER_CST
3240 && TREE_INT_CST_HIGH (index) == 0)
3241 *pbitpos += TREE_INT_CST_LOW (index);
3242 else
3243 offset = size_binop (PLUS_EXPR, offset,
3244 size_binop (FLOOR_DIV_EXPR, index,
3245 size_int (BITS_PER_UNIT)));
bbf6f052
RK
3246 }
3247 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3248 && ! ((TREE_CODE (exp) == NOP_EXPR
3249 || TREE_CODE (exp) == CONVERT_EXPR)
3250 && (TYPE_MODE (TREE_TYPE (exp))
3251 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3252 break;
7bb0943f
RS
3253
3254 /* If any reference in the chain is volatile, the effect is volatile. */
3255 if (TREE_THIS_VOLATILE (exp))
3256 *pvolatilep = 1;
bbf6f052
RK
3257 exp = TREE_OPERAND (exp, 0);
3258 }
3259
3260 /* If this was a bit-field, see if there is a mode that allows direct
3261 access in case EXP is in memory. */
e7f3c83f 3262 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
bbf6f052
RK
3263 {
3264 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3265 if (mode == BLKmode)
3266 mode = VOIDmode;
3267 }
3268
742920c7
RK
3269 if (integer_zerop (offset))
3270 offset = 0;
3271
b50d17a1
RK
3272 if (offset != 0 && contains_placeholder_p (offset))
3273 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3274
bbf6f052 3275 *pmode = mode;
7bb0943f 3276 *poffset = offset;
bbf6f052
RK
3277 return exp;
3278}
3279\f
3280/* Given an rtx VALUE that may contain additions and multiplications,
3281 return an equivalent value that just refers to a register or memory.
3282 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
3283 and returning a pseudo-register containing the value.
3284
3285 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
3286
3287rtx
3288force_operand (value, target)
3289 rtx value, target;
3290{
3291 register optab binoptab = 0;
3292 /* Use a temporary to force order of execution of calls to
3293 `force_operand'. */
3294 rtx tmp;
3295 register rtx op2;
3296 /* Use subtarget as the target for operand 0 of a binary operation. */
3297 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3298
3299 if (GET_CODE (value) == PLUS)
3300 binoptab = add_optab;
3301 else if (GET_CODE (value) == MINUS)
3302 binoptab = sub_optab;
3303 else if (GET_CODE (value) == MULT)
3304 {
3305 op2 = XEXP (value, 1);
3306 if (!CONSTANT_P (op2)
3307 && !(GET_CODE (op2) == REG && op2 != subtarget))
3308 subtarget = 0;
3309 tmp = force_operand (XEXP (value, 0), subtarget);
3310 return expand_mult (GET_MODE (value), tmp,
906c4e36 3311 force_operand (op2, NULL_RTX),
bbf6f052
RK
3312 target, 0);
3313 }
3314
3315 if (binoptab)
3316 {
3317 op2 = XEXP (value, 1);
3318 if (!CONSTANT_P (op2)
3319 && !(GET_CODE (op2) == REG && op2 != subtarget))
3320 subtarget = 0;
3321 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3322 {
3323 binoptab = add_optab;
3324 op2 = negate_rtx (GET_MODE (value), op2);
3325 }
3326
3327 /* Check for an addition with OP2 a constant integer and our first
3328 operand a PLUS of a virtual register and something else. In that
3329 case, we want to emit the sum of the virtual register and the
3330 constant first and then add the other value. This allows virtual
3331 register instantiation to simply modify the constant rather than
3332 creating another one around this addition. */
3333 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3334 && GET_CODE (XEXP (value, 0)) == PLUS
3335 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3336 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3337 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3338 {
3339 rtx temp = expand_binop (GET_MODE (value), binoptab,
3340 XEXP (XEXP (value, 0), 0), op2,
3341 subtarget, 0, OPTAB_LIB_WIDEN);
3342 return expand_binop (GET_MODE (value), binoptab, temp,
3343 force_operand (XEXP (XEXP (value, 0), 1), 0),
3344 target, 0, OPTAB_LIB_WIDEN);
3345 }
3346
3347 tmp = force_operand (XEXP (value, 0), subtarget);
3348 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 3349 force_operand (op2, NULL_RTX),
bbf6f052 3350 target, 0, OPTAB_LIB_WIDEN);
8008b228 3351 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
3352 because the only operations we are expanding here are signed ones. */
3353 }
3354 return value;
3355}
3356\f
3357/* Subroutine of expand_expr:
3358 save the non-copied parts (LIST) of an expr (LHS), and return a list
3359 which can restore these values to their previous values,
3360 should something modify their storage. */
3361
3362static tree
3363save_noncopied_parts (lhs, list)
3364 tree lhs;
3365 tree list;
3366{
3367 tree tail;
3368 tree parts = 0;
3369
3370 for (tail = list; tail; tail = TREE_CHAIN (tail))
3371 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3372 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3373 else
3374 {
3375 tree part = TREE_VALUE (tail);
3376 tree part_type = TREE_TYPE (part);
906c4e36 3377 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
3378 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3379 int_size_in_bytes (part_type), 0);
3380 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 3381 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 3382 parts = tree_cons (to_be_saved,
906c4e36
RK
3383 build (RTL_EXPR, part_type, NULL_TREE,
3384 (tree) target),
bbf6f052
RK
3385 parts);
3386 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3387 }
3388 return parts;
3389}
3390
3391/* Subroutine of expand_expr:
3392 record the non-copied parts (LIST) of an expr (LHS), and return a list
3393 which specifies the initial values of these parts. */
3394
3395static tree
3396init_noncopied_parts (lhs, list)
3397 tree lhs;
3398 tree list;
3399{
3400 tree tail;
3401 tree parts = 0;
3402
3403 for (tail = list; tail; tail = TREE_CHAIN (tail))
3404 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3405 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3406 else
3407 {
3408 tree part = TREE_VALUE (tail);
3409 tree part_type = TREE_TYPE (part);
906c4e36 3410 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
3411 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3412 }
3413 return parts;
3414}
3415
3416/* Subroutine of expand_expr: return nonzero iff there is no way that
3417 EXP can reference X, which is being modified. */
3418
3419static int
3420safe_from_p (x, exp)
3421 rtx x;
3422 tree exp;
3423{
3424 rtx exp_rtl = 0;
3425 int i, nops;
3426
3427 if (x == 0)
3428 return 1;
3429
3430 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3431 find the underlying pseudo. */
3432 if (GET_CODE (x) == SUBREG)
3433 {
3434 x = SUBREG_REG (x);
3435 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3436 return 0;
3437 }
3438
3439 /* If X is a location in the outgoing argument area, it is always safe. */
3440 if (GET_CODE (x) == MEM
3441 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3442 || (GET_CODE (XEXP (x, 0)) == PLUS
3443 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3444 return 1;
3445
3446 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3447 {
3448 case 'd':
3449 exp_rtl = DECL_RTL (exp);
3450 break;
3451
3452 case 'c':
3453 return 1;
3454
3455 case 'x':
3456 if (TREE_CODE (exp) == TREE_LIST)
f32fd778
RS
3457 return ((TREE_VALUE (exp) == 0
3458 || safe_from_p (x, TREE_VALUE (exp)))
bbf6f052
RK
3459 && (TREE_CHAIN (exp) == 0
3460 || safe_from_p (x, TREE_CHAIN (exp))));
3461 else
3462 return 0;
3463
3464 case '1':
3465 return safe_from_p (x, TREE_OPERAND (exp, 0));
3466
3467 case '2':
3468 case '<':
3469 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3470 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3471
3472 case 'e':
3473 case 'r':
3474 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3475 the expression. If it is set, we conflict iff we are that rtx or
3476 both are in memory. Otherwise, we check all operands of the
3477 expression recursively. */
3478
3479 switch (TREE_CODE (exp))
3480 {
3481 case ADDR_EXPR:
e44842fe
RK
3482 return (staticp (TREE_OPERAND (exp, 0))
3483 || safe_from_p (x, TREE_OPERAND (exp, 0)));
bbf6f052
RK
3484
3485 case INDIRECT_REF:
3486 if (GET_CODE (x) == MEM)
3487 return 0;
3488 break;
3489
3490 case CALL_EXPR:
3491 exp_rtl = CALL_EXPR_RTL (exp);
3492 if (exp_rtl == 0)
3493 {
3494 /* Assume that the call will clobber all hard registers and
3495 all of memory. */
3496 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3497 || GET_CODE (x) == MEM)
3498 return 0;
3499 }
3500
3501 break;
3502
3503 case RTL_EXPR:
3504 exp_rtl = RTL_EXPR_RTL (exp);
3505 if (exp_rtl == 0)
3506 /* We don't know what this can modify. */
3507 return 0;
3508
3509 break;
3510
3511 case WITH_CLEANUP_EXPR:
3512 exp_rtl = RTL_EXPR_RTL (exp);
3513 break;
3514
3515 case SAVE_EXPR:
3516 exp_rtl = SAVE_EXPR_RTL (exp);
3517 break;
3518
8129842c
RS
3519 case BIND_EXPR:
3520 /* The only operand we look at is operand 1. The rest aren't
3521 part of the expression. */
3522 return safe_from_p (x, TREE_OPERAND (exp, 1));
3523
bbf6f052
RK
3524 case METHOD_CALL_EXPR:
3525 /* This takes a rtx argument, but shouldn't appear here. */
3526 abort ();
3527 }
3528
3529 /* If we have an rtx, we do not need to scan our operands. */
3530 if (exp_rtl)
3531 break;
3532
3533 nops = tree_code_length[(int) TREE_CODE (exp)];
3534 for (i = 0; i < nops; i++)
3535 if (TREE_OPERAND (exp, i) != 0
3536 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3537 return 0;
3538 }
3539
3540 /* If we have an rtl, find any enclosed object. Then see if we conflict
3541 with it. */
3542 if (exp_rtl)
3543 {
3544 if (GET_CODE (exp_rtl) == SUBREG)
3545 {
3546 exp_rtl = SUBREG_REG (exp_rtl);
3547 if (GET_CODE (exp_rtl) == REG
3548 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3549 return 0;
3550 }
3551
3552 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3553 are memory and EXP is not readonly. */
3554 return ! (rtx_equal_p (x, exp_rtl)
3555 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3556 && ! TREE_READONLY (exp)));
3557 }
3558
3559 /* If we reach here, it is safe. */
3560 return 1;
3561}
3562
3563/* Subroutine of expand_expr: return nonzero iff EXP is an
3564 expression whose type is statically determinable. */
3565
3566static int
3567fixed_type_p (exp)
3568 tree exp;
3569{
3570 if (TREE_CODE (exp) == PARM_DECL
3571 || TREE_CODE (exp) == VAR_DECL
3572 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3573 || TREE_CODE (exp) == COMPONENT_REF
3574 || TREE_CODE (exp) == ARRAY_REF)
3575 return 1;
3576 return 0;
3577}
3578\f
3579/* expand_expr: generate code for computing expression EXP.
3580 An rtx for the computed value is returned. The value is never null.
3581 In the case of a void EXP, const0_rtx is returned.
3582
3583 The value may be stored in TARGET if TARGET is nonzero.
3584 TARGET is just a suggestion; callers must assume that
3585 the rtx returned may not be the same as TARGET.
3586
3587 If TARGET is CONST0_RTX, it means that the value will be ignored.
3588
3589 If TMODE is not VOIDmode, it suggests generating the
3590 result in mode TMODE. But this is done only when convenient.
3591 Otherwise, TMODE is ignored and the value generated in its natural mode.
3592 TMODE is just a suggestion; callers must assume that
3593 the rtx returned may not have mode TMODE.
3594
3595 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3596 with a constant address even if that address is not normally legitimate.
3597 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3598
3599 If MODIFIER is EXPAND_SUM then when EXP is an addition
3600 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3601 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3602 products as above, or REG or MEM, or constant.
3603 Ordinarily in such cases we would output mul or add instructions
3604 and then return a pseudo reg containing the sum.
3605
3606 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3607 it also marks a label as absolutely required (it can't be dead).
26fcb35a 3608 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6dc42e49 3609 This is used for outputting expressions used in initializers. */
bbf6f052
RK
3610
3611rtx
3612expand_expr (exp, target, tmode, modifier)
3613 register tree exp;
3614 rtx target;
3615 enum machine_mode tmode;
3616 enum expand_modifier modifier;
3617{
b50d17a1
RK
3618 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
3619 This is static so it will be accessible to our recursive callees. */
3620 static tree placeholder_list = 0;
bbf6f052
RK
3621 register rtx op0, op1, temp;
3622 tree type = TREE_TYPE (exp);
3623 int unsignedp = TREE_UNSIGNED (type);
3624 register enum machine_mode mode = TYPE_MODE (type);
3625 register enum tree_code code = TREE_CODE (exp);
3626 optab this_optab;
3627 /* Use subtarget as the target for operand 0 of a binary operation. */
3628 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3629 rtx original_target = target;
ca695ac9 3630 /* Maybe defer this until sure not doing bytecode? */
dd27116b
RK
3631 int ignore = (target == const0_rtx
3632 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
3633 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3634 || code == COND_EXPR)
dd27116b 3635 && TREE_CODE (type) == VOID_TYPE));
bbf6f052
RK
3636 tree context;
3637
ca695ac9
JB
3638
3639 if (output_bytecode)
3640 {
3641 bc_expand_expr (exp);
3642 return NULL;
3643 }
3644
bbf6f052
RK
3645 /* Don't use hard regs as subtargets, because the combiner
3646 can only handle pseudo regs. */
3647 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3648 subtarget = 0;
3649 /* Avoid subtargets inside loops,
3650 since they hide some invariant expressions. */
3651 if (preserve_subexpressions_p ())
3652 subtarget = 0;
3653
dd27116b
RK
3654 /* If we are going to ignore this result, we need only do something
3655 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
3656 is, short-circuit the most common cases here. Note that we must
3657 not call expand_expr with anything but const0_rtx in case this
3658 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 3659
dd27116b
RK
3660 if (ignore)
3661 {
3662 if (! TREE_SIDE_EFFECTS (exp))
3663 return const0_rtx;
3664
3665 /* Ensure we reference a volatile object even if value is ignored. */
3666 if (TREE_THIS_VOLATILE (exp)
3667 && TREE_CODE (exp) != FUNCTION_DECL
3668 && mode != VOIDmode && mode != BLKmode)
3669 {
3670 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3671 if (GET_CODE (temp) == MEM)
3672 temp = copy_to_reg (temp);
3673 return const0_rtx;
3674 }
3675
3676 if (TREE_CODE_CLASS (code) == '1')
3677 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3678 VOIDmode, modifier);
3679 else if (TREE_CODE_CLASS (code) == '2'
3680 || TREE_CODE_CLASS (code) == '<')
3681 {
3682 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3683 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3684 return const0_rtx;
3685 }
3686 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3687 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3688 /* If the second operand has no side effects, just evaluate
3689 the first. */
3690 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3691 VOIDmode, modifier);
dd27116b 3692
90764a87 3693 target = 0;
dd27116b 3694 }
bbf6f052 3695
e44842fe
RK
3696 /* If will do cse, generate all results into pseudo registers
3697 since 1) that allows cse to find more things
3698 and 2) otherwise cse could produce an insn the machine
3699 cannot support. */
3700
bbf6f052
RK
3701 if (! cse_not_expected && mode != BLKmode && target
3702 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3703 target = subtarget;
3704
bbf6f052
RK
3705 switch (code)
3706 {
3707 case LABEL_DECL:
b552441b
RS
3708 {
3709 tree function = decl_function_context (exp);
3710 /* Handle using a label in a containing function. */
3711 if (function != current_function_decl && function != 0)
3712 {
3713 struct function *p = find_function_data (function);
3714 /* Allocate in the memory associated with the function
3715 that the label is in. */
3716 push_obstacks (p->function_obstack,
3717 p->function_maybepermanent_obstack);
3718
3719 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3720 label_rtx (exp), p->forced_labels);
3721 pop_obstacks ();
3722 }
3723 else if (modifier == EXPAND_INITIALIZER)
3724 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3725 label_rtx (exp), forced_labels);
26fcb35a 3726 temp = gen_rtx (MEM, FUNCTION_MODE,
b552441b 3727 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
26fcb35a
RS
3728 if (function != current_function_decl && function != 0)
3729 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3730 return temp;
b552441b 3731 }
bbf6f052
RK
3732
3733 case PARM_DECL:
3734 if (DECL_RTL (exp) == 0)
3735 {
3736 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 3737 return CONST0_RTX (mode);
bbf6f052
RK
3738 }
3739
bbf6f052 3740 case VAR_DECL:
2dca20cd
RS
3741 /* If a static var's type was incomplete when the decl was written,
3742 but the type is complete now, lay out the decl now. */
3743 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3744 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
3745 {
3746 push_obstacks_nochange ();
3747 end_temporary_allocation ();
3748 layout_decl (exp, 0);
3749 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
3750 pop_obstacks ();
3751 }
3752 case FUNCTION_DECL:
bbf6f052
RK
3753 case RESULT_DECL:
3754 if (DECL_RTL (exp) == 0)
3755 abort ();
e44842fe
RK
3756 /* Ensure variable marked as used even if it doesn't go through
3757 a parser. If it hasn't be used yet, write out an external
3758 definition. */
3759 if (! TREE_USED (exp))
3760 {
3761 assemble_external (exp);
3762 TREE_USED (exp) = 1;
3763 }
3764
bbf6f052
RK
3765 /* Handle variables inherited from containing functions. */
3766 context = decl_function_context (exp);
3767
3768 /* We treat inline_function_decl as an alias for the current function
3769 because that is the inline function whose vars, types, etc.
3770 are being merged into the current function.
3771 See expand_inline_function. */
3772 if (context != 0 && context != current_function_decl
3773 && context != inline_function_decl
3774 /* If var is static, we don't need a static chain to access it. */
3775 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3776 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3777 {
3778 rtx addr;
3779
3780 /* Mark as non-local and addressable. */
81feeecb 3781 DECL_NONLOCAL (exp) = 1;
bbf6f052
RK
3782 mark_addressable (exp);
3783 if (GET_CODE (DECL_RTL (exp)) != MEM)
3784 abort ();
3785 addr = XEXP (DECL_RTL (exp), 0);
3786 if (GET_CODE (addr) == MEM)
3787 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3788 else
3789 addr = fix_lexical_addr (addr, exp);
3790 return change_address (DECL_RTL (exp), mode, addr);
3791 }
4af3895e 3792
bbf6f052
RK
3793 /* This is the case of an array whose size is to be determined
3794 from its initializer, while the initializer is still being parsed.
3795 See expand_decl. */
3796 if (GET_CODE (DECL_RTL (exp)) == MEM
3797 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3798 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3799 XEXP (DECL_RTL (exp), 0));
3800 if (GET_CODE (DECL_RTL (exp)) == MEM
3801 && modifier != EXPAND_CONST_ADDRESS
3802 && modifier != EXPAND_SUM
3803 && modifier != EXPAND_INITIALIZER)
3804 {
3805 /* DECL_RTL probably contains a constant address.
3806 On RISC machines where a constant address isn't valid,
3807 make some insns to get that address into a register. */
3808 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3809 || (flag_force_addr
3810 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3811 return change_address (DECL_RTL (exp), VOIDmode,
3812 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3813 }
1499e0a8
RK
3814
3815 /* If the mode of DECL_RTL does not match that of the decl, it
3816 must be a promoted value. We return a SUBREG of the wanted mode,
3817 but mark it so that we know that it was already extended. */
3818
3819 if (GET_CODE (DECL_RTL (exp)) == REG
3820 && GET_MODE (DECL_RTL (exp)) != mode)
3821 {
3822 enum machine_mode decl_mode = DECL_MODE (exp);
3823
3824 /* Get the signedness used for this variable. Ensure we get the
3825 same mode we got when the variable was declared. */
3826
3827 PROMOTE_MODE (decl_mode, unsignedp, type);
3828
3829 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3830 abort ();
3831
3832 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3833 SUBREG_PROMOTED_VAR_P (temp) = 1;
3834 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3835 return temp;
3836 }
3837
bbf6f052
RK
3838 return DECL_RTL (exp);
3839
3840 case INTEGER_CST:
3841 return immed_double_const (TREE_INT_CST_LOW (exp),
3842 TREE_INT_CST_HIGH (exp),
3843 mode);
3844
3845 case CONST_DECL:
3846 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3847
3848 case REAL_CST:
3849 /* If optimized, generate immediate CONST_DOUBLE
3850 which will be turned into memory by reload if necessary.
3851
3852 We used to force a register so that loop.c could see it. But
3853 this does not allow gen_* patterns to perform optimizations with
3854 the constants. It also produces two insns in cases like "x = 1.0;".
3855 On most machines, floating-point constants are not permitted in
3856 many insns, so we'd end up copying it to a register in any case.
3857
3858 Now, we do the copying in expand_binop, if appropriate. */
3859 return immed_real_const (exp);
3860
3861 case COMPLEX_CST:
3862 case STRING_CST:
3863 if (! TREE_CST_RTL (exp))
3864 output_constant_def (exp);
3865
3866 /* TREE_CST_RTL probably contains a constant address.
3867 On RISC machines where a constant address isn't valid,
3868 make some insns to get that address into a register. */
3869 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3870 && modifier != EXPAND_CONST_ADDRESS
3871 && modifier != EXPAND_INITIALIZER
3872 && modifier != EXPAND_SUM
3873 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3874 return change_address (TREE_CST_RTL (exp), VOIDmode,
3875 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3876 return TREE_CST_RTL (exp);
3877
3878 case SAVE_EXPR:
3879 context = decl_function_context (exp);
3880 /* We treat inline_function_decl as an alias for the current function
3881 because that is the inline function whose vars, types, etc.
3882 are being merged into the current function.
3883 See expand_inline_function. */
3884 if (context == current_function_decl || context == inline_function_decl)
3885 context = 0;
3886
3887 /* If this is non-local, handle it. */
3888 if (context)
3889 {
3890 temp = SAVE_EXPR_RTL (exp);
3891 if (temp && GET_CODE (temp) == REG)
3892 {
3893 put_var_into_stack (exp);
3894 temp = SAVE_EXPR_RTL (exp);
3895 }
3896 if (temp == 0 || GET_CODE (temp) != MEM)
3897 abort ();
3898 return change_address (temp, mode,
3899 fix_lexical_addr (XEXP (temp, 0), exp));
3900 }
3901 if (SAVE_EXPR_RTL (exp) == 0)
3902 {
3903 if (mode == BLKmode)
34a25822
RK
3904 {
3905 temp
3906 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3907 MEM_IN_STRUCT_P (temp)
3908 = (TREE_CODE (type) == RECORD_TYPE
3909 || TREE_CODE (type) == UNION_TYPE
3910 || TREE_CODE (type) == QUAL_UNION_TYPE
3911 || TREE_CODE (type) == ARRAY_TYPE);
3912 }
bbf6f052 3913 else
1499e0a8
RK
3914 {
3915 enum machine_mode var_mode = mode;
3916
3917 if (TREE_CODE (type) == INTEGER_TYPE
3918 || TREE_CODE (type) == ENUMERAL_TYPE
3919 || TREE_CODE (type) == BOOLEAN_TYPE
3920 || TREE_CODE (type) == CHAR_TYPE
3921 || TREE_CODE (type) == REAL_TYPE
3922 || TREE_CODE (type) == POINTER_TYPE
3923 || TREE_CODE (type) == OFFSET_TYPE)
3924 {
3925 PROMOTE_MODE (var_mode, unsignedp, type);
3926 }
3927
3928 temp = gen_reg_rtx (var_mode);
3929 }
3930
bbf6f052 3931 SAVE_EXPR_RTL (exp) = temp;
bbf6f052
RK
3932 if (!optimize && GET_CODE (temp) == REG)
3933 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3934 save_expr_regs);
ff78f773
RK
3935
3936 /* If the mode of TEMP does not match that of the expression, it
3937 must be a promoted value. We pass store_expr a SUBREG of the
3938 wanted mode but mark it so that we know that it was already
3939 extended. Note that `unsignedp' was modified above in
3940 this case. */
3941
3942 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3943 {
3944 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3945 SUBREG_PROMOTED_VAR_P (temp) = 1;
3946 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3947 }
3948
3949 store_expr (TREE_OPERAND (exp, 0), temp, 0);
bbf6f052 3950 }
1499e0a8
RK
3951
3952 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3953 must be a promoted value. We return a SUBREG of the wanted mode,
adc22a04 3954 but mark it so that we know that it was already extended. */
1499e0a8
RK
3955
3956 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3957 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3958 {
adc22a04
RK
3959 enum machine_mode var_mode = mode;
3960
3961 if (TREE_CODE (type) == INTEGER_TYPE
3962 || TREE_CODE (type) == ENUMERAL_TYPE
3963 || TREE_CODE (type) == BOOLEAN_TYPE
3964 || TREE_CODE (type) == CHAR_TYPE
3965 || TREE_CODE (type) == REAL_TYPE
3966 || TREE_CODE (type) == POINTER_TYPE
3967 || TREE_CODE (type) == OFFSET_TYPE)
3968 {
3969 PROMOTE_MODE (var_mode, unsignedp, type);
3970 }
3971
1499e0a8
RK
3972 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3973 SUBREG_PROMOTED_VAR_P (temp) = 1;
3974 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3975 return temp;
3976 }
3977
bbf6f052
RK
3978 return SAVE_EXPR_RTL (exp);
3979
b50d17a1
RK
3980 case PLACEHOLDER_EXPR:
3981 /* If there is an object on the head of the placeholder list,
3982 see if some object in it's references is of type TYPE. For
3983 further information, see tree.def. */
3984 if (placeholder_list)
3985 {
3986 tree object;
f59d43a9 3987 tree old_list = placeholder_list;
b50d17a1
RK
3988
3989 for (object = TREE_PURPOSE (placeholder_list);
3990 TREE_TYPE (object) != type
3991 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4805bfa0
RK
3992 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
3993 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
3994 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
b50d17a1
RK
3995 object = TREE_OPERAND (object, 0))
3996 ;
3997
4805bfa0 3998 if (object && TREE_TYPE (object) == type)
f59d43a9
RK
3999 {
4000 /* Expand this object skipping the list entries before
4001 it was found in case it is also a PLACEHOLDER_EXPR.
4002 In that case, we want to translate it using subsequent
4003 entries. */
4004 placeholder_list = TREE_CHAIN (placeholder_list);
4005 temp = expand_expr (object, original_target, tmode, modifier);
4006 placeholder_list = old_list;
4007 return temp;
4008 }
b50d17a1
RK
4009 }
4010
4011 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4012 abort ();
4013
4014 case WITH_RECORD_EXPR:
4015 /* Put the object on the placeholder list, expand our first operand,
4016 and pop the list. */
4017 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4018 placeholder_list);
4019 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4020 tmode, modifier);
4021 placeholder_list = TREE_CHAIN (placeholder_list);
4022 return target;
4023
bbf6f052 4024 case EXIT_EXPR:
e44842fe
RK
4025 expand_exit_loop_if_false (NULL_PTR,
4026 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
4027 return const0_rtx;
4028
4029 case LOOP_EXPR:
0088fcb1 4030 push_temp_slots ();
bbf6f052
RK
4031 expand_start_loop (1);
4032 expand_expr_stmt (TREE_OPERAND (exp, 0));
4033 expand_end_loop ();
0088fcb1 4034 pop_temp_slots ();
bbf6f052
RK
4035
4036 return const0_rtx;
4037
4038 case BIND_EXPR:
4039 {
4040 tree vars = TREE_OPERAND (exp, 0);
4041 int vars_need_expansion = 0;
4042
4043 /* Need to open a binding contour here because
4044 if there are any cleanups they most be contained here. */
4045 expand_start_bindings (0);
4046
2df53c0b
RS
4047 /* Mark the corresponding BLOCK for output in its proper place. */
4048 if (TREE_OPERAND (exp, 2) != 0
4049 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4050 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
4051
4052 /* If VARS have not yet been expanded, expand them now. */
4053 while (vars)
4054 {
4055 if (DECL_RTL (vars) == 0)
4056 {
4057 vars_need_expansion = 1;
4058 expand_decl (vars);
4059 }
4060 expand_decl_init (vars);
4061 vars = TREE_CHAIN (vars);
4062 }
4063
4064 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4065
4066 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4067
4068 return temp;
4069 }
4070
4071 case RTL_EXPR:
4072 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4073 abort ();
4074 emit_insns (RTL_EXPR_SEQUENCE (exp));
4075 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
ca814259 4076 free_temps_for_rtl_expr (exp);
bbf6f052
RK
4077 return RTL_EXPR_RTL (exp);
4078
4079 case CONSTRUCTOR:
dd27116b
RK
4080 /* If we don't need the result, just ensure we evaluate any
4081 subexpressions. */
4082 if (ignore)
4083 {
4084 tree elt;
4085 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4086 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4087 return const0_rtx;
4088 }
4af3895e
JVA
4089 /* All elts simple constants => refer to a constant in memory. But
4090 if this is a non-BLKmode mode, let it store a field at a time
4091 since that should make a CONST_INT or CONST_DOUBLE when we
dd27116b
RK
4092 fold. If we are making an initializer and all operands are
4093 constant, put it in memory as well. */
4094 else if ((TREE_STATIC (exp)
4095 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
4096 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
4097 {
4098 rtx constructor = output_constant_def (exp);
b552441b
RS
4099 if (modifier != EXPAND_CONST_ADDRESS
4100 && modifier != EXPAND_INITIALIZER
4101 && modifier != EXPAND_SUM
4102 && !memory_address_p (GET_MODE (constructor),
4103 XEXP (constructor, 0)))
bbf6f052
RK
4104 constructor = change_address (constructor, VOIDmode,
4105 XEXP (constructor, 0));
4106 return constructor;
4107 }
4108
bbf6f052
RK
4109 else
4110 {
4111 if (target == 0 || ! safe_from_p (target, exp))
4112 {
4113 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4114 target = gen_reg_rtx (mode);
4115 else
4116 {
3b94d087
RS
4117 enum tree_code c = TREE_CODE (type);
4118 target
4119 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
e7f3c83f
RK
4120 if (c == RECORD_TYPE || c == UNION_TYPE
4121 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3b94d087 4122 MEM_IN_STRUCT_P (target) = 1;
bbf6f052
RK
4123 }
4124 }
4125 store_constructor (exp, target);
4126 return target;
4127 }
4128
4129 case INDIRECT_REF:
4130 {
4131 tree exp1 = TREE_OPERAND (exp, 0);
4132 tree exp2;
4133
4134 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4135 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4136 This code has the same general effect as simply doing
4137 expand_expr on the save expr, except that the expression PTR
4138 is computed for use as a memory address. This means different
4139 code, suitable for indexing, may be generated. */
4140 if (TREE_CODE (exp1) == SAVE_EXPR
4141 && SAVE_EXPR_RTL (exp1) == 0
4142 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4143 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4144 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4145 {
906c4e36
RK
4146 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4147 VOIDmode, EXPAND_SUM);
bbf6f052
RK
4148 op0 = memory_address (mode, temp);
4149 op0 = copy_all_regs (op0);
4150 SAVE_EXPR_RTL (exp1) = op0;
4151 }
4152 else
4153 {
906c4e36 4154 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
4155 op0 = memory_address (mode, op0);
4156 }
8c8a8e34
JW
4157
4158 temp = gen_rtx (MEM, mode, op0);
4159 /* If address was computed by addition,
4160 mark this as an element of an aggregate. */
4161 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4162 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4163 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4164 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4165 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4166 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
e7f3c83f 4167 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
8c8a8e34
JW
4168 || (TREE_CODE (exp1) == ADDR_EXPR
4169 && (exp2 = TREE_OPERAND (exp1, 0))
4170 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4171 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
e7f3c83f
RK
4172 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
4173 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
8c8a8e34 4174 MEM_IN_STRUCT_P (temp) = 1;
2c4c436a 4175 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
89742723 4176#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
bbf6f052
RK
4177 a location is accessed through a pointer to const does not mean
4178 that the value there can never change. */
8c8a8e34 4179 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
bbf6f052 4180#endif
8c8a8e34
JW
4181 return temp;
4182 }
bbf6f052
RK
4183
4184 case ARRAY_REF:
742920c7
RK
4185 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4186 abort ();
bbf6f052 4187
bbf6f052 4188 {
742920c7
RK
4189 tree array = TREE_OPERAND (exp, 0);
4190 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4191 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4192 tree index = TREE_OPERAND (exp, 1);
4193 tree index_type = TREE_TYPE (index);
bbf6f052 4194 int i;
bbf6f052 4195
b50d17a1
RK
4196 if (TREE_CODE (low_bound) != INTEGER_CST
4197 && contains_placeholder_p (low_bound))
4198 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4199
d4c89139
PB
4200 /* Optimize the special-case of a zero lower bound.
4201
4202 We convert the low_bound to sizetype to avoid some problems
4203 with constant folding. (E.g. suppose the lower bound is 1,
4204 and its mode is QI. Without the conversion, (ARRAY
4205 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4206 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4207
4208 But sizetype isn't quite right either (especially if
4209 the lowbound is negative). FIXME */
4210
742920c7 4211 if (! integer_zerop (low_bound))
d4c89139
PB
4212 index = fold (build (MINUS_EXPR, index_type, index,
4213 convert (sizetype, low_bound)));
742920c7
RK
4214
4215 if (TREE_CODE (index) != INTEGER_CST
4216 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4217 {
4218 /* Nonconstant array index or nonconstant element size.
4219 Generate the tree for *(&array+index) and expand that,
4220 except do it in a language-independent way
4221 and don't complain about non-lvalue arrays.
4222 `mark_addressable' should already have been called
4223 for any array for which this case will be reached. */
4224
4225 /* Don't forget the const or volatile flag from the array
4226 element. */
4227 tree variant_type = build_type_variant (type,
4228 TREE_READONLY (exp),
4229 TREE_THIS_VOLATILE (exp));
4230 tree array_adr = build1 (ADDR_EXPR,
4231 build_pointer_type (variant_type), array);
4232 tree elt;
b50d17a1 4233 tree size = size_in_bytes (type);
742920c7
RK
4234
4235 /* Convert the integer argument to a type the same size as a
4236 pointer so the multiply won't overflow spuriously. */
4237 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4238 index = convert (type_for_size (POINTER_SIZE, 0), index);
4239
b50d17a1
RK
4240 if (TREE_CODE (size) != INTEGER_CST
4241 && contains_placeholder_p (size))
4242 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4243
742920c7
RK
4244 /* Don't think the address has side effects
4245 just because the array does.
4246 (In some cases the address might have side effects,
4247 and we fail to record that fact here. However, it should not
4248 matter, since expand_expr should not care.) */
4249 TREE_SIDE_EFFECTS (array_adr) = 0;
4250
4251 elt = build1 (INDIRECT_REF, type,
4252 fold (build (PLUS_EXPR,
4253 TYPE_POINTER_TO (variant_type),
4254 array_adr,
4255 fold (build (MULT_EXPR,
4256 TYPE_POINTER_TO (variant_type),
b50d17a1 4257 index, size)))));
742920c7
RK
4258
4259 /* Volatility, etc., of new expression is same as old
4260 expression. */
4261 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4262 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4263 TREE_READONLY (elt) = TREE_READONLY (exp);
4264
4265 return expand_expr (elt, target, tmode, modifier);
4266 }
4267
4268 /* Fold an expression like: "foo"[2].
4269 This is not done in fold so it won't happen inside &. */
4270
4271 if (TREE_CODE (array) == STRING_CST
4272 && TREE_CODE (index) == INTEGER_CST
4273 && !TREE_INT_CST_HIGH (index)
4274 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
bbf6f052 4275 {
742920c7 4276 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
bbf6f052 4277 {
742920c7 4278 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
bbf6f052
RK
4279 TREE_TYPE (exp) = integer_type_node;
4280 return expand_expr (exp, target, tmode, modifier);
4281 }
742920c7 4282 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
bbf6f052 4283 {
742920c7 4284 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
bbf6f052 4285 TREE_TYPE (exp) = integer_type_node;
742920c7
RK
4286 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
4287 exp),
4288 target, tmode, modifier);
bbf6f052
RK
4289 }
4290 }
bbf6f052 4291
742920c7
RK
4292 /* If this is a constant index into a constant array,
4293 just get the value from the array. Handle both the cases when
4294 we have an explicit constructor and when our operand is a variable
4295 that was declared const. */
4af3895e 4296
742920c7
RK
4297 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4298 {
4299 if (TREE_CODE (index) == INTEGER_CST
4300 && TREE_INT_CST_HIGH (index) == 0)
4301 {
4302 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4303
4304 i = TREE_INT_CST_LOW (index);
4305 while (elem && i--)
4306 elem = TREE_CHAIN (elem);
4307 if (elem)
4308 return expand_expr (fold (TREE_VALUE (elem)), target,
4309 tmode, modifier);
4310 }
4311 }
4af3895e 4312
742920c7
RK
4313 else if (optimize >= 1
4314 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4315 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4316 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4317 {
4318 if (TREE_CODE (index) == INTEGER_CST
4319 && TREE_INT_CST_HIGH (index) == 0)
4320 {
4321 tree init = DECL_INITIAL (array);
4322
4323 i = TREE_INT_CST_LOW (index);
4324 if (TREE_CODE (init) == CONSTRUCTOR)
4325 {
4326 tree elem = CONSTRUCTOR_ELTS (init);
4327
03dc44a6
RS
4328 while (elem
4329 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
4330 elem = TREE_CHAIN (elem);
4331 if (elem)
4332 return expand_expr (fold (TREE_VALUE (elem)), target,
4333 tmode, modifier);
4334 }
4335 else if (TREE_CODE (init) == STRING_CST
4336 && i < TREE_STRING_LENGTH (init))
4337 {
4338 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
4339 return convert_to_mode (mode, temp, 0);
4340 }
4341 }
4342 }
4343 }
8c8a8e34 4344
bbf6f052
RK
4345 /* Treat array-ref with constant index as a component-ref. */
4346
4347 case COMPONENT_REF:
4348 case BIT_FIELD_REF:
4af3895e
JVA
4349 /* If the operand is a CONSTRUCTOR, we can just extract the
4350 appropriate field if it is present. */
4351 if (code != ARRAY_REF
4352 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4353 {
4354 tree elt;
4355
4356 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4357 elt = TREE_CHAIN (elt))
4358 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4359 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4360 }
4361
bbf6f052
RK
4362 {
4363 enum machine_mode mode1;
4364 int bitsize;
4365 int bitpos;
7bb0943f 4366 tree offset;
bbf6f052 4367 int volatilep = 0;
7bb0943f 4368 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
bbf6f052 4369 &mode1, &unsignedp, &volatilep);
034f9101 4370 int alignment;
bbf6f052 4371
e7f3c83f
RK
4372 /* If we got back the original object, something is wrong. Perhaps
4373 we are evaluating an expression too early. In any event, don't
4374 infinitely recurse. */
4375 if (tem == exp)
4376 abort ();
4377
bbf6f052
RK
4378 /* In some cases, we will be offsetting OP0's address by a constant.
4379 So get it as a sum, if possible. If we will be using it
4380 directly in an insn, we validate it. */
906c4e36 4381 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052 4382
8c8a8e34 4383 /* If this is a constant, put it into a register if it is a
8008b228 4384 legitimate constant and memory if it isn't. */
8c8a8e34
JW
4385 if (CONSTANT_P (op0))
4386 {
4387 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 4388 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
4389 op0 = force_reg (mode, op0);
4390 else
4391 op0 = validize_mem (force_const_mem (mode, op0));
4392 }
4393
034f9101 4394 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
7bb0943f
RS
4395 if (offset != 0)
4396 {
906c4e36 4397 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
4398
4399 if (GET_CODE (op0) != MEM)
4400 abort ();
4401 op0 = change_address (op0, VOIDmode,
4402 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4403 force_reg (Pmode, offset_rtx)));
034f9101
RS
4404 /* If we have a variable offset, the known alignment
4405 is only that of the innermost structure containing the field.
4406 (Actually, we could sometimes do better by using the
4407 size of an element of the innermost array, but no need.) */
4408 if (TREE_CODE (exp) == COMPONENT_REF
4409 || TREE_CODE (exp) == BIT_FIELD_REF)
4410 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4411 / BITS_PER_UNIT);
7bb0943f
RS
4412 }
4413
bbf6f052
RK
4414 /* Don't forget about volatility even if this is a bitfield. */
4415 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4416 {
4417 op0 = copy_rtx (op0);
4418 MEM_VOLATILE_P (op0) = 1;
4419 }
4420
ccc98036
RS
4421 /* In cases where an aligned union has an unaligned object
4422 as a field, we might be extracting a BLKmode value from
4423 an integer-mode (e.g., SImode) object. Handle this case
4424 by doing the extract into an object as wide as the field
4425 (which we know to be the width of a basic mode), then
4426 storing into memory, and changing the mode to BLKmode. */
bbf6f052 4427 if (mode1 == VOIDmode
0bba3f6f
RK
4428 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4429 && modifier != EXPAND_CONST_ADDRESS
4430 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
ccc98036
RS
4431 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4432 /* If the field isn't aligned enough to fetch as a memref,
4433 fetch it as a bit field. */
4434 || (STRICT_ALIGNMENT
4435 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4436 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4437 {
bbf6f052
RK
4438 enum machine_mode ext_mode = mode;
4439
4440 if (ext_mode == BLKmode)
4441 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4442
4443 if (ext_mode == BLKmode)
4444 abort ();
4445
4446 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4447 unsignedp, target, ext_mode, ext_mode,
034f9101 4448 alignment,
bbf6f052
RK
4449 int_size_in_bytes (TREE_TYPE (tem)));
4450 if (mode == BLKmode)
4451 {
4452 rtx new = assign_stack_temp (ext_mode,
4453 bitsize / BITS_PER_UNIT, 0);
4454
4455 emit_move_insn (new, op0);
4456 op0 = copy_rtx (new);
4457 PUT_MODE (op0, BLKmode);
092dded9 4458 MEM_IN_STRUCT_P (op0) = 1;
bbf6f052
RK
4459 }
4460
4461 return op0;
4462 }
4463
4464 /* Get a reference to just this component. */
4465 if (modifier == EXPAND_CONST_ADDRESS
4466 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4467 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4468 (bitpos / BITS_PER_UNIT)));
4469 else
4470 op0 = change_address (op0, mode1,
4471 plus_constant (XEXP (op0, 0),
4472 (bitpos / BITS_PER_UNIT)));
4473 MEM_IN_STRUCT_P (op0) = 1;
4474 MEM_VOLATILE_P (op0) |= volatilep;
4475 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4476 return op0;
4477 if (target == 0)
4478 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4479 convert_move (target, op0, unsignedp);
4480 return target;
4481 }
4482
4483 case OFFSET_REF:
4484 {
da120c2f 4485 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
bbf6f052 4486 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
906c4e36 4487 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
4488 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4489 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 4490 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
89742723 4491#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
bbf6f052
RK
4492 a location is accessed through a pointer to const does not mean
4493 that the value there can never change. */
4494 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4495#endif
4496 return temp;
4497 }
4498
4499 /* Intended for a reference to a buffer of a file-object in Pascal.
4500 But it's not certain that a special tree code will really be
4501 necessary for these. INDIRECT_REF might work for them. */
4502 case BUFFER_REF:
4503 abort ();
4504
7308a047
RS
4505 /* IN_EXPR: Inlined pascal set IN expression.
4506
4507 Algorithm:
4508 rlo = set_low - (set_low%bits_per_word);
4509 the_word = set [ (index - rlo)/bits_per_word ];
4510 bit_index = index % bits_per_word;
4511 bitmask = 1 << bit_index;
4512 return !!(the_word & bitmask); */
4513 case IN_EXPR:
4514 preexpand_calls (exp);
4515 {
4516 tree set = TREE_OPERAND (exp, 0);
4517 tree index = TREE_OPERAND (exp, 1);
4518 tree set_type = TREE_TYPE (set);
4519
4520 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4521 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4522
4523 rtx index_val;
4524 rtx lo_r;
4525 rtx hi_r;
4526 rtx rlow;
4527 rtx diff, quo, rem, addr, bit, result;
4528 rtx setval, setaddr;
4529 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4530
4531 if (target == 0)
17938e57 4532 target = gen_reg_rtx (mode);
7308a047
RS
4533
4534 /* If domain is empty, answer is no. */
4535 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4536 return const0_rtx;
4537
4538 index_val = expand_expr (index, 0, VOIDmode, 0);
4539 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4540 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4541 setval = expand_expr (set, 0, VOIDmode, 0);
4542 setaddr = XEXP (setval, 0);
4543
4544 /* Compare index against bounds, if they are constant. */
4545 if (GET_CODE (index_val) == CONST_INT
17938e57
RK
4546 && GET_CODE (lo_r) == CONST_INT
4547 && INTVAL (index_val) < INTVAL (lo_r))
4548 return const0_rtx;
7308a047
RS
4549
4550 if (GET_CODE (index_val) == CONST_INT
17938e57
RK
4551 && GET_CODE (hi_r) == CONST_INT
4552 && INTVAL (hi_r) < INTVAL (index_val))
4553 return const0_rtx;
7308a047
RS
4554
4555 /* If we get here, we have to generate the code for both cases
4556 (in range and out of range). */
4557
4558 op0 = gen_label_rtx ();
4559 op1 = gen_label_rtx ();
4560
4561 if (! (GET_CODE (index_val) == CONST_INT
4562 && GET_CODE (lo_r) == CONST_INT))
4563 {
17938e57
RK
4564 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4565 GET_MODE (index_val), 0, 0);
7308a047
RS
4566 emit_jump_insn (gen_blt (op1));
4567 }
4568
4569 if (! (GET_CODE (index_val) == CONST_INT
4570 && GET_CODE (hi_r) == CONST_INT))
4571 {
17938e57
RK
4572 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4573 GET_MODE (index_val), 0, 0);
7308a047
RS
4574 emit_jump_insn (gen_bgt (op1));
4575 }
4576
4577 /* Calculate the element number of bit zero in the first word
4578 of the set. */
4579 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
4580 rlow = GEN_INT (INTVAL (lo_r)
4581 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 4582 else
17938e57
RK
4583 rlow = expand_binop (index_mode, and_optab, lo_r,
4584 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4585 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7308a047
RS
4586
4587 diff = expand_binop (index_mode, sub_optab,
17938e57 4588 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
7308a047
RS
4589
4590 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
17938e57 4591 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
7308a047 4592 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
17938e57 4593 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
7308a047
RS
4594 addr = memory_address (byte_mode,
4595 expand_binop (index_mode, add_optab,
17938e57
RK
4596 diff, setaddr, NULL_RTX, 0,
4597 OPTAB_LIB_WIDEN));
7308a047
RS
4598 /* Extract the bit we want to examine */
4599 bit = expand_shift (RSHIFT_EXPR, byte_mode,
17938e57
RK
4600 gen_rtx (MEM, byte_mode, addr),
4601 make_tree (TREE_TYPE (index), rem),
4602 NULL_RTX, 1);
4603 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4604 GET_MODE (target) == byte_mode ? target : 0,
7308a047 4605 1, OPTAB_LIB_WIDEN);
17938e57
RK
4606
4607 if (result != target)
4608 convert_move (target, result, 1);
7308a047
RS
4609
4610 /* Output the code to handle the out-of-range case. */
4611 emit_jump (op0);
4612 emit_label (op1);
4613 emit_move_insn (target, const0_rtx);
4614 emit_label (op0);
4615 return target;
4616 }
4617
bbf6f052
RK
4618 case WITH_CLEANUP_EXPR:
4619 if (RTL_EXPR_RTL (exp) == 0)
4620 {
4621 RTL_EXPR_RTL (exp)
e287fd6e
RK
4622 = expand_expr (TREE_OPERAND (exp, 0),
4623 target ? target : const0_rtx,
4624 tmode, modifier);
906c4e36
RK
4625 cleanups_this_call
4626 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
bbf6f052
RK
4627 /* That's it for this cleanup. */
4628 TREE_OPERAND (exp, 2) = 0;
4629 }
4630 return RTL_EXPR_RTL (exp);
4631
4632 case CALL_EXPR:
4633 /* Check for a built-in function. */
4634 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4635 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4636 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4637 return expand_builtin (exp, target, subtarget, tmode, ignore);
4638 /* If this call was expanded already by preexpand_calls,
4639 just return the result we got. */
4640 if (CALL_EXPR_RTL (exp) != 0)
4641 return CALL_EXPR_RTL (exp);
8129842c 4642 return expand_call (exp, target, ignore);
bbf6f052
RK
4643
4644 case NON_LVALUE_EXPR:
4645 case NOP_EXPR:
4646 case CONVERT_EXPR:
4647 case REFERENCE_EXPR:
bbf6f052
RK
4648 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4649 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4650 if (TREE_CODE (type) == UNION_TYPE)
4651 {
4652 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4653 if (target == 0)
4654 {
4655 if (mode == BLKmode)
4656 {
4657 if (TYPE_SIZE (type) == 0
4658 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4659 abort ();
4660 target = assign_stack_temp (BLKmode,
4661 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4662 + BITS_PER_UNIT - 1)
4663 / BITS_PER_UNIT, 0);
4664 }
4665 else
4666 target = gen_reg_rtx (mode);
4667 }
4668 if (GET_CODE (target) == MEM)
4669 /* Store data into beginning of memory target. */
4670 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
4671 change_address (target, TYPE_MODE (valtype), 0), 0);
4672
bbf6f052
RK
4673 else if (GET_CODE (target) == REG)
4674 /* Store this field into a union of the proper type. */
4675 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4676 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4677 VOIDmode, 0, 1,
4678 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4679 else
4680 abort ();
4681
4682 /* Return the entire union. */
4683 return target;
4684 }
1499e0a8 4685 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
4686 if (GET_MODE (op0) == mode)
4687 return op0;
4688 /* If arg is a constant integer being extended from a narrower mode,
4689 we must really truncate to get the extended bits right. Otherwise
4690 (unsigned long) (unsigned char) ("\377"[0])
4691 would come out as ffffffff. */
4692 if (GET_MODE (op0) == VOIDmode
4693 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4694 < GET_MODE_BITSIZE (mode)))
4695 {
4696 /* MODE must be narrower than HOST_BITS_PER_INT. */
4697 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4698
4699 if (width < HOST_BITS_PER_WIDE_INT)
4700 {
4701 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4702 : CONST_DOUBLE_LOW (op0));
4703 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4704 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4705 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4706 else
4707 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4708
4709 op0 = GEN_INT (val);
4710 }
4711 else
4712 {
4713 op0 = (simplify_unary_operation
4714 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4715 ? ZERO_EXTEND : SIGN_EXTEND),
4716 mode, op0,
4717 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4718 if (op0 == 0)
4719 abort ();
4720 }
4721 }
4722 if (GET_MODE (op0) == VOIDmode)
bbf6f052 4723 return op0;
26fcb35a
RS
4724 if (modifier == EXPAND_INITIALIZER)
4725 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
bbf6f052
RK
4726 if (flag_force_mem && GET_CODE (op0) == MEM)
4727 op0 = copy_to_reg (op0);
4728
4729 if (target == 0)
4730 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4731 else
4732 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4733 return target;
4734
4735 case PLUS_EXPR:
4736 /* We come here from MINUS_EXPR when the second operand is a constant. */
4737 plus_expr:
4738 this_optab = add_optab;
4739
4740 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4741 something else, make sure we add the register to the constant and
4742 then to the other thing. This case can occur during strength
4743 reduction and doing it this way will produce better code if the
4744 frame pointer or argument pointer is eliminated.
4745
4746 fold-const.c will ensure that the constant is always in the inner
4747 PLUS_EXPR, so the only case we need to do anything about is if
4748 sp, ap, or fp is our second argument, in which case we must swap
4749 the innermost first argument and our second argument. */
4750
4751 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4752 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4753 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4754 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4755 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4756 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4757 {
4758 tree t = TREE_OPERAND (exp, 1);
4759
4760 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4761 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4762 }
4763
4764 /* If the result is to be Pmode and we are adding an integer to
4765 something, we might be forming a constant. So try to use
4766 plus_constant. If it produces a sum and we can't accept it,
4767 use force_operand. This allows P = &ARR[const] to generate
4768 efficient code on machines where a SYMBOL_REF is not a valid
4769 address.
4770
4771 If this is an EXPAND_SUM call, always return the sum. */
c980ac49
RS
4772 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4773 || mode == Pmode)
bbf6f052 4774 {
c980ac49
RS
4775 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4776 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4777 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4778 {
4779 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4780 EXPAND_SUM);
4781 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4782 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4783 op1 = force_operand (op1, target);
4784 return op1;
4785 }
bbf6f052 4786
c980ac49
RS
4787 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4788 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4789 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4790 {
4791 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4792 EXPAND_SUM);
4793 if (! CONSTANT_P (op0))
4794 {
4795 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4796 VOIDmode, modifier);
709f5be1
RS
4797 /* Don't go to both_summands if modifier
4798 says it's not right to return a PLUS. */
4799 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4800 goto binop2;
c980ac49
RS
4801 goto both_summands;
4802 }
4803 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4804 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4805 op0 = force_operand (op0, target);
4806 return op0;
4807 }
bbf6f052
RK
4808 }
4809
4810 /* No sense saving up arithmetic to be done
4811 if it's all in the wrong mode to form part of an address.
4812 And force_operand won't know whether to sign-extend or
4813 zero-extend. */
4814 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
c980ac49
RS
4815 || mode != Pmode)
4816 goto binop;
bbf6f052
RK
4817
4818 preexpand_calls (exp);
4819 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4820 subtarget = 0;
4821
4822 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
906c4e36 4823 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
bbf6f052 4824
c980ac49 4825 both_summands:
bbf6f052
RK
4826 /* Make sure any term that's a sum with a constant comes last. */
4827 if (GET_CODE (op0) == PLUS
4828 && CONSTANT_P (XEXP (op0, 1)))
4829 {
4830 temp = op0;
4831 op0 = op1;
4832 op1 = temp;
4833 }
4834 /* If adding to a sum including a constant,
4835 associate it to put the constant outside. */
4836 if (GET_CODE (op1) == PLUS
4837 && CONSTANT_P (XEXP (op1, 1)))
4838 {
4839 rtx constant_term = const0_rtx;
4840
4841 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4842 if (temp != 0)
4843 op0 = temp;
6f90e075
JW
4844 /* Ensure that MULT comes first if there is one. */
4845 else if (GET_CODE (op0) == MULT)
4846 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
bbf6f052
RK
4847 else
4848 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4849
4850 /* Let's also eliminate constants from op0 if possible. */
4851 op0 = eliminate_constant_term (op0, &constant_term);
4852
4853 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4854 their sum should be a constant. Form it into OP1, since the
4855 result we want will then be OP0 + OP1. */
4856
4857 temp = simplify_binary_operation (PLUS, mode, constant_term,
4858 XEXP (op1, 1));
4859 if (temp != 0)
4860 op1 = temp;
4861 else
4862 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4863 }
4864
4865 /* Put a constant term last and put a multiplication first. */
4866 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4867 temp = op1, op1 = op0, op0 = temp;
4868
4869 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4870 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4871
4872 case MINUS_EXPR:
ea87523e
RK
4873 /* For initializers, we are allowed to return a MINUS of two
4874 symbolic constants. Here we handle all cases when both operands
4875 are constant. */
bbf6f052
RK
4876 /* Handle difference of two symbolic constants,
4877 for the sake of an initializer. */
4878 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4879 && really_constant_p (TREE_OPERAND (exp, 0))
4880 && really_constant_p (TREE_OPERAND (exp, 1)))
4881 {
906c4e36
RK
4882 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4883 VOIDmode, modifier);
4884 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4885 VOIDmode, modifier);
ea87523e
RK
4886
4887 /* If one operand is a CONST_INT, put it last. */
4888 if (GET_CODE (op0) == CONST_INT)
4889 temp = op0, op0 = op1, op1 = temp;
4890
4891 /* If the last operand is a CONST_INT, use plus_constant of
4892 the negated constant. Else make the MINUS. */
4893 if (GET_CODE (op1) == CONST_INT)
4894 return plus_constant (op0, - INTVAL (op1));
4895 else
4896 return gen_rtx (MINUS, mode, op0, op1);
bbf6f052
RK
4897 }
4898 /* Convert A - const to A + (-const). */
4899 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4900 {
4901 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4902 fold (build1 (NEGATE_EXPR, type,
4903 TREE_OPERAND (exp, 1))));
4904 goto plus_expr;
4905 }
4906 this_optab = sub_optab;
4907 goto binop;
4908
4909 case MULT_EXPR:
4910 preexpand_calls (exp);
4911 /* If first operand is constant, swap them.
4912 Thus the following special case checks need only
4913 check the second operand. */
4914 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4915 {
4916 register tree t1 = TREE_OPERAND (exp, 0);
4917 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4918 TREE_OPERAND (exp, 1) = t1;
4919 }
4920
4921 /* Attempt to return something suitable for generating an
4922 indexed address, for machines that support that. */
4923
4924 if (modifier == EXPAND_SUM && mode == Pmode
4925 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 4926 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
4927 {
4928 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4929
4930 /* Apply distributive law if OP0 is x+c. */
4931 if (GET_CODE (op0) == PLUS
4932 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4933 return gen_rtx (PLUS, mode,
4934 gen_rtx (MULT, mode, XEXP (op0, 0),
906c4e36
RK
4935 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4936 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4937 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
4938
4939 if (GET_CODE (op0) != REG)
906c4e36 4940 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
4941 if (GET_CODE (op0) != REG)
4942 op0 = copy_to_mode_reg (mode, op0);
4943
4944 return gen_rtx (MULT, mode, op0,
906c4e36 4945 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
4946 }
4947
4948 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4949 subtarget = 0;
4950
4951 /* Check for multiplying things that have been extended
4952 from a narrower type. If this machine supports multiplying
4953 in that narrower type with a result in the desired type,
4954 do it that way, and avoid the explicit type-conversion. */
4955 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4956 && TREE_CODE (type) == INTEGER_TYPE
4957 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4958 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4959 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4960 && int_fits_type_p (TREE_OPERAND (exp, 1),
4961 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4962 /* Don't use a widening multiply if a shift will do. */
4963 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 4964 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
4965 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4966 ||
4967 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4968 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4969 ==
4970 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4971 /* If both operands are extended, they must either both
4972 be zero-extended or both be sign-extended. */
4973 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4974 ==
4975 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4976 {
4977 enum machine_mode innermode
4978 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4979 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4980 ? umul_widen_optab : smul_widen_optab);
4981 if (mode == GET_MODE_WIDER_MODE (innermode)
4982 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4983 {
4984 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
906c4e36 4985 NULL_RTX, VOIDmode, 0);
bbf6f052 4986 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
906c4e36
RK
4987 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4988 VOIDmode, 0);
bbf6f052
RK
4989 else
4990 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
906c4e36 4991 NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4992 goto binop2;
4993 }
4994 }
4995 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4996 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4997 return expand_mult (mode, op0, op1, target, unsignedp);
4998
4999 case TRUNC_DIV_EXPR:
5000 case FLOOR_DIV_EXPR:
5001 case CEIL_DIV_EXPR:
5002 case ROUND_DIV_EXPR:
5003 case EXACT_DIV_EXPR:
5004 preexpand_calls (exp);
5005 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5006 subtarget = 0;
5007 /* Possible optimization: compute the dividend with EXPAND_SUM
5008 then if the divisor is constant can optimize the case
5009 where some terms of the dividend have coeffs divisible by it. */
5010 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 5011 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5012 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5013
5014 case RDIV_EXPR:
5015 this_optab = flodiv_optab;
5016 goto binop;
5017
5018 case TRUNC_MOD_EXPR:
5019 case FLOOR_MOD_EXPR:
5020 case CEIL_MOD_EXPR:
5021 case ROUND_MOD_EXPR:
5022 preexpand_calls (exp);
5023 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5024 subtarget = 0;
5025 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 5026 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5027 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5028
5029 case FIX_ROUND_EXPR:
5030 case FIX_FLOOR_EXPR:
5031 case FIX_CEIL_EXPR:
5032 abort (); /* Not used for C. */
5033
5034 case FIX_TRUNC_EXPR:
906c4e36 5035 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5036 if (target == 0)
5037 target = gen_reg_rtx (mode);
5038 expand_fix (target, op0, unsignedp);
5039 return target;
5040
5041 case FLOAT_EXPR:
906c4e36 5042 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5043 if (target == 0)
5044 target = gen_reg_rtx (mode);
5045 /* expand_float can't figure out what to do if FROM has VOIDmode.
5046 So give it the correct mode. With -O, cse will optimize this. */
5047 if (GET_MODE (op0) == VOIDmode)
5048 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5049 op0);
5050 expand_float (target, op0,
5051 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5052 return target;
5053
5054 case NEGATE_EXPR:
5b22bee8 5055 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
bbf6f052
RK
5056 temp = expand_unop (mode, neg_optab, op0, target, 0);
5057 if (temp == 0)
5058 abort ();
5059 return temp;
5060
5061 case ABS_EXPR:
5062 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5063
2d7050fd
RS
5064 /* Handle complex values specially. */
5065 {
5066 enum machine_mode opmode
5067 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5068
5069 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
5070 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
5071 return expand_complex_abs (opmode, op0, target, unsignedp);
5072 }
5073
bbf6f052
RK
5074 /* Unsigned abs is simply the operand. Testing here means we don't
5075 risk generating incorrect code below. */
5076 if (TREE_UNSIGNED (type))
5077 return op0;
5078
5079 /* First try to do it with a special abs instruction. */
5080 temp = expand_unop (mode, abs_optab, op0, target, 0);
5081 if (temp != 0)
5082 return temp;
5083
5084 /* If this machine has expensive jumps, we can do integer absolute
5085 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5086 where W is the width of MODE. */
5087
5088 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5089 {
5090 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5091 size_int (GET_MODE_BITSIZE (mode) - 1),
906c4e36 5092 NULL_RTX, 0);
bbf6f052
RK
5093
5094 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5095 OPTAB_LIB_WIDEN);
5096 if (temp != 0)
5097 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5098 OPTAB_LIB_WIDEN);
5099
5100 if (temp != 0)
5101 return temp;
5102 }
5103
5104 /* If that does not win, use conditional jump and negate. */
5105 target = original_target;
5106 temp = gen_label_rtx ();
5107 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
37568125 5108 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
bbf6f052
RK
5109 || (GET_CODE (target) == REG
5110 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5111 target = gen_reg_rtx (mode);
5112 emit_move_insn (target, op0);
5113 emit_cmp_insn (target,
5114 expand_expr (convert (type, integer_zero_node),
906c4e36
RK
5115 NULL_RTX, VOIDmode, 0),
5116 GE, NULL_RTX, mode, 0, 0);
bbf6f052
RK
5117 NO_DEFER_POP;
5118 emit_jump_insn (gen_bge (temp));
5119 op0 = expand_unop (mode, neg_optab, target, target, 0);
5120 if (op0 != target)
5121 emit_move_insn (target, op0);
5122 emit_label (temp);
5123 OK_DEFER_POP;
5124 return target;
5125
5126 case MAX_EXPR:
5127 case MIN_EXPR:
5128 target = original_target;
5129 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
fc155707 5130 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
bbf6f052
RK
5131 || (GET_CODE (target) == REG
5132 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5133 target = gen_reg_rtx (mode);
906c4e36 5134 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5135 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5136
5137 /* First try to do it with a special MIN or MAX instruction.
5138 If that does not win, use a conditional jump to select the proper
5139 value. */
5140 this_optab = (TREE_UNSIGNED (type)
5141 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5142 : (code == MIN_EXPR ? smin_optab : smax_optab));
5143
5144 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5145 OPTAB_WIDEN);
5146 if (temp != 0)
5147 return temp;
5148
ee456b1c
RK
5149 if (target != op0)
5150 emit_move_insn (target, op0);
bbf6f052 5151 op0 = gen_label_rtx ();
f81497d9
RS
5152 /* If this mode is an integer too wide to compare properly,
5153 compare word by word. Rely on cse to optimize constant cases. */
5154 if (GET_MODE_CLASS (mode) == MODE_INT
5155 && !can_compare_p (mode))
bbf6f052 5156 {
f81497d9 5157 if (code == MAX_EXPR)
ee456b1c 5158 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
bbf6f052 5159 else
ee456b1c
RK
5160 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
5161 emit_move_insn (target, op1);
bbf6f052 5162 }
f81497d9
RS
5163 else
5164 {
5165 if (code == MAX_EXPR)
5166 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
5167 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5168 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
f81497d9
RS
5169 else
5170 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
5171 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5172 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
f81497d9 5173 if (temp == const0_rtx)
ee456b1c 5174 emit_move_insn (target, op1);
f81497d9
RS
5175 else if (temp != const_true_rtx)
5176 {
5177 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5178 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5179 else
5180 abort ();
ee456b1c 5181 emit_move_insn (target, op1);
f81497d9
RS
5182 }
5183 }
bbf6f052
RK
5184 emit_label (op0);
5185 return target;
5186
5187/* ??? Can optimize when the operand of this is a bitwise operation,
5188 by using a different bitwise operation. */
5189 case BIT_NOT_EXPR:
5190 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5191 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5192 if (temp == 0)
5193 abort ();
5194 return temp;
5195
5196 case FFS_EXPR:
5197 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5198 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5199 if (temp == 0)
5200 abort ();
5201 return temp;
5202
5203/* ??? Can optimize bitwise operations with one arg constant.
5204 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5205 and (a bitwise1 b) bitwise2 b (etc)
5206 but that is probably not worth while. */
5207
5208/* BIT_AND_EXPR is for bitwise anding.
5209 TRUTH_AND_EXPR is for anding two boolean values
5210 when we want in all cases to compute both of them.
5211 In general it is fastest to do TRUTH_AND_EXPR by
5212 computing both operands as actual zero-or-1 values
5213 and then bitwise anding. In cases where there cannot
5214 be any side effects, better code would be made by
5215 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5216 but the question is how to recognize those cases. */
5217
b258707c
RS
5218 /* TRUTH_AND_EXPR can have a result whose mode doesn't match
5219 th operands. If so, don't use our target. */
bbf6f052 5220 case TRUTH_AND_EXPR:
b258707c
RS
5221 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5222 subtarget = 0;
bbf6f052
RK
5223 case BIT_AND_EXPR:
5224 this_optab = and_optab;
5225 goto binop;
5226
5227/* See comment above about TRUTH_AND_EXPR; it applies here too. */
5228 case TRUTH_OR_EXPR:
b258707c
RS
5229 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5230 subtarget = 0;
bbf6f052
RK
5231 case BIT_IOR_EXPR:
5232 this_optab = ior_optab;
5233 goto binop;
5234
874726a8 5235 case TRUTH_XOR_EXPR:
b258707c
RS
5236 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5237 subtarget = 0;
bbf6f052
RK
5238 case BIT_XOR_EXPR:
5239 this_optab = xor_optab;
5240 goto binop;
5241
5242 case LSHIFT_EXPR:
5243 case RSHIFT_EXPR:
5244 case LROTATE_EXPR:
5245 case RROTATE_EXPR:
5246 preexpand_calls (exp);
5247 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5248 subtarget = 0;
5249 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5250 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5251 unsignedp);
5252
5253/* Could determine the answer when only additive constants differ.
5254 Also, the addition of one can be handled by changing the condition. */
5255 case LT_EXPR:
5256 case LE_EXPR:
5257 case GT_EXPR:
5258 case GE_EXPR:
5259 case EQ_EXPR:
5260 case NE_EXPR:
5261 preexpand_calls (exp);
5262 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5263 if (temp != 0)
5264 return temp;
5265 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5266 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5267 && original_target
5268 && GET_CODE (original_target) == REG
5269 && (GET_MODE (original_target)
5270 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5271 {
5272 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5273 if (temp != original_target)
5274 temp = copy_to_reg (temp);
5275 op1 = gen_label_rtx ();
906c4e36 5276 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
bbf6f052
RK
5277 GET_MODE (temp), unsignedp, 0);
5278 emit_jump_insn (gen_beq (op1));
5279 emit_move_insn (temp, const1_rtx);
5280 emit_label (op1);
5281 return temp;
5282 }
5283 /* If no set-flag instruction, must generate a conditional
5284 store into a temporary variable. Drop through
5285 and handle this like && and ||. */
5286
5287 case TRUTH_ANDIF_EXPR:
5288 case TRUTH_ORIF_EXPR:
e44842fe
RK
5289 if (! ignore
5290 && (target == 0 || ! safe_from_p (target, exp)
5291 /* Make sure we don't have a hard reg (such as function's return
5292 value) live across basic blocks, if not optimizing. */
5293 || (!optimize && GET_CODE (target) == REG
5294 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 5295 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
5296
5297 if (target)
5298 emit_clr_insn (target);
5299
bbf6f052
RK
5300 op1 = gen_label_rtx ();
5301 jumpifnot (exp, op1);
e44842fe
RK
5302
5303 if (target)
5304 emit_0_to_1_insn (target);
5305
bbf6f052 5306 emit_label (op1);
e44842fe 5307 return ignore ? const0_rtx : target;
bbf6f052
RK
5308
5309 case TRUTH_NOT_EXPR:
5310 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5311 /* The parser is careful to generate TRUTH_NOT_EXPR
5312 only with operands that are always zero or one. */
906c4e36 5313 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
5314 target, 1, OPTAB_LIB_WIDEN);
5315 if (temp == 0)
5316 abort ();
5317 return temp;
5318
5319 case COMPOUND_EXPR:
5320 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5321 emit_queue ();
5322 return expand_expr (TREE_OPERAND (exp, 1),
5323 (ignore ? const0_rtx : target),
5324 VOIDmode, 0);
5325
5326 case COND_EXPR:
5327 {
5328 /* Note that COND_EXPRs whose type is a structure or union
5329 are required to be constructed to contain assignments of
5330 a temporary variable, so that we can evaluate them here
5331 for side effect only. If type is void, we must do likewise. */
5332
5333 /* If an arm of the branch requires a cleanup,
5334 only that cleanup is performed. */
5335
5336 tree singleton = 0;
5337 tree binary_op = 0, unary_op = 0;
5338 tree old_cleanups = cleanups_this_call;
5339 cleanups_this_call = 0;
5340
5341 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5342 convert it to our mode, if necessary. */
5343 if (integer_onep (TREE_OPERAND (exp, 1))
5344 && integer_zerop (TREE_OPERAND (exp, 2))
5345 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5346 {
dd27116b
RK
5347 if (ignore)
5348 {
5349 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5350 modifier);
5351 return const0_rtx;
5352 }
5353
bbf6f052
RK
5354 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5355 if (GET_MODE (op0) == mode)
5356 return op0;
5357 if (target == 0)
5358 target = gen_reg_rtx (mode);
5359 convert_move (target, op0, unsignedp);
5360 return target;
5361 }
5362
5363 /* If we are not to produce a result, we have no target. Otherwise,
5364 if a target was specified use it; it will not be used as an
5365 intermediate target unless it is safe. If no target, use a
5366 temporary. */
5367
dd27116b 5368 if (ignore)
bbf6f052
RK
5369 temp = 0;
5370 else if (original_target
5371 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5372 temp = original_target;
5373 else if (mode == BLKmode)
5374 {
5375 if (TYPE_SIZE (type) == 0
5376 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5377 abort ();
673bc773 5378
bbf6f052
RK
5379 temp = assign_stack_temp (BLKmode,
5380 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5381 + BITS_PER_UNIT - 1)
5382 / BITS_PER_UNIT, 0);
673bc773
RS
5383 MEM_IN_STRUCT_P (temp)
5384 = (TREE_CODE (type) == RECORD_TYPE
5385 || TREE_CODE (type) == UNION_TYPE
5386 || TREE_CODE (type) == QUAL_UNION_TYPE
5387 || TREE_CODE (type) == ARRAY_TYPE);
bbf6f052
RK
5388 }
5389 else
5390 temp = gen_reg_rtx (mode);
5391
5392 /* Check for X ? A + B : A. If we have this, we can copy
5393 A to the output and conditionally add B. Similarly for unary
5394 operations. Don't do this if X has side-effects because
5395 those side effects might affect A or B and the "?" operation is
5396 a sequence point in ANSI. (We test for side effects later.) */
5397
5398 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5399 && operand_equal_p (TREE_OPERAND (exp, 2),
5400 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5401 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5402 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5403 && operand_equal_p (TREE_OPERAND (exp, 1),
5404 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5405 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5406 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5407 && operand_equal_p (TREE_OPERAND (exp, 2),
5408 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5409 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5410 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5411 && operand_equal_p (TREE_OPERAND (exp, 1),
5412 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5413 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5414
5415 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5416 operation, do this as A + (X != 0). Similarly for other simple
5417 binary operators. */
dd27116b 5418 if (temp && singleton && binary_op
bbf6f052
RK
5419 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5420 && (TREE_CODE (binary_op) == PLUS_EXPR
5421 || TREE_CODE (binary_op) == MINUS_EXPR
5422 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5423 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5424 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5425 && integer_onep (TREE_OPERAND (binary_op, 1))
5426 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5427 {
5428 rtx result;
5429 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5430 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5431 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5432 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5433 : and_optab);
5434
5435 /* If we had X ? A : A + 1, do this as A + (X == 0).
5436
5437 We have to invert the truth value here and then put it
5438 back later if do_store_flag fails. We cannot simply copy
5439 TREE_OPERAND (exp, 0) to another variable and modify that
5440 because invert_truthvalue can modify the tree pointed to
5441 by its argument. */
5442 if (singleton == TREE_OPERAND (exp, 1))
5443 TREE_OPERAND (exp, 0)
5444 = invert_truthvalue (TREE_OPERAND (exp, 0));
5445
5446 result = do_store_flag (TREE_OPERAND (exp, 0),
906c4e36
RK
5447 (safe_from_p (temp, singleton)
5448 ? temp : NULL_RTX),
bbf6f052
RK
5449 mode, BRANCH_COST <= 1);
5450
5451 if (result)
5452 {
906c4e36 5453 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5454 return expand_binop (mode, boptab, op1, result, temp,
5455 unsignedp, OPTAB_LIB_WIDEN);
5456 }
5457 else if (singleton == TREE_OPERAND (exp, 1))
5458 TREE_OPERAND (exp, 0)
5459 = invert_truthvalue (TREE_OPERAND (exp, 0));
5460 }
5461
5462 NO_DEFER_POP;
5463 op0 = gen_label_rtx ();
5464
5465 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5466 {
5467 if (temp != 0)
5468 {
5469 /* If the target conflicts with the other operand of the
5470 binary op, we can't use it. Also, we can't use the target
5471 if it is a hard register, because evaluating the condition
5472 might clobber it. */
5473 if ((binary_op
5474 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5475 || (GET_CODE (temp) == REG
5476 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5477 temp = gen_reg_rtx (mode);
5478 store_expr (singleton, temp, 0);
5479 }
5480 else
906c4e36 5481 expand_expr (singleton,
2937cf87 5482 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5483 if (cleanups_this_call)
5484 {
5485 sorry ("aggregate value in COND_EXPR");
5486 cleanups_this_call = 0;
5487 }
5488 if (singleton == TREE_OPERAND (exp, 1))
5489 jumpif (TREE_OPERAND (exp, 0), op0);
5490 else
5491 jumpifnot (TREE_OPERAND (exp, 0), op0);
5492
5493 if (binary_op && temp == 0)
5494 /* Just touch the other operand. */
5495 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 5496 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5497 else if (binary_op)
5498 store_expr (build (TREE_CODE (binary_op), type,
5499 make_tree (type, temp),
5500 TREE_OPERAND (binary_op, 1)),
5501 temp, 0);
5502 else
5503 store_expr (build1 (TREE_CODE (unary_op), type,
5504 make_tree (type, temp)),
5505 temp, 0);
5506 op1 = op0;
5507 }
5508#if 0
5509 /* This is now done in jump.c and is better done there because it
5510 produces shorter register lifetimes. */
5511
5512 /* Check for both possibilities either constants or variables
5513 in registers (but not the same as the target!). If so, can
5514 save branches by assigning one, branching, and assigning the
5515 other. */
5516 else if (temp && GET_MODE (temp) != BLKmode
5517 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5518 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5519 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5520 && DECL_RTL (TREE_OPERAND (exp, 1))
5521 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5522 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5523 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5524 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5525 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5526 && DECL_RTL (TREE_OPERAND (exp, 2))
5527 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5528 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5529 {
5530 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5531 temp = gen_reg_rtx (mode);
5532 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5533 jumpifnot (TREE_OPERAND (exp, 0), op0);
5534 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5535 op1 = op0;
5536 }
5537#endif
5538 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5539 comparison operator. If we have one of these cases, set the
5540 output to A, branch on A (cse will merge these two references),
5541 then set the output to FOO. */
5542 else if (temp
5543 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5544 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5545 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5546 TREE_OPERAND (exp, 1), 0)
5547 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5548 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5549 {
5550 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5551 temp = gen_reg_rtx (mode);
5552 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5553 jumpif (TREE_OPERAND (exp, 0), op0);
5554 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5555 op1 = op0;
5556 }
5557 else if (temp
5558 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5559 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5560 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5561 TREE_OPERAND (exp, 2), 0)
5562 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5563 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5564 {
5565 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5566 temp = gen_reg_rtx (mode);
5567 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5568 jumpifnot (TREE_OPERAND (exp, 0), op0);
5569 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5570 op1 = op0;
5571 }
5572 else
5573 {
5574 op1 = gen_label_rtx ();
5575 jumpifnot (TREE_OPERAND (exp, 0), op0);
5576 if (temp != 0)
5577 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5578 else
906c4e36
RK
5579 expand_expr (TREE_OPERAND (exp, 1),
5580 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5581 if (cleanups_this_call)
5582 {
5583 sorry ("aggregate value in COND_EXPR");
5584 cleanups_this_call = 0;
5585 }
5586
5587 emit_queue ();
5588 emit_jump_insn (gen_jump (op1));
5589 emit_barrier ();
5590 emit_label (op0);
5591 if (temp != 0)
5592 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5593 else
906c4e36
RK
5594 expand_expr (TREE_OPERAND (exp, 2),
5595 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5596 }
5597
5598 if (cleanups_this_call)
5599 {
5600 sorry ("aggregate value in COND_EXPR");
5601 cleanups_this_call = 0;
5602 }
5603
5604 emit_queue ();
5605 emit_label (op1);
5606 OK_DEFER_POP;
5607 cleanups_this_call = old_cleanups;
5608 return temp;
5609 }
5610
5611 case TARGET_EXPR:
5612 {
5613 /* Something needs to be initialized, but we didn't know
5614 where that thing was when building the tree. For example,
5615 it could be the return value of a function, or a parameter
5616 to a function which lays down in the stack, or a temporary
5617 variable which must be passed by reference.
5618
5619 We guarantee that the expression will either be constructed
5620 or copied into our original target. */
5621
5622 tree slot = TREE_OPERAND (exp, 0);
5c062816 5623 tree exp1;
bbf6f052
RK
5624
5625 if (TREE_CODE (slot) != VAR_DECL)
5626 abort ();
5627
5628 if (target == 0)
5629 {
5630 if (DECL_RTL (slot) != 0)
ac993f4f
MS
5631 {
5632 target = DECL_RTL (slot);
5c062816 5633 /* If we have already expanded the slot, so don't do
ac993f4f 5634 it again. (mrs) */
5c062816
MS
5635 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5636 return target;
ac993f4f 5637 }
bbf6f052
RK
5638 else
5639 {
5640 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5641 /* All temp slots at this level must not conflict. */
5642 preserve_temp_slots (target);
5643 DECL_RTL (slot) = target;
5644 }
5645
e287fd6e
RK
5646 /* We set IGNORE when we know that we're already
5647 doing this for a cleanup. */
5648 if (ignore == 0)
5649 {
5650 /* Since SLOT is not known to the called function
5651 to belong to its stack frame, we must build an explicit
5652 cleanup. This case occurs when we must build up a reference
5653 to pass the reference as an argument. In this case,
5654 it is very likely that such a reference need not be
5655 built here. */
5656
5657 if (TREE_OPERAND (exp, 2) == 0)
5658 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5659 if (TREE_OPERAND (exp, 2))
5660 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5661 cleanups_this_call);
5662 }
bbf6f052
RK
5663 }
5664 else
5665 {
5666 /* This case does occur, when expanding a parameter which
5667 needs to be constructed on the stack. The target
5668 is the actual stack address that we want to initialize.
5669 The function we call will perform the cleanup in this case. */
5670
8c042b47
RS
5671 /* If we have already assigned it space, use that space,
5672 not target that we were passed in, as our target
5673 parameter is only a hint. */
5674 if (DECL_RTL (slot) != 0)
5675 {
5676 target = DECL_RTL (slot);
5677 /* If we have already expanded the slot, so don't do
5678 it again. (mrs) */
5679 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5680 return target;
5681 }
5682
bbf6f052
RK
5683 DECL_RTL (slot) = target;
5684 }
5685
5c062816
MS
5686 exp1 = TREE_OPERAND (exp, 1);
5687 /* Mark it as expanded. */
5688 TREE_OPERAND (exp, 1) = NULL_TREE;
5689
5690 return expand_expr (exp1, target, tmode, modifier);
bbf6f052
RK
5691 }
5692
5693 case INIT_EXPR:
5694 {
5695 tree lhs = TREE_OPERAND (exp, 0);
5696 tree rhs = TREE_OPERAND (exp, 1);
5697 tree noncopied_parts = 0;
5698 tree lhs_type = TREE_TYPE (lhs);
5699
5700 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5701 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5702 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5703 TYPE_NONCOPIED_PARTS (lhs_type));
5704 while (noncopied_parts != 0)
5705 {
5706 expand_assignment (TREE_VALUE (noncopied_parts),
5707 TREE_PURPOSE (noncopied_parts), 0, 0);
5708 noncopied_parts = TREE_CHAIN (noncopied_parts);
5709 }
5710 return temp;
5711 }
5712
5713 case MODIFY_EXPR:
5714 {
5715 /* If lhs is complex, expand calls in rhs before computing it.
5716 That's so we don't compute a pointer and save it over a call.
5717 If lhs is simple, compute it first so we can give it as a
5718 target if the rhs is just a call. This avoids an extra temp and copy
5719 and that prevents a partial-subsumption which makes bad code.
5720 Actually we could treat component_ref's of vars like vars. */
5721
5722 tree lhs = TREE_OPERAND (exp, 0);
5723 tree rhs = TREE_OPERAND (exp, 1);
5724 tree noncopied_parts = 0;
5725 tree lhs_type = TREE_TYPE (lhs);
5726
5727 temp = 0;
5728
5729 if (TREE_CODE (lhs) != VAR_DECL
5730 && TREE_CODE (lhs) != RESULT_DECL
5731 && TREE_CODE (lhs) != PARM_DECL)
5732 preexpand_calls (exp);
5733
5734 /* Check for |= or &= of a bitfield of size one into another bitfield
5735 of size 1. In this case, (unless we need the result of the
5736 assignment) we can do this more efficiently with a
5737 test followed by an assignment, if necessary.
5738
5739 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5740 things change so we do, this code should be enhanced to
5741 support it. */
5742 if (ignore
5743 && TREE_CODE (lhs) == COMPONENT_REF
5744 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5745 || TREE_CODE (rhs) == BIT_AND_EXPR)
5746 && TREE_OPERAND (rhs, 0) == lhs
5747 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5748 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5749 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5750 {
5751 rtx label = gen_label_rtx ();
5752
5753 do_jump (TREE_OPERAND (rhs, 1),
5754 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5755 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5756 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5757 (TREE_CODE (rhs) == BIT_IOR_EXPR
5758 ? integer_one_node
5759 : integer_zero_node)),
5760 0, 0);
e7c33f54 5761 do_pending_stack_adjust ();
bbf6f052
RK
5762 emit_label (label);
5763 return const0_rtx;
5764 }
5765
5766 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5767 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5768 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5769 TYPE_NONCOPIED_PARTS (lhs_type));
5770
5771 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5772 while (noncopied_parts != 0)
5773 {
5774 expand_assignment (TREE_PURPOSE (noncopied_parts),
5775 TREE_VALUE (noncopied_parts), 0, 0);
5776 noncopied_parts = TREE_CHAIN (noncopied_parts);
5777 }
5778 return temp;
5779 }
5780
5781 case PREINCREMENT_EXPR:
5782 case PREDECREMENT_EXPR:
5783 return expand_increment (exp, 0);
5784
5785 case POSTINCREMENT_EXPR:
5786 case POSTDECREMENT_EXPR:
5787 /* Faster to treat as pre-increment if result is not used. */
5788 return expand_increment (exp, ! ignore);
5789
5790 case ADDR_EXPR:
5791 /* Are we taking the address of a nested function? */
5792 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5793 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5794 {
5795 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5796 op0 = force_operand (op0, target);
5797 }
5798 else
5799 {
e287fd6e
RK
5800 /* We make sure to pass const0_rtx down if we came in with
5801 ignore set, to avoid doing the cleanups twice for something. */
5802 op0 = expand_expr (TREE_OPERAND (exp, 0),
5803 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
5804 (modifier == EXPAND_INITIALIZER
5805 ? modifier : EXPAND_CONST_ADDRESS));
896102d0
RK
5806
5807 /* We would like the object in memory. If it is a constant,
5808 we can have it be statically allocated into memory. For
5809 a non-constant (REG or SUBREG), we need to allocate some
5810 memory and store the value into it. */
5811
5812 if (CONSTANT_P (op0))
5813 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5814 op0);
5815
b6f01001
RS
5816 /* These cases happen in Fortran. Is that legitimate?
5817 Should Fortran work in another way?
5818 Do they happen in C? */
5819 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5820 || GET_CODE (op0) == CONCAT)
896102d0
RK
5821 {
5822 /* If this object is in a register, it must be not
5823 be BLKmode. */
5824 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5825 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5826 rtx memloc
5827 = assign_stack_temp (inner_mode,
5828 int_size_in_bytes (inner_type), 1);
5829
5830 emit_move_insn (memloc, op0);
5831 op0 = memloc;
5832 }
5833
bbf6f052
RK
5834 if (GET_CODE (op0) != MEM)
5835 abort ();
5836
5837 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5838 return XEXP (op0, 0);
5839 op0 = force_operand (XEXP (op0, 0), target);
5840 }
5841 if (flag_force_addr && GET_CODE (op0) != REG)
5842 return force_reg (Pmode, op0);
5843 return op0;
5844
5845 case ENTRY_VALUE_EXPR:
5846 abort ();
5847
7308a047
RS
5848 /* COMPLEX type for Extended Pascal & Fortran */
5849 case COMPLEX_EXPR:
5850 {
5851 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5852
5853 rtx prev;
5854
5855 /* Get the rtx code of the operands. */
5856 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5857 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5858
5859 if (! target)
5860 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5861
5862 prev = get_last_insn ();
5863
5864 /* Tell flow that the whole of the destination is being set. */
5865 if (GET_CODE (target) == REG)
5866 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5867
5868 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
5869 emit_move_insn (gen_realpart (mode, target), op0);
5870 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047
RS
5871
5872 /* Complex construction should appear as a single unit. */
6d6e61ce
RS
5873 if (GET_CODE (target) != CONCAT)
5874 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
5875 each with a separate pseudo as destination.
5876 It's not correct for flow to treat them as a unit. */
5877 group_insns (prev);
7308a047
RS
5878
5879 return target;
5880 }
5881
5882 case REALPART_EXPR:
2d7050fd
RS
5883 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5884 return gen_realpart (mode, op0);
7308a047
RS
5885
5886 case IMAGPART_EXPR:
2d7050fd
RS
5887 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5888 return gen_imagpart (mode, op0);
7308a047
RS
5889
5890 case CONJ_EXPR:
5891 {
5892 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5893 rtx imag_t;
5894 rtx prev;
5895
5896 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5897
5898 if (! target)
5899 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5900
5901 prev = get_last_insn ();
5902
5903 /* Tell flow that the whole of the destination is being set. */
5904 if (GET_CODE (target) == REG)
5905 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5906
5907 /* Store the realpart and the negated imagpart to target. */
2d7050fd 5908 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
7308a047 5909
2d7050fd 5910 imag_t = gen_imagpart (mode, target);
7308a047 5911 temp = expand_unop (mode, neg_optab,
2d7050fd 5912 gen_imagpart (mode, op0), imag_t, 0);
7308a047
RS
5913 if (temp != imag_t)
5914 emit_move_insn (imag_t, temp);
5915
5916 /* Conjugate should appear as a single unit */
6d6e61ce
RS
5917 if (GET_CODE (target) != CONCAT)
5918 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
5919 each with a separate pseudo as destination.
5920 It's not correct for flow to treat them as a unit. */
5921 group_insns (prev);
7308a047
RS
5922
5923 return target;
5924 }
5925
bbf6f052 5926 case ERROR_MARK:
66538193
RS
5927 op0 = CONST0_RTX (tmode);
5928 if (op0 != 0)
5929 return op0;
bbf6f052
RK
5930 return const0_rtx;
5931
5932 default:
90764a87 5933 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
5934 }
5935
5936 /* Here to do an ordinary binary operator, generating an instruction
5937 from the optab already placed in `this_optab'. */
5938 binop:
5939 preexpand_calls (exp);
5940 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5941 subtarget = 0;
5942 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 5943 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5944 binop2:
5945 temp = expand_binop (mode, this_optab, op0, op1, target,
5946 unsignedp, OPTAB_LIB_WIDEN);
5947 if (temp == 0)
5948 abort ();
5949 return temp;
5950}
bbf6f052 5951
bbf6f052 5952
ca695ac9
JB
5953/* Emit bytecode to evaluate the given expression EXP to the stack. */
5954void
5955bc_expand_expr (exp)
5956 tree exp;
bbf6f052 5957{
ca695ac9
JB
5958 enum tree_code code;
5959 tree type, arg0;
5960 rtx r;
5961 struct binary_operator *binoptab;
5962 struct unary_operator *unoptab;
5963 struct increment_operator *incroptab;
5964 struct bc_label *lab, *lab1;
5965 enum bytecode_opcode opcode;
5966
5967
5968 code = TREE_CODE (exp);
5969
5970 switch (code)
bbf6f052 5971 {
ca695ac9
JB
5972 case PARM_DECL:
5973
5974 if (DECL_RTL (exp) == 0)
bbf6f052 5975 {
ca695ac9
JB
5976 error_with_decl (exp, "prior parameter's size depends on `%s'");
5977 return;
bbf6f052 5978 }
ca695ac9
JB
5979
5980 bc_load_parmaddr (DECL_RTL (exp));
5981 bc_load_memory (TREE_TYPE (exp), exp);
5982
5983 return;
5984
5985 case VAR_DECL:
5986
5987 if (DECL_RTL (exp) == 0)
5988 abort ();
5989
5990#if 0
e7a42772 5991 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
5992 bc_load_externaddr (DECL_RTL (exp));
5993 else
5994 bc_load_localaddr (DECL_RTL (exp));
5995#endif
5996 if (TREE_PUBLIC (exp))
e7a42772
JB
5997 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
5998 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
ca695ac9
JB
5999 else
6000 bc_load_localaddr (DECL_RTL (exp));
6001
6002 bc_load_memory (TREE_TYPE (exp), exp);
6003 return;
6004
6005 case INTEGER_CST:
6006
6007#ifdef DEBUG_PRINT_CODE
6008 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6009#endif
6bd6178d 6010 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
ca695ac9 6011 ? SImode
6bd6178d 6012 : TYPE_MODE (TREE_TYPE (exp)))],
ca695ac9
JB
6013 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6014 return;
6015
6016 case REAL_CST:
6017
c02bd5d9 6018#if 0
ca695ac9
JB
6019#ifdef DEBUG_PRINT_CODE
6020 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6021#endif
c02bd5d9 6022 /* FIX THIS: find a better way to pass real_cst's. -bson */
ca695ac9
JB
6023 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6024 (double) TREE_REAL_CST (exp));
c02bd5d9
JB
6025#else
6026 abort ();
6027#endif
6028
ca695ac9
JB
6029 return;
6030
6031 case CALL_EXPR:
6032
6033 /* We build a call description vector describing the type of
6034 the return value and of the arguments; this call vector,
6035 together with a pointer to a location for the return value
6036 and the base of the argument list, is passed to the low
6037 level machine dependent call subroutine, which is responsible
6038 for putting the arguments wherever real functions expect
6039 them, as well as getting the return value back. */
6040 {
6041 tree calldesc = 0, arg;
6042 int nargs = 0, i;
6043 rtx retval;
6044
6045 /* Push the evaluated args on the evaluation stack in reverse
6046 order. Also make an entry for each arg in the calldesc
6047 vector while we're at it. */
6048
6049 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6050
6051 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6052 {
6053 ++nargs;
6054 bc_expand_expr (TREE_VALUE (arg));
6055
6056 calldesc = tree_cons ((tree) 0,
6057 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6058 calldesc);
6059 calldesc = tree_cons ((tree) 0,
6060 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6061 calldesc);
6062 }
6063
6064 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6065
6066 /* Allocate a location for the return value and push its
6067 address on the evaluation stack. Also make an entry
6068 at the front of the calldesc for the return value type. */
6069
6070 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6071 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6072 bc_load_localaddr (retval);
6073
6074 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6075 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6076
6077 /* Prepend the argument count. */
6078 calldesc = tree_cons ((tree) 0,
6079 build_int_2 (nargs, 0),
6080 calldesc);
6081
6082 /* Push the address of the call description vector on the stack. */
6083 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6084 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6085 build_index_type (build_int_2 (nargs * 2, 0)));
6086 r = output_constant_def (calldesc);
6087 bc_load_externaddr (r);
6088
6089 /* Push the address of the function to be called. */
6090 bc_expand_expr (TREE_OPERAND (exp, 0));
6091
6092 /* Call the function, popping its address and the calldesc vector
6093 address off the evaluation stack in the process. */
6094 bc_emit_instruction (call);
6095
6096 /* Pop the arguments off the stack. */
6097 bc_adjust_stack (nargs);
6098
6099 /* Load the return value onto the stack. */
6100 bc_load_localaddr (retval);
6101 bc_load_memory (type, TREE_OPERAND (exp, 0));
6102 }
6103 return;
6104
6105 case SAVE_EXPR:
6106
6107 if (!SAVE_EXPR_RTL (exp))
bbf6f052 6108 {
ca695ac9
JB
6109 /* First time around: copy to local variable */
6110 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6111 TYPE_ALIGN (TREE_TYPE(exp)));
6112 bc_expand_expr (TREE_OPERAND (exp, 0));
6d6e61ce 6113 bc_emit_instruction (duplicate);
ca695ac9
JB
6114
6115 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6116 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 6117 }
ca695ac9 6118 else
bbf6f052 6119 {
ca695ac9
JB
6120 /* Consecutive reference: use saved copy */
6121 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6122 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bbf6f052 6123 }
ca695ac9
JB
6124 return;
6125
6126#if 0
6127 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6128 how are they handled instead? */
6129 case LET_STMT:
6130
6131 TREE_USED (exp) = 1;
6132 bc_expand_expr (STMT_BODY (exp));
6133 return;
6134#endif
6135
6136 case NOP_EXPR:
6137 case CONVERT_EXPR:
6138
6139 bc_expand_expr (TREE_OPERAND (exp, 0));
6140 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6141 return;
6142
6143 case MODIFY_EXPR:
6144
c02bd5d9 6145 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
ca695ac9
JB
6146 return;
6147
6148 case ADDR_EXPR:
6149
6150 bc_expand_address (TREE_OPERAND (exp, 0));
6151 return;
6152
6153 case INDIRECT_REF:
6154
6155 bc_expand_expr (TREE_OPERAND (exp, 0));
6156 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6157 return;
6158
6159 case ARRAY_REF:
6160
6161 bc_expand_expr (bc_canonicalize_array_ref (exp));
6162 return;
6163
6164 case COMPONENT_REF:
6165
6166 bc_expand_component_address (exp);
6167
6168 /* If we have a bitfield, generate a proper load */
6169 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6170 return;
6171
6172 case COMPOUND_EXPR:
6173
6174 bc_expand_expr (TREE_OPERAND (exp, 0));
6175 bc_emit_instruction (drop);
6176 bc_expand_expr (TREE_OPERAND (exp, 1));
6177 return;
6178
6179 case COND_EXPR:
6180
6181 bc_expand_expr (TREE_OPERAND (exp, 0));
6182 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6183 lab = bc_get_bytecode_label ();
c02bd5d9 6184 bc_emit_bytecode (xjumpifnot);
ca695ac9
JB
6185 bc_emit_bytecode_labelref (lab);
6186
6187#ifdef DEBUG_PRINT_CODE
6188 fputc ('\n', stderr);
6189#endif
6190 bc_expand_expr (TREE_OPERAND (exp, 1));
6191 lab1 = bc_get_bytecode_label ();
6192 bc_emit_bytecode (jump);
6193 bc_emit_bytecode_labelref (lab1);
6194
6195#ifdef DEBUG_PRINT_CODE
6196 fputc ('\n', stderr);
6197#endif
6198
6199 bc_emit_bytecode_labeldef (lab);
6200 bc_expand_expr (TREE_OPERAND (exp, 2));
6201 bc_emit_bytecode_labeldef (lab1);
6202 return;
6203
6204 case TRUTH_ANDIF_EXPR:
6205
c02bd5d9 6206 opcode = xjumpifnot;
ca695ac9
JB
6207 goto andorif;
6208
6209 case TRUTH_ORIF_EXPR:
6210
c02bd5d9 6211 opcode = xjumpif;
ca695ac9
JB
6212 goto andorif;
6213
6214 case PLUS_EXPR:
6215
6216 binoptab = optab_plus_expr;
6217 goto binop;
6218
6219 case MINUS_EXPR:
6220
6221 binoptab = optab_minus_expr;
6222 goto binop;
6223
6224 case MULT_EXPR:
6225
6226 binoptab = optab_mult_expr;
6227 goto binop;
6228
6229 case TRUNC_DIV_EXPR:
6230 case FLOOR_DIV_EXPR:
6231 case CEIL_DIV_EXPR:
6232 case ROUND_DIV_EXPR:
6233 case EXACT_DIV_EXPR:
6234
6235 binoptab = optab_trunc_div_expr;
6236 goto binop;
6237
6238 case TRUNC_MOD_EXPR:
6239 case FLOOR_MOD_EXPR:
6240 case CEIL_MOD_EXPR:
6241 case ROUND_MOD_EXPR:
6242
6243 binoptab = optab_trunc_mod_expr;
6244 goto binop;
6245
6246 case FIX_ROUND_EXPR:
6247 case FIX_FLOOR_EXPR:
6248 case FIX_CEIL_EXPR:
6249 abort (); /* Not used for C. */
6250
6251 case FIX_TRUNC_EXPR:
6252 case FLOAT_EXPR:
6253 case MAX_EXPR:
6254 case MIN_EXPR:
6255 case FFS_EXPR:
6256 case LROTATE_EXPR:
6257 case RROTATE_EXPR:
6258 abort (); /* FIXME */
6259
6260 case RDIV_EXPR:
6261
6262 binoptab = optab_rdiv_expr;
6263 goto binop;
6264
6265 case BIT_AND_EXPR:
6266
6267 binoptab = optab_bit_and_expr;
6268 goto binop;
6269
6270 case BIT_IOR_EXPR:
6271
6272 binoptab = optab_bit_ior_expr;
6273 goto binop;
6274
6275 case BIT_XOR_EXPR:
6276
6277 binoptab = optab_bit_xor_expr;
6278 goto binop;
6279
6280 case LSHIFT_EXPR:
6281
6282 binoptab = optab_lshift_expr;
6283 goto binop;
6284
6285 case RSHIFT_EXPR:
6286
6287 binoptab = optab_rshift_expr;
6288 goto binop;
6289
6290 case TRUTH_AND_EXPR:
6291
6292 binoptab = optab_truth_and_expr;
6293 goto binop;
6294
6295 case TRUTH_OR_EXPR:
6296
6297 binoptab = optab_truth_or_expr;
6298 goto binop;
6299
6300 case LT_EXPR:
6301
6302 binoptab = optab_lt_expr;
6303 goto binop;
6304
6305 case LE_EXPR:
6306
6307 binoptab = optab_le_expr;
6308 goto binop;
6309
6310 case GE_EXPR:
6311
6312 binoptab = optab_ge_expr;
6313 goto binop;
6314
6315 case GT_EXPR:
6316
6317 binoptab = optab_gt_expr;
6318 goto binop;
6319
6320 case EQ_EXPR:
6321
6322 binoptab = optab_eq_expr;
6323 goto binop;
6324
6325 case NE_EXPR:
6326
6327 binoptab = optab_ne_expr;
6328 goto binop;
6329
6330 case NEGATE_EXPR:
6331
6332 unoptab = optab_negate_expr;
6333 goto unop;
6334
6335 case BIT_NOT_EXPR:
6336
6337 unoptab = optab_bit_not_expr;
6338 goto unop;
6339
6340 case TRUTH_NOT_EXPR:
6341
6342 unoptab = optab_truth_not_expr;
6343 goto unop;
6344
6345 case PREDECREMENT_EXPR:
6346
6347 incroptab = optab_predecrement_expr;
6348 goto increment;
6349
6350 case PREINCREMENT_EXPR:
6351
6352 incroptab = optab_preincrement_expr;
6353 goto increment;
6354
6355 case POSTDECREMENT_EXPR:
6356
6357 incroptab = optab_postdecrement_expr;
6358 goto increment;
6359
6360 case POSTINCREMENT_EXPR:
6361
6362 incroptab = optab_postincrement_expr;
6363 goto increment;
6364
6365 case CONSTRUCTOR:
6366
6367 bc_expand_constructor (exp);
6368 return;
6369
6370 case ERROR_MARK:
6371 case RTL_EXPR:
6372
6373 return;
6374
6375 case BIND_EXPR:
6376 {
6377 tree vars = TREE_OPERAND (exp, 0);
6378 int vars_need_expansion = 0;
6379
6380 /* Need to open a binding contour here because
6381 if there are any cleanups they most be contained here. */
6382 expand_start_bindings (0);
6383
6384 /* Mark the corresponding BLOCK for output. */
6385 if (TREE_OPERAND (exp, 2) != 0)
6386 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6387
6388 /* If VARS have not yet been expanded, expand them now. */
6389 while (vars)
6390 {
6391 if (DECL_RTL (vars) == 0)
6392 {
6393 vars_need_expansion = 1;
6394 bc_expand_decl (vars, 0);
6395 }
6396 bc_expand_decl_init (vars);
6397 vars = TREE_CHAIN (vars);
6398 }
6399
6400 bc_expand_expr (TREE_OPERAND (exp, 1));
6401
6402 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6403
6404 return;
6405 }
6406 }
6407
6408 abort ();
6409
6410 binop:
6411
6412 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6413 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6414 return;
6415
6416
6417 unop:
6418
6419 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6420 return;
6421
6422
6423 andorif:
6424
6425 bc_expand_expr (TREE_OPERAND (exp, 0));
6426 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6427 lab = bc_get_bytecode_label ();
6428
6d6e61ce 6429 bc_emit_instruction (duplicate);
ca695ac9
JB
6430 bc_emit_bytecode (opcode);
6431 bc_emit_bytecode_labelref (lab);
6432
6433#ifdef DEBUG_PRINT_CODE
6434 fputc ('\n', stderr);
6435#endif
6436
6437 bc_emit_instruction (drop);
6438
6439 bc_expand_expr (TREE_OPERAND (exp, 1));
6440 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6441 bc_emit_bytecode_labeldef (lab);
6442 return;
6443
6444
6445 increment:
6446
6447 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6448
6449 /* Push the quantum. */
6450 bc_expand_expr (TREE_OPERAND (exp, 1));
6451
6452 /* Convert it to the lvalue's type. */
6453 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6454
6455 /* Push the address of the lvalue */
c02bd5d9 6456 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
ca695ac9
JB
6457
6458 /* Perform actual increment */
c02bd5d9 6459 bc_expand_increment (incroptab, type);
ca695ac9
JB
6460 return;
6461}
6462\f
6463/* Return the alignment in bits of EXP, a pointer valued expression.
6464 But don't return more than MAX_ALIGN no matter what.
6465 The alignment returned is, by default, the alignment of the thing that
6466 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6467
6468 Otherwise, look at the expression to see if we can do better, i.e., if the
6469 expression is actually pointing at an object whose alignment is tighter. */
6470
6471static int
6472get_pointer_alignment (exp, max_align)
6473 tree exp;
6474 unsigned max_align;
6475{
6476 unsigned align, inner;
6477
6478 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6479 return 0;
6480
6481 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6482 align = MIN (align, max_align);
6483
6484 while (1)
6485 {
6486 switch (TREE_CODE (exp))
6487 {
6488 case NOP_EXPR:
6489 case CONVERT_EXPR:
6490 case NON_LVALUE_EXPR:
6491 exp = TREE_OPERAND (exp, 0);
6492 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6493 return align;
6494 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6495 inner = MIN (inner, max_align);
6496 align = MAX (align, inner);
6497 break;
6498
6499 case PLUS_EXPR:
6500 /* If sum of pointer + int, restrict our maximum alignment to that
6501 imposed by the integer. If not, we can't do any better than
6502 ALIGN. */
6503 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6504 return align;
6505
6506 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6507 & (max_align - 1))
6508 != 0)
6509 max_align >>= 1;
6510
6511 exp = TREE_OPERAND (exp, 0);
6512 break;
6513
6514 case ADDR_EXPR:
6515 /* See what we are pointing at and look at its alignment. */
6516 exp = TREE_OPERAND (exp, 0);
6517 if (TREE_CODE (exp) == FUNCTION_DECL)
6518 align = MAX (align, FUNCTION_BOUNDARY);
6519 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6520 align = MAX (align, DECL_ALIGN (exp));
6521#ifdef CONSTANT_ALIGNMENT
6522 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6523 align = CONSTANT_ALIGNMENT (exp, align);
6524#endif
6525 return MIN (align, max_align);
6526
6527 default:
6528 return align;
6529 }
6530 }
6531}
6532\f
6533/* Return the tree node and offset if a given argument corresponds to
6534 a string constant. */
6535
6536static tree
6537string_constant (arg, ptr_offset)
6538 tree arg;
6539 tree *ptr_offset;
6540{
6541 STRIP_NOPS (arg);
6542
6543 if (TREE_CODE (arg) == ADDR_EXPR
6544 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6545 {
6546 *ptr_offset = integer_zero_node;
6547 return TREE_OPERAND (arg, 0);
6548 }
6549 else if (TREE_CODE (arg) == PLUS_EXPR)
6550 {
6551 tree arg0 = TREE_OPERAND (arg, 0);
6552 tree arg1 = TREE_OPERAND (arg, 1);
6553
6554 STRIP_NOPS (arg0);
6555 STRIP_NOPS (arg1);
6556
6557 if (TREE_CODE (arg0) == ADDR_EXPR
6558 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6559 {
6560 *ptr_offset = arg1;
6561 return TREE_OPERAND (arg0, 0);
6562 }
6563 else if (TREE_CODE (arg1) == ADDR_EXPR
6564 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6565 {
6566 *ptr_offset = arg0;
6567 return TREE_OPERAND (arg1, 0);
6568 }
6569 }
6570
6571 return 0;
6572}
6573
6574/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6575 way, because it could contain a zero byte in the middle.
6576 TREE_STRING_LENGTH is the size of the character array, not the string.
6577
6578 Unfortunately, string_constant can't access the values of const char
6579 arrays with initializers, so neither can we do so here. */
6580
6581static tree
6582c_strlen (src)
6583 tree src;
6584{
6585 tree offset_node;
6586 int offset, max;
6587 char *ptr;
6588
6589 src = string_constant (src, &offset_node);
6590 if (src == 0)
6591 return 0;
6592 max = TREE_STRING_LENGTH (src);
6593 ptr = TREE_STRING_POINTER (src);
6594 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6595 {
6596 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6597 compute the offset to the following null if we don't know where to
6598 start searching for it. */
6599 int i;
6600 for (i = 0; i < max; i++)
6601 if (ptr[i] == 0)
6602 return 0;
6603 /* We don't know the starting offset, but we do know that the string
6604 has no internal zero bytes. We can assume that the offset falls
6605 within the bounds of the string; otherwise, the programmer deserves
6606 what he gets. Subtract the offset from the length of the string,
6607 and return that. */
6608 /* This would perhaps not be valid if we were dealing with named
6609 arrays in addition to literal string constants. */
6610 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6611 }
6612
6613 /* We have a known offset into the string. Start searching there for
6614 a null character. */
6615 if (offset_node == 0)
6616 offset = 0;
6617 else
6618 {
6619 /* Did we get a long long offset? If so, punt. */
6620 if (TREE_INT_CST_HIGH (offset_node) != 0)
6621 return 0;
6622 offset = TREE_INT_CST_LOW (offset_node);
6623 }
6624 /* If the offset is known to be out of bounds, warn, and call strlen at
6625 runtime. */
6626 if (offset < 0 || offset > max)
6627 {
6628 warning ("offset outside bounds of constant string");
6629 return 0;
6630 }
6631 /* Use strlen to search for the first zero byte. Since any strings
6632 constructed with build_string will have nulls appended, we win even
6633 if we get handed something like (char[4])"abcd".
6634
6635 Since OFFSET is our starting index into the string, no further
6636 calculation is needed. */
6637 return size_int (strlen (ptr + offset));
6638}
6639\f
6640/* Expand an expression EXP that calls a built-in function,
6641 with result going to TARGET if that's convenient
6642 (and in mode MODE if that's convenient).
6643 SUBTARGET may be used as the target for computing one of EXP's operands.
6644 IGNORE is nonzero if the value is to be ignored. */
6645
6646static rtx
6647expand_builtin (exp, target, subtarget, mode, ignore)
6648 tree exp;
6649 rtx target;
6650 rtx subtarget;
6651 enum machine_mode mode;
6652 int ignore;
6653{
6654 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6655 tree arglist = TREE_OPERAND (exp, 1);
6656 rtx op0;
6657 rtx lab1, insns;
6658 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6659 optab builtin_optab;
6660
6661 switch (DECL_FUNCTION_CODE (fndecl))
6662 {
6663 case BUILT_IN_ABS:
6664 case BUILT_IN_LABS:
6665 case BUILT_IN_FABS:
6666 /* build_function_call changes these into ABS_EXPR. */
6667 abort ();
6668
6669 case BUILT_IN_SIN:
6670 case BUILT_IN_COS:
6671 case BUILT_IN_FSQRT:
6672 /* If not optimizing, call the library function. */
6673 if (! optimize)
6674 break;
6675
6676 if (arglist == 0
6677 /* Arg could be wrong type if user redeclared this fcn wrong. */
6678 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7b073ca6 6679 break;
ca695ac9
JB
6680
6681 /* Stabilize and compute the argument. */
6682 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6683 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6684 {
6685 exp = copy_node (exp);
6686 arglist = copy_node (arglist);
6687 TREE_OPERAND (exp, 1) = arglist;
6688 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6689 }
6690 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6691
6692 /* Make a suitable register to place result in. */
6693 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6694
6695 emit_queue ();
6696 start_sequence ();
6697
6698 switch (DECL_FUNCTION_CODE (fndecl))
6699 {
6700 case BUILT_IN_SIN:
6701 builtin_optab = sin_optab; break;
6702 case BUILT_IN_COS:
6703 builtin_optab = cos_optab; break;
6704 case BUILT_IN_FSQRT:
6705 builtin_optab = sqrt_optab; break;
6706 default:
6707 abort ();
6708 }
6709
6710 /* Compute into TARGET.
6711 Set TARGET to wherever the result comes back. */
6712 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6713 builtin_optab, op0, target, 0);
6714
6715 /* If we were unable to expand via the builtin, stop the
6716 sequence (without outputting the insns) and break, causing
6717 a call the the library function. */
6718 if (target == 0)
6719 {
6720 end_sequence ();
6721 break;
6722 }
6723
6724 /* Check the results by default. But if flag_fast_math is turned on,
6725 then assume sqrt will always be called with valid arguments. */
6726
6727 if (! flag_fast_math)
6728 {
6729 /* Don't define the builtin FP instructions
6730 if your machine is not IEEE. */
6731 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6732 abort ();
6733
6734 lab1 = gen_label_rtx ();
6735
6736 /* Test the result; if it is NaN, set errno=EDOM because
6737 the argument was not in the domain. */
6738 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6739 emit_jump_insn (gen_beq (lab1));
6740
6741#if TARGET_EDOM
6742 {
6743#ifdef GEN_ERRNO_RTX
6744 rtx errno_rtx = GEN_ERRNO_RTX;
6745#else
6746 rtx errno_rtx
6747 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6748#endif
6749
6750 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6751 }
6752#else
6753 /* We can't set errno=EDOM directly; let the library call do it.
6754 Pop the arguments right away in case the call gets deleted. */
6755 NO_DEFER_POP;
6756 expand_call (exp, target, 0);
6757 OK_DEFER_POP;
6758#endif
6759
6760 emit_label (lab1);
6761 }
6762
6763 /* Output the entire sequence. */
6764 insns = get_insns ();
6765 end_sequence ();
6766 emit_insns (insns);
6767
6768 return target;
6769
6770 /* __builtin_apply_args returns block of memory allocated on
6771 the stack into which is stored the arg pointer, structure
6772 value address, static chain, and all the registers that might
6773 possibly be used in performing a function call. The code is
6774 moved to the start of the function so the incoming values are
6775 saved. */
6776 case BUILT_IN_APPLY_ARGS:
6777 /* Don't do __builtin_apply_args more than once in a function.
6778 Save the result of the first call and reuse it. */
6779 if (apply_args_value != 0)
6780 return apply_args_value;
6781 {
6782 /* When this function is called, it means that registers must be
6783 saved on entry to this function. So we migrate the
6784 call to the first insn of this function. */
6785 rtx temp;
6786 rtx seq;
6787
6788 start_sequence ();
6789 temp = expand_builtin_apply_args ();
6790 seq = get_insns ();
6791 end_sequence ();
6792
6793 apply_args_value = temp;
6794
6795 /* Put the sequence after the NOTE that starts the function.
6796 If this is inside a SEQUENCE, make the outer-level insn
6797 chain current, so the code is placed at the start of the
6798 function. */
6799 push_topmost_sequence ();
6800 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6801 pop_topmost_sequence ();
6802 return temp;
6803 }
6804
6805 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6806 FUNCTION with a copy of the parameters described by
6807 ARGUMENTS, and ARGSIZE. It returns a block of memory
6808 allocated on the stack into which is stored all the registers
6809 that might possibly be used for returning the result of a
6810 function. ARGUMENTS is the value returned by
6811 __builtin_apply_args. ARGSIZE is the number of bytes of
6812 arguments that must be copied. ??? How should this value be
6813 computed? We'll also need a safe worst case value for varargs
6814 functions. */
6815 case BUILT_IN_APPLY:
6816 if (arglist == 0
6817 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6818 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6819 || TREE_CHAIN (arglist) == 0
6820 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6821 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6822 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6823 return const0_rtx;
6824 else
6825 {
6826 int i;
6827 tree t;
6828 rtx ops[3];
6829
6830 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6831 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6832
6833 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6834 }
6835
6836 /* __builtin_return (RESULT) causes the function to return the
6837 value described by RESULT. RESULT is address of the block of
6838 memory returned by __builtin_apply. */
6839 case BUILT_IN_RETURN:
6840 if (arglist
6841 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6842 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
6843 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
6844 NULL_RTX, VOIDmode, 0));
6845 return const0_rtx;
6846
6847 case BUILT_IN_SAVEREGS:
6848 /* Don't do __builtin_saveregs more than once in a function.
6849 Save the result of the first call and reuse it. */
6850 if (saveregs_value != 0)
6851 return saveregs_value;
6852 {
6853 /* When this function is called, it means that registers must be
6854 saved on entry to this function. So we migrate the
6855 call to the first insn of this function. */
6856 rtx temp;
6857 rtx seq;
6858 rtx valreg, saved_valreg;
6859
6860 /* Now really call the function. `expand_call' does not call
6861 expand_builtin, so there is no danger of infinite recursion here. */
6862 start_sequence ();
6863
6864#ifdef EXPAND_BUILTIN_SAVEREGS
6865 /* Do whatever the machine needs done in this case. */
6866 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6867#else
6868 /* The register where the function returns its value
6869 is likely to have something else in it, such as an argument.
6870 So preserve that register around the call. */
6871 if (value_mode != VOIDmode)
6872 {
6873 valreg = hard_libcall_value (value_mode);
6874 saved_valreg = gen_reg_rtx (value_mode);
6875 emit_move_insn (saved_valreg, valreg);
6876 }
6877
6878 /* Generate the call, putting the value in a pseudo. */
6879 temp = expand_call (exp, target, ignore);
6880
6881 if (value_mode != VOIDmode)
6882 emit_move_insn (valreg, saved_valreg);
6883#endif
6884
6885 seq = get_insns ();
6886 end_sequence ();
6887
6888 saveregs_value = temp;
6889
6890 /* Put the sequence after the NOTE that starts the function.
6891 If this is inside a SEQUENCE, make the outer-level insn
6892 chain current, so the code is placed at the start of the
6893 function. */
6894 push_topmost_sequence ();
6895 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6896 pop_topmost_sequence ();
6897 return temp;
6898 }
6899
6900 /* __builtin_args_info (N) returns word N of the arg space info
6901 for the current function. The number and meanings of words
6902 is controlled by the definition of CUMULATIVE_ARGS. */
6903 case BUILT_IN_ARGS_INFO:
6904 {
6905 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6906 int i;
6907 int *word_ptr = (int *) &current_function_args_info;
6908 tree type, elts, result;
6909
6910 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6911 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6912 __FILE__, __LINE__);
6913
6914 if (arglist != 0)
6915 {
6916 tree arg = TREE_VALUE (arglist);
6917 if (TREE_CODE (arg) != INTEGER_CST)
6918 error ("argument of `__builtin_args_info' must be constant");
6919 else
6920 {
6921 int wordnum = TREE_INT_CST_LOW (arg);
6922
6923 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6924 error ("argument of `__builtin_args_info' out of range");
6925 else
6926 return GEN_INT (word_ptr[wordnum]);
6927 }
6928 }
6929 else
6930 error ("missing argument in `__builtin_args_info'");
6931
6932 return const0_rtx;
6933
6934#if 0
6935 for (i = 0; i < nwords; i++)
6936 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6937
6938 type = build_array_type (integer_type_node,
6939 build_index_type (build_int_2 (nwords, 0)));
6940 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6941 TREE_CONSTANT (result) = 1;
6942 TREE_STATIC (result) = 1;
6943 result = build (INDIRECT_REF, build_pointer_type (type), result);
6944 TREE_CONSTANT (result) = 1;
6945 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6946#endif
6947 }
6948
6949 /* Return the address of the first anonymous stack arg. */
6950 case BUILT_IN_NEXT_ARG:
6951 {
1f9becfc 6952 tree parm;
ca695ac9 6953 tree fntype = TREE_TYPE (current_function_decl);
1f9becfc 6954 tree fnargs = DECL_ARGUMENTS (current_function_decl);
ca695ac9
JB
6955 if (!(TYPE_ARG_TYPES (fntype) != 0
6956 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1f9becfc
RK
6957 != void_type_node))
6958 && !(fnargs
6959 && (parm = tree_last (fnargs)) != 0
6960 && DECL_NAME (parm)
6961 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
6962 "__builtin_va_alist"))))
ca695ac9
JB
6963 {
6964 error ("`va_start' used in function with fixed args");
6965 return const0_rtx;
6966 }
6967 }
6968
6969 return expand_binop (Pmode, add_optab,
6970 current_function_internal_arg_pointer,
6971 current_function_arg_offset_rtx,
6972 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6973
6974 case BUILT_IN_CLASSIFY_TYPE:
6975 if (arglist != 0)
6976 {
6977 tree type = TREE_TYPE (TREE_VALUE (arglist));
6978 enum tree_code code = TREE_CODE (type);
6979 if (code == VOID_TYPE)
6980 return GEN_INT (void_type_class);
6981 if (code == INTEGER_TYPE)
6982 return GEN_INT (integer_type_class);
6983 if (code == CHAR_TYPE)
6984 return GEN_INT (char_type_class);
6985 if (code == ENUMERAL_TYPE)
6986 return GEN_INT (enumeral_type_class);
6987 if (code == BOOLEAN_TYPE)
6988 return GEN_INT (boolean_type_class);
6989 if (code == POINTER_TYPE)
6990 return GEN_INT (pointer_type_class);
6991 if (code == REFERENCE_TYPE)
6992 return GEN_INT (reference_type_class);
6993 if (code == OFFSET_TYPE)
6994 return GEN_INT (offset_type_class);
6995 if (code == REAL_TYPE)
6996 return GEN_INT (real_type_class);
6997 if (code == COMPLEX_TYPE)
6998 return GEN_INT (complex_type_class);
6999 if (code == FUNCTION_TYPE)
7000 return GEN_INT (function_type_class);
7001 if (code == METHOD_TYPE)
7002 return GEN_INT (method_type_class);
7003 if (code == RECORD_TYPE)
7004 return GEN_INT (record_type_class);
7005 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7006 return GEN_INT (union_type_class);
7007 if (code == ARRAY_TYPE)
7008 return GEN_INT (array_type_class);
7009 if (code == STRING_TYPE)
7010 return GEN_INT (string_type_class);
7011 if (code == SET_TYPE)
7012 return GEN_INT (set_type_class);
7013 if (code == FILE_TYPE)
7014 return GEN_INT (file_type_class);
7015 if (code == LANG_TYPE)
7016 return GEN_INT (lang_type_class);
7017 }
7018 return GEN_INT (no_type_class);
7019
7020 case BUILT_IN_CONSTANT_P:
7021 if (arglist == 0)
7022 return const0_rtx;
7023 else
7024 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7025 ? const1_rtx : const0_rtx);
7026
7027 case BUILT_IN_FRAME_ADDRESS:
7028 /* The argument must be a nonnegative integer constant.
7029 It counts the number of frames to scan up the stack.
7030 The value is the address of that frame. */
7031 case BUILT_IN_RETURN_ADDRESS:
7032 /* The argument must be a nonnegative integer constant.
7033 It counts the number of frames to scan up the stack.
7034 The value is the return address saved in that frame. */
7035 if (arglist == 0)
7036 /* Warning about missing arg was already issued. */
7037 return const0_rtx;
7038 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7039 {
7040 error ("invalid arg to `__builtin_return_address'");
7041 return const0_rtx;
7042 }
7043 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
7044 {
7045 error ("invalid arg to `__builtin_return_address'");
7046 return const0_rtx;
7047 }
7048 else
7049 {
7050 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7051 rtx tem = frame_pointer_rtx;
7052 int i;
7053
7054 /* Some machines need special handling before we can access arbitrary
7055 frames. For example, on the sparc, we must first flush all
7056 register windows to the stack. */
7057#ifdef SETUP_FRAME_ADDRESSES
7058 SETUP_FRAME_ADDRESSES ();
7059#endif
7060
7061 /* On the sparc, the return address is not in the frame, it is
7062 in a register. There is no way to access it off of the current
7063 frame pointer, but it can be accessed off the previous frame
7064 pointer by reading the value from the register window save
7065 area. */
7066#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7067 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7068 count--;
7069#endif
7070
7071 /* Scan back COUNT frames to the specified frame. */
7072 for (i = 0; i < count; i++)
7073 {
7074 /* Assume the dynamic chain pointer is in the word that
7075 the frame address points to, unless otherwise specified. */
7076#ifdef DYNAMIC_CHAIN_ADDRESS
7077 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7078#endif
7079 tem = memory_address (Pmode, tem);
7080 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7081 }
7082
7083 /* For __builtin_frame_address, return what we've got. */
7084 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7085 return tem;
7086
7087 /* For __builtin_return_address,
7088 Get the return address from that frame. */
7089#ifdef RETURN_ADDR_RTX
7090 return RETURN_ADDR_RTX (count, tem);
7091#else
7092 tem = memory_address (Pmode,
7093 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7094 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7095#endif
7096 }
7097
7098 case BUILT_IN_ALLOCA:
7099 if (arglist == 0
7100 /* Arg could be non-integer if user redeclared this fcn wrong. */
7101 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 7102 break;
ca695ac9
JB
7103 current_function_calls_alloca = 1;
7104 /* Compute the argument. */
7105 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7106
7107 /* Allocate the desired space. */
7108 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7109
7110 /* Record the new stack level for nonlocal gotos. */
7111 if (nonlocal_goto_handler_slot != 0)
7112 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
7113 return target;
7114
7115 case BUILT_IN_FFS:
7116 /* If not optimizing, call the library function. */
7117 if (!optimize)
7118 break;
7119
7120 if (arglist == 0
7121 /* Arg could be non-integer if user redeclared this fcn wrong. */
7122 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 7123 break;
ca695ac9
JB
7124
7125 /* Compute the argument. */
7126 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7127 /* Compute ffs, into TARGET if possible.
7128 Set TARGET to wherever the result comes back. */
7129 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7130 ffs_optab, op0, target, 1);
7131 if (target == 0)
7132 abort ();
7133 return target;
7134
7135 case BUILT_IN_STRLEN:
7136 /* If not optimizing, call the library function. */
7137 if (!optimize)
7138 break;
7139
7140 if (arglist == 0
7141 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7142 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7b073ca6 7143 break;
ca695ac9
JB
7144 else
7145 {
7146 tree src = TREE_VALUE (arglist);
7147 tree len = c_strlen (src);
7148
7149 int align
7150 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7151
7152 rtx result, src_rtx, char_rtx;
7153 enum machine_mode insn_mode = value_mode, char_mode;
7154 enum insn_code icode;
7155
7156 /* If the length is known, just return it. */
7157 if (len != 0)
7158 return expand_expr (len, target, mode, 0);
7159
7160 /* If SRC is not a pointer type, don't do this operation inline. */
7161 if (align == 0)
7162 break;
7163
7164 /* Call a function if we can't compute strlen in the right mode. */
7165
7166 while (insn_mode != VOIDmode)
7167 {
7168 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7169 if (icode != CODE_FOR_nothing)
7170 break;
bbf6f052 7171
ca695ac9
JB
7172 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7173 }
7174 if (insn_mode == VOIDmode)
7175 break;
bbf6f052 7176
ca695ac9
JB
7177 /* Make a place to write the result of the instruction. */
7178 result = target;
7179 if (! (result != 0
7180 && GET_CODE (result) == REG
7181 && GET_MODE (result) == insn_mode
7182 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7183 result = gen_reg_rtx (insn_mode);
bbf6f052 7184
ca695ac9
JB
7185 /* Make sure the operands are acceptable to the predicates. */
7186
7187 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7188 result = gen_reg_rtx (insn_mode);
7189
7190 src_rtx = memory_address (BLKmode,
7191 expand_expr (src, NULL_RTX, Pmode,
7192 EXPAND_NORMAL));
7193 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7194 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7195
7196 char_rtx = const0_rtx;
7197 char_mode = insn_operand_mode[(int)icode][2];
7198 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7199 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7200
7201 emit_insn (GEN_FCN (icode) (result,
7202 gen_rtx (MEM, BLKmode, src_rtx),
7203 char_rtx, GEN_INT (align)));
7204
7205 /* Return the value in the proper mode for this function. */
7206 if (GET_MODE (result) == value_mode)
7207 return result;
7208 else if (target != 0)
7209 {
7210 convert_move (target, result, 0);
7211 return target;
7212 }
7213 else
7214 return convert_to_mode (value_mode, result, 0);
7215 }
7216
7217 case BUILT_IN_STRCPY:
e87b4f3f 7218 /* If not optimizing, call the library function. */
ca695ac9 7219 if (!optimize)
e87b4f3f
RS
7220 break;
7221
7222 if (arglist == 0
ca695ac9
JB
7223 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7224 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7225 || TREE_CHAIN (arglist) == 0
7226 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 7227 break;
ca695ac9 7228 else
db0e6d01 7229 {
ca695ac9 7230 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
e7c33f54 7231
ca695ac9
JB
7232 if (len == 0)
7233 break;
e7c33f54 7234
ca695ac9 7235 len = size_binop (PLUS_EXPR, len, integer_one_node);
e7c33f54 7236
ca695ac9 7237 chainon (arglist, build_tree_list (NULL_TREE, len));
1bbddf11
JVA
7238 }
7239
ca695ac9
JB
7240 /* Drops in. */
7241 case BUILT_IN_MEMCPY:
7242 /* If not optimizing, call the library function. */
7243 if (!optimize)
7244 break;
e7c33f54 7245
ca695ac9
JB
7246 if (arglist == 0
7247 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7248 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7249 || TREE_CHAIN (arglist) == 0
7250 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7251 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7252 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 7253 break;
ca695ac9 7254 else
e7c33f54 7255 {
ca695ac9
JB
7256 tree dest = TREE_VALUE (arglist);
7257 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7258 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e87b4f3f 7259
ca695ac9
JB
7260 int src_align
7261 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7262 int dest_align
7263 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7264 rtx dest_rtx, dest_mem, src_mem;
60bac6ea 7265
ca695ac9
JB
7266 /* If either SRC or DEST is not a pointer type, don't do
7267 this operation in-line. */
7268 if (src_align == 0 || dest_align == 0)
7269 {
7270 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7271 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7272 break;
7273 }
7274
7275 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7276 dest_mem = gen_rtx (MEM, BLKmode,
7277 memory_address (BLKmode, dest_rtx));
7278 src_mem = gen_rtx (MEM, BLKmode,
7279 memory_address (BLKmode,
7280 expand_expr (src, NULL_RTX,
7281 Pmode,
7282 EXPAND_NORMAL)));
7283
7284 /* Copy word part most expediently. */
7285 emit_block_move (dest_mem, src_mem,
7286 expand_expr (len, NULL_RTX, VOIDmode, 0),
7287 MIN (src_align, dest_align));
7288 return dest_rtx;
7289 }
7290
7291/* These comparison functions need an instruction that returns an actual
7292 index. An ordinary compare that just sets the condition codes
7293 is not enough. */
7294#ifdef HAVE_cmpstrsi
7295 case BUILT_IN_STRCMP:
7296 /* If not optimizing, call the library function. */
7297 if (!optimize)
7298 break;
7299
7300 if (arglist == 0
7301 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7302 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7303 || TREE_CHAIN (arglist) == 0
7304 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 7305 break;
ca695ac9
JB
7306 else if (!HAVE_cmpstrsi)
7307 break;
7308 {
7309 tree arg1 = TREE_VALUE (arglist);
7310 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7311 tree offset;
7312 tree len, len2;
7313
7314 len = c_strlen (arg1);
7315 if (len)
7316 len = size_binop (PLUS_EXPR, integer_one_node, len);
7317 len2 = c_strlen (arg2);
7318 if (len2)
7319 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7320
7321 /* If we don't have a constant length for the first, use the length
7322 of the second, if we know it. We don't require a constant for
7323 this case; some cost analysis could be done if both are available
7324 but neither is constant. For now, assume they're equally cheap.
7325
7326 If both strings have constant lengths, use the smaller. This
7327 could arise if optimization results in strcpy being called with
7328 two fixed strings, or if the code was machine-generated. We should
7329 add some code to the `memcmp' handler below to deal with such
7330 situations, someday. */
7331 if (!len || TREE_CODE (len) != INTEGER_CST)
7332 {
7333 if (len2)
7334 len = len2;
7335 else if (len == 0)
7336 break;
7337 }
7338 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7339 {
7340 if (tree_int_cst_lt (len2, len))
7341 len = len2;
7342 }
7343
7344 chainon (arglist, build_tree_list (NULL_TREE, len));
7345 }
7346
7347 /* Drops in. */
7348 case BUILT_IN_MEMCMP:
7349 /* If not optimizing, call the library function. */
7350 if (!optimize)
7351 break;
7352
7353 if (arglist == 0
7354 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7355 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7356 || TREE_CHAIN (arglist) == 0
7357 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7358 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7359 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 7360 break;
ca695ac9
JB
7361 else if (!HAVE_cmpstrsi)
7362 break;
7363 {
7364 tree arg1 = TREE_VALUE (arglist);
7365 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7366 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7367 rtx result;
7368
7369 int arg1_align
7370 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7371 int arg2_align
7372 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7373 enum machine_mode insn_mode
7374 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
60bac6ea 7375
ca695ac9
JB
7376 /* If we don't have POINTER_TYPE, call the function. */
7377 if (arg1_align == 0 || arg2_align == 0)
7378 {
7379 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7380 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7381 break;
7382 }
60bac6ea 7383
ca695ac9
JB
7384 /* Make a place to write the result of the instruction. */
7385 result = target;
7386 if (! (result != 0
7387 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7388 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7389 result = gen_reg_rtx (insn_mode);
60bac6ea 7390
ca695ac9
JB
7391 emit_insn (gen_cmpstrsi (result,
7392 gen_rtx (MEM, BLKmode,
7393 expand_expr (arg1, NULL_RTX, Pmode,
7394 EXPAND_NORMAL)),
7395 gen_rtx (MEM, BLKmode,
7396 expand_expr (arg2, NULL_RTX, Pmode,
7397 EXPAND_NORMAL)),
7398 expand_expr (len, NULL_RTX, VOIDmode, 0),
7399 GEN_INT (MIN (arg1_align, arg2_align))));
60bac6ea 7400
ca695ac9
JB
7401 /* Return the value in the proper mode for this function. */
7402 mode = TYPE_MODE (TREE_TYPE (exp));
7403 if (GET_MODE (result) == mode)
7404 return result;
7405 else if (target != 0)
7406 {
7407 convert_move (target, result, 0);
7408 return target;
60bac6ea 7409 }
ca695ac9
JB
7410 else
7411 return convert_to_mode (mode, result, 0);
7412 }
60bac6ea 7413#else
ca695ac9
JB
7414 case BUILT_IN_STRCMP:
7415 case BUILT_IN_MEMCMP:
7416 break;
60bac6ea
RS
7417#endif
7418
ca695ac9
JB
7419 default: /* just do library call, if unknown builtin */
7420 error ("built-in function `%s' not currently supported",
7421 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7422 }
e87b4f3f 7423
ca695ac9
JB
7424 /* The switch statement above can drop through to cause the function
7425 to be called normally. */
e7c33f54 7426
ca695ac9
JB
7427 return expand_call (exp, target, ignore);
7428}
7429\f
7430/* Built-in functions to perform an untyped call and return. */
0006469d 7431
ca695ac9
JB
7432/* For each register that may be used for calling a function, this
7433 gives a mode used to copy the register's value. VOIDmode indicates
7434 the register is not used for calling a function. If the machine
7435 has register windows, this gives only the outbound registers.
7436 INCOMING_REGNO gives the corresponding inbound register. */
7437static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 7438
ca695ac9
JB
7439/* For each register that may be used for returning values, this gives
7440 a mode used to copy the register's value. VOIDmode indicates the
7441 register is not used for returning values. If the machine has
7442 register windows, this gives only the outbound registers.
7443 INCOMING_REGNO gives the corresponding inbound register. */
7444static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 7445
ca695ac9
JB
7446/* For each register that may be used for calling a function, this
7447 gives the offset of that register into the block returned by
7448 __bultin_apply_args. 0 indicates that the register is not
7449 used for calling a function. */
7450static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
0006469d 7451
ca695ac9
JB
7452/* Return the offset of register REGNO into the block returned by
7453 __builtin_apply_args. This is not declared static, since it is
7454 needed in objc-act.c. */
0006469d 7455
ca695ac9
JB
7456int
7457apply_args_register_offset (regno)
7458 int regno;
7459{
7460 apply_args_size ();
0006469d 7461
ca695ac9
JB
7462 /* Arguments are always put in outgoing registers (in the argument
7463 block) if such make sense. */
7464#ifdef OUTGOING_REGNO
7465 regno = OUTGOING_REGNO(regno);
7466#endif
7467 return apply_args_reg_offset[regno];
7468}
0006469d 7469
ca695ac9
JB
7470/* Return the size required for the block returned by __builtin_apply_args,
7471 and initialize apply_args_mode. */
0006469d 7472
ca695ac9
JB
7473static int
7474apply_args_size ()
7475{
7476 static int size = -1;
7477 int align, regno;
7478 enum machine_mode mode;
bbf6f052 7479
ca695ac9
JB
7480 /* The values computed by this function never change. */
7481 if (size < 0)
7482 {
7483 /* The first value is the incoming arg-pointer. */
7484 size = GET_MODE_SIZE (Pmode);
bbf6f052 7485
ca695ac9
JB
7486 /* The second value is the structure value address unless this is
7487 passed as an "invisible" first argument. */
7488 if (struct_value_rtx)
7489 size += GET_MODE_SIZE (Pmode);
7490
7491 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7492 if (FUNCTION_ARG_REGNO_P (regno))
bbf6f052 7493 {
ca695ac9
JB
7494 /* Search for the proper mode for copying this register's
7495 value. I'm not sure this is right, but it works so far. */
7496 enum machine_mode best_mode = VOIDmode;
7497
7498 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7499 mode != VOIDmode;
7500 mode = GET_MODE_WIDER_MODE (mode))
7501 if (HARD_REGNO_MODE_OK (regno, mode)
7502 && HARD_REGNO_NREGS (regno, mode) == 1)
7503 best_mode = mode;
7504
7505 if (best_mode == VOIDmode)
7506 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7507 mode != VOIDmode;
7508 mode = GET_MODE_WIDER_MODE (mode))
7509 if (HARD_REGNO_MODE_OK (regno, mode)
7510 && (mov_optab->handlers[(int) mode].insn_code
7511 != CODE_FOR_nothing))
7512 best_mode = mode;
7513
7514 mode = best_mode;
7515 if (mode == VOIDmode)
7516 abort ();
7517
7518 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7519 if (size % align != 0)
7520 size = CEIL (size, align) * align;
7521 apply_args_reg_offset[regno] = size;
7522 size += GET_MODE_SIZE (mode);
7523 apply_args_mode[regno] = mode;
7524 }
7525 else
7526 {
7527 apply_args_mode[regno] = VOIDmode;
7528 apply_args_reg_offset[regno] = 0;
bbf6f052 7529 }
ca695ac9
JB
7530 }
7531 return size;
7532}
bbf6f052 7533
ca695ac9
JB
7534/* Return the size required for the block returned by __builtin_apply,
7535 and initialize apply_result_mode. */
bbf6f052 7536
ca695ac9
JB
7537static int
7538apply_result_size ()
7539{
7540 static int size = -1;
7541 int align, regno;
7542 enum machine_mode mode;
bbf6f052 7543
ca695ac9
JB
7544 /* The values computed by this function never change. */
7545 if (size < 0)
7546 {
7547 size = 0;
bbf6f052 7548
ca695ac9
JB
7549 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7550 if (FUNCTION_VALUE_REGNO_P (regno))
7551 {
7552 /* Search for the proper mode for copying this register's
7553 value. I'm not sure this is right, but it works so far. */
7554 enum machine_mode best_mode = VOIDmode;
bbf6f052 7555
ca695ac9
JB
7556 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7557 mode != TImode;
7558 mode = GET_MODE_WIDER_MODE (mode))
7559 if (HARD_REGNO_MODE_OK (regno, mode))
7560 best_mode = mode;
bbf6f052 7561
ca695ac9
JB
7562 if (best_mode == VOIDmode)
7563 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7564 mode != VOIDmode;
7565 mode = GET_MODE_WIDER_MODE (mode))
7566 if (HARD_REGNO_MODE_OK (regno, mode)
7567 && (mov_optab->handlers[(int) mode].insn_code
7568 != CODE_FOR_nothing))
7569 best_mode = mode;
bbf6f052 7570
ca695ac9
JB
7571 mode = best_mode;
7572 if (mode == VOIDmode)
7573 abort ();
bbf6f052 7574
ca695ac9
JB
7575 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7576 if (size % align != 0)
7577 size = CEIL (size, align) * align;
7578 size += GET_MODE_SIZE (mode);
7579 apply_result_mode[regno] = mode;
bbf6f052
RK
7580 }
7581 else
ca695ac9 7582 apply_result_mode[regno] = VOIDmode;
bbf6f052 7583
ca695ac9
JB
7584 /* Allow targets that use untyped_call and untyped_return to override
7585 the size so that machine-specific information can be stored here. */
7586#ifdef APPLY_RESULT_SIZE
7587 size = APPLY_RESULT_SIZE;
7588#endif
7589 }
7590 return size;
7591}
bbf6f052 7592
ca695ac9
JB
7593#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7594/* Create a vector describing the result block RESULT. If SAVEP is true,
7595 the result block is used to save the values; otherwise it is used to
7596 restore the values. */
bbf6f052 7597
ca695ac9
JB
7598static rtx
7599result_vector (savep, result)
7600 int savep;
7601 rtx result;
7602{
7603 int regno, size, align, nelts;
7604 enum machine_mode mode;
7605 rtx reg, mem;
7606 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7607
7608 size = nelts = 0;
7609 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7610 if ((mode = apply_result_mode[regno]) != VOIDmode)
7611 {
7612 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7613 if (size % align != 0)
7614 size = CEIL (size, align) * align;
7615 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7616 mem = change_address (result, mode,
7617 plus_constant (XEXP (result, 0), size));
7618 savevec[nelts++] = (savep
7619 ? gen_rtx (SET, VOIDmode, mem, reg)
7620 : gen_rtx (SET, VOIDmode, reg, mem));
7621 size += GET_MODE_SIZE (mode);
bbf6f052 7622 }
ca695ac9
JB
7623 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7624}
7625#endif /* HAVE_untyped_call or HAVE_untyped_return */
bbf6f052 7626
ca695ac9
JB
7627/* Save the state required to perform an untyped call with the same
7628 arguments as were passed to the current function. */
7629
7630static rtx
7631expand_builtin_apply_args ()
7632{
7633 rtx registers;
7634 int size, align, regno;
7635 enum machine_mode mode;
7636
7637 /* Create a block where the arg-pointer, structure value address,
7638 and argument registers can be saved. */
7639 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7640
7641 /* Walk past the arg-pointer and structure value address. */
7642 size = GET_MODE_SIZE (Pmode);
7643 if (struct_value_rtx)
7644 size += GET_MODE_SIZE (Pmode);
7645
7646 /* Save each register used in calling a function to the block. */
7647 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7648 if ((mode = apply_args_mode[regno]) != VOIDmode)
bbf6f052 7649 {
ca695ac9
JB
7650 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7651 if (size % align != 0)
7652 size = CEIL (size, align) * align;
7653 emit_move_insn (change_address (registers, mode,
7654 plus_constant (XEXP (registers, 0),
7655 size)),
7656 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7657 size += GET_MODE_SIZE (mode);
bbf6f052
RK
7658 }
7659
ca695ac9
JB
7660 /* Save the arg pointer to the block. */
7661 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7662 copy_to_reg (virtual_incoming_args_rtx));
7663 size = GET_MODE_SIZE (Pmode);
bbf6f052 7664
ca695ac9
JB
7665 /* Save the structure value address unless this is passed as an
7666 "invisible" first argument. */
7667 if (struct_value_incoming_rtx)
7668 {
7669 emit_move_insn (change_address (registers, Pmode,
7670 plus_constant (XEXP (registers, 0),
7671 size)),
7672 copy_to_reg (struct_value_incoming_rtx));
7673 size += GET_MODE_SIZE (Pmode);
7674 }
7675
7676 /* Return the address of the block. */
7677 return copy_addr_to_reg (XEXP (registers, 0));
7678}
7679
7680/* Perform an untyped call and save the state required to perform an
7681 untyped return of whatever value was returned by the given function. */
7682
7683static rtx
7684expand_builtin_apply (function, arguments, argsize)
7685 rtx function, arguments, argsize;
7686{
7687 int size, align, regno;
7688 enum machine_mode mode;
7689 rtx incoming_args, result, reg, dest, call_insn;
7690 rtx old_stack_level = 0;
7691 rtx use_insns = 0;
bbf6f052 7692
ca695ac9
JB
7693 /* Create a block where the return registers can be saved. */
7694 result = assign_stack_local (BLKmode, apply_result_size (), -1);
bbf6f052 7695
ca695ac9 7696 /* ??? The argsize value should be adjusted here. */
bbf6f052 7697
ca695ac9
JB
7698 /* Fetch the arg pointer from the ARGUMENTS block. */
7699 incoming_args = gen_reg_rtx (Pmode);
7700 emit_move_insn (incoming_args,
7701 gen_rtx (MEM, Pmode, arguments));
7702#ifndef STACK_GROWS_DOWNWARD
7703 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7704 incoming_args, 0, OPTAB_LIB_WIDEN);
46b68a37
JW
7705#endif
7706
ca695ac9
JB
7707 /* Perform postincrements before actually calling the function. */
7708 emit_queue ();
46b68a37 7709
ca695ac9
JB
7710 /* Push a new argument block and copy the arguments. */
7711 do_pending_stack_adjust ();
7712 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
bbf6f052 7713
ca695ac9
JB
7714 /* Push a block of memory onto the stack to store the memory arguments.
7715 Save the address in a register, and copy the memory arguments. ??? I
7716 haven't figured out how the calling convention macros effect this,
7717 but it's likely that the source and/or destination addresses in
7718 the block copy will need updating in machine specific ways. */
7719 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7720 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7721 gen_rtx (MEM, BLKmode, incoming_args),
7722 argsize,
7723 PARM_BOUNDARY / BITS_PER_UNIT);
bbf6f052 7724
ca695ac9
JB
7725 /* Refer to the argument block. */
7726 apply_args_size ();
7727 arguments = gen_rtx (MEM, BLKmode, arguments);
7728
7729 /* Walk past the arg-pointer and structure value address. */
7730 size = GET_MODE_SIZE (Pmode);
7731 if (struct_value_rtx)
7732 size += GET_MODE_SIZE (Pmode);
7733
7734 /* Restore each of the registers previously saved. Make USE insns
7735 for each of these registers for use in making the call. */
7736 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7737 if ((mode = apply_args_mode[regno]) != VOIDmode)
7738 {
7739 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7740 if (size % align != 0)
7741 size = CEIL (size, align) * align;
7742 reg = gen_rtx (REG, mode, regno);
7743 emit_move_insn (reg,
7744 change_address (arguments, mode,
7745 plus_constant (XEXP (arguments, 0),
7746 size)));
7747
7748 push_to_sequence (use_insns);
7749 emit_insn (gen_rtx (USE, VOIDmode, reg));
7750 use_insns = get_insns ();
7751 end_sequence ();
7752 size += GET_MODE_SIZE (mode);
7753 }
7754
7755 /* Restore the structure value address unless this is passed as an
7756 "invisible" first argument. */
7757 size = GET_MODE_SIZE (Pmode);
7758 if (struct_value_rtx)
7759 {
7760 rtx value = gen_reg_rtx (Pmode);
7761 emit_move_insn (value,
7762 change_address (arguments, Pmode,
7763 plus_constant (XEXP (arguments, 0),
7764 size)));
7765 emit_move_insn (struct_value_rtx, value);
7766 if (GET_CODE (struct_value_rtx) == REG)
7767 {
7768 push_to_sequence (use_insns);
7769 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
7770 use_insns = get_insns ();
7771 end_sequence ();
bbf6f052 7772 }
ca695ac9
JB
7773 size += GET_MODE_SIZE (Pmode);
7774 }
bbf6f052 7775
ca695ac9
JB
7776 /* All arguments and registers used for the call are set up by now! */
7777 function = prepare_call_address (function, NULL_TREE, &use_insns);
bbf6f052 7778
ca695ac9
JB
7779 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
7780 and we don't want to load it into a register as an optimization,
7781 because prepare_call_address already did it if it should be done. */
7782 if (GET_CODE (function) != SYMBOL_REF)
7783 function = memory_address (FUNCTION_MODE, function);
bbf6f052 7784
ca695ac9
JB
7785 /* Generate the actual call instruction and save the return value. */
7786#ifdef HAVE_untyped_call
7787 if (HAVE_untyped_call)
7788 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7789 result, result_vector (1, result)));
7790 else
7791#endif
7792#ifdef HAVE_call_value
7793 if (HAVE_call_value)
7794 {
7795 rtx valreg = 0;
bbf6f052 7796
ca695ac9
JB
7797 /* Locate the unique return register. It is not possible to
7798 express a call that sets more than one return register using
7799 call_value; use untyped_call for that. In fact, untyped_call
7800 only needs to save the return registers in the given block. */
7801 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7802 if ((mode = apply_result_mode[regno]) != VOIDmode)
7803 {
7804 if (valreg)
7805 abort (); /* HAVE_untyped_call required. */
7806 valreg = gen_rtx (REG, mode, regno);
7807 }
bbf6f052 7808
ca695ac9
JB
7809 emit_call_insn (gen_call_value (valreg,
7810 gen_rtx (MEM, FUNCTION_MODE, function),
7811 const0_rtx, NULL_RTX, const0_rtx));
bbf6f052 7812
ca695ac9
JB
7813 emit_move_insn (change_address (result, GET_MODE (valreg),
7814 XEXP (result, 0)),
7815 valreg);
7816 }
7817 else
7818#endif
7819 abort ();
bbf6f052 7820
ca695ac9
JB
7821 /* Find the CALL insn we just emitted and write the USE insns before it. */
7822 for (call_insn = get_last_insn ();
7823 call_insn && GET_CODE (call_insn) != CALL_INSN;
7824 call_insn = PREV_INSN (call_insn))
7825 ;
bbf6f052 7826
ca695ac9
JB
7827 if (! call_insn)
7828 abort ();
bbf6f052 7829
ca695ac9
JB
7830 /* Put the USE insns before the CALL. */
7831 emit_insns_before (use_insns, call_insn);
e7c33f54 7832
ca695ac9
JB
7833 /* Restore the stack. */
7834 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
e7c33f54 7835
ca695ac9
JB
7836 /* Return the address of the result block. */
7837 return copy_addr_to_reg (XEXP (result, 0));
7838}
e7c33f54 7839
ca695ac9 7840/* Perform an untyped return. */
e7c33f54 7841
ca695ac9
JB
7842static void
7843expand_builtin_return (result)
7844 rtx result;
7845{
7846 int size, align, regno;
7847 enum machine_mode mode;
7848 rtx reg;
7849 rtx use_insns = 0;
e7c33f54 7850
ca695ac9
JB
7851 apply_result_size ();
7852 result = gen_rtx (MEM, BLKmode, result);
e7c33f54 7853
ca695ac9
JB
7854#ifdef HAVE_untyped_return
7855 if (HAVE_untyped_return)
7856 {
7857 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
7858 emit_barrier ();
7859 return;
7860 }
7861#endif
e7c33f54 7862
ca695ac9
JB
7863 /* Restore the return value and note that each value is used. */
7864 size = 0;
7865 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7866 if ((mode = apply_result_mode[regno]) != VOIDmode)
7867 {
7868 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7869 if (size % align != 0)
7870 size = CEIL (size, align) * align;
7871 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
7872 emit_move_insn (reg,
7873 change_address (result, mode,
7874 plus_constant (XEXP (result, 0),
7875 size)));
e7c33f54 7876
ca695ac9
JB
7877 push_to_sequence (use_insns);
7878 emit_insn (gen_rtx (USE, VOIDmode, reg));
7879 use_insns = get_insns ();
7880 end_sequence ();
7881 size += GET_MODE_SIZE (mode);
7882 }
e7c33f54 7883
ca695ac9
JB
7884 /* Put the USE insns before the return. */
7885 emit_insns (use_insns);
e7c33f54 7886
ca695ac9
JB
7887 /* Return whatever values was restored by jumping directly to the end
7888 of the function. */
7889 expand_null_return ();
7890}
7891\f
7892/* Expand code for a post- or pre- increment or decrement
7893 and return the RTX for the result.
7894 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
e7c33f54 7895
ca695ac9
JB
7896static rtx
7897expand_increment (exp, post)
7898 register tree exp;
7899 int post;
7900{
7901 register rtx op0, op1;
7902 register rtx temp, value;
7903 register tree incremented = TREE_OPERAND (exp, 0);
7904 optab this_optab = add_optab;
7905 int icode;
7906 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7907 int op0_is_copy = 0;
7908 int single_insn = 0;
a97f5a86
RS
7909 /* 1 means we can't store into OP0 directly,
7910 because it is a subreg narrower than a word,
7911 and we don't dare clobber the rest of the word. */
7912 int bad_subreg = 0;
e7c33f54 7913
ca695ac9 7914 if (output_bytecode)
c02bd5d9
JB
7915 {
7916 bc_expand_expr (exp);
7917 return NULL_RTX;
7918 }
e7c33f54 7919
ca695ac9
JB
7920 /* Stabilize any component ref that might need to be
7921 evaluated more than once below. */
7922 if (!post
7923 || TREE_CODE (incremented) == BIT_FIELD_REF
7924 || (TREE_CODE (incremented) == COMPONENT_REF
7925 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
7926 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
7927 incremented = stabilize_reference (incremented);
7928 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
7929 ones into save exprs so that they don't accidentally get evaluated
7930 more than once by the code below. */
7931 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
7932 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
7933 incremented = save_expr (incremented);
bbf6f052 7934
ca695ac9
JB
7935 /* Compute the operands as RTX.
7936 Note whether OP0 is the actual lvalue or a copy of it:
7937 I believe it is a copy iff it is a register or subreg
7938 and insns were generated in computing it. */
bbf6f052 7939
ca695ac9
JB
7940 temp = get_last_insn ();
7941 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
bbf6f052 7942
ca695ac9
JB
7943 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
7944 in place but intead must do sign- or zero-extension during assignment,
7945 so we copy it into a new register and let the code below use it as
7946 a copy.
bbf6f052 7947
ca695ac9
JB
7948 Note that we can safely modify this SUBREG since it is know not to be
7949 shared (it was made by the expand_expr call above). */
bbf6f052 7950
ca695ac9
JB
7951 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
7952 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
a97f5a86
RS
7953 else if (GET_CODE (op0) == SUBREG
7954 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
7955 bad_subreg = 1;
bbf6f052 7956
ca695ac9
JB
7957 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
7958 && temp != get_last_insn ());
7959 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052 7960
ca695ac9
JB
7961 /* Decide whether incrementing or decrementing. */
7962 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
7963 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7964 this_optab = sub_optab;
bbf6f052 7965
ca695ac9
JB
7966 /* Convert decrement by a constant into a negative increment. */
7967 if (this_optab == sub_optab
7968 && GET_CODE (op1) == CONST_INT)
7969 {
7970 op1 = GEN_INT (- INTVAL (op1));
7971 this_optab = add_optab;
7972 }
bbf6f052 7973
ca695ac9
JB
7974 /* For a preincrement, see if we can do this with a single instruction. */
7975 if (!post)
7976 {
7977 icode = (int) this_optab->handlers[(int) mode].insn_code;
7978 if (icode != (int) CODE_FOR_nothing
7979 /* Make sure that OP0 is valid for operands 0 and 1
7980 of the insn we want to queue. */
7981 && (*insn_operand_predicate[icode][0]) (op0, mode)
7982 && (*insn_operand_predicate[icode][1]) (op0, mode)
7983 && (*insn_operand_predicate[icode][2]) (op1, mode))
7984 single_insn = 1;
7985 }
bbf6f052 7986
ca695ac9
JB
7987 /* If OP0 is not the actual lvalue, but rather a copy in a register,
7988 then we cannot just increment OP0. We must therefore contrive to
7989 increment the original value. Then, for postincrement, we can return
7990 OP0 since it is a copy of the old value. For preincrement, expand here
a97f5a86
RS
7991 unless we can do it with a single insn.
7992
7993 Likewise if storing directly into OP0 would clobber high bits
7994 we need to preserve (bad_subreg). */
7995 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
ca695ac9
JB
7996 {
7997 /* This is the easiest way to increment the value wherever it is.
7998 Problems with multiple evaluation of INCREMENTED are prevented
7999 because either (1) it is a component_ref or preincrement,
8000 in which case it was stabilized above, or (2) it is an array_ref
8001 with constant index in an array in a register, which is
8002 safe to reevaluate. */
8003 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8004 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8005 ? MINUS_EXPR : PLUS_EXPR),
8006 TREE_TYPE (exp),
8007 incremented,
8008 TREE_OPERAND (exp, 1));
8009 temp = expand_assignment (incremented, newexp, ! post, 0);
8010 return post ? op0 : temp;
8011 }
bbf6f052 8012
ca695ac9
JB
8013 if (post)
8014 {
8015 /* We have a true reference to the value in OP0.
8016 If there is an insn to add or subtract in this mode, queue it.
8017 Queueing the increment insn avoids the register shuffling
8018 that often results if we must increment now and first save
8019 the old value for subsequent use. */
bbf6f052 8020
ca695ac9
JB
8021#if 0 /* Turned off to avoid making extra insn for indexed memref. */
8022 op0 = stabilize (op0);
8023#endif
bbf6f052 8024
ca695ac9
JB
8025 icode = (int) this_optab->handlers[(int) mode].insn_code;
8026 if (icode != (int) CODE_FOR_nothing
8027 /* Make sure that OP0 is valid for operands 0 and 1
8028 of the insn we want to queue. */
8029 && (*insn_operand_predicate[icode][0]) (op0, mode)
8030 && (*insn_operand_predicate[icode][1]) (op0, mode))
8031 {
8032 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8033 op1 = force_reg (mode, op1);
bbf6f052 8034
ca695ac9
JB
8035 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8036 }
8037 }
bbf6f052 8038
ca695ac9
JB
8039 /* Preincrement, or we can't increment with one simple insn. */
8040 if (post)
8041 /* Save a copy of the value before inc or dec, to return it later. */
8042 temp = value = copy_to_reg (op0);
8043 else
8044 /* Arrange to return the incremented value. */
8045 /* Copy the rtx because expand_binop will protect from the queue,
8046 and the results of that would be invalid for us to return
8047 if our caller does emit_queue before using our result. */
8048 temp = copy_rtx (value = op0);
bbf6f052 8049
ca695ac9
JB
8050 /* Increment however we can. */
8051 op1 = expand_binop (mode, this_optab, value, op1, op0,
8052 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8053 /* Make sure the value is stored into OP0. */
8054 if (op1 != op0)
8055 emit_move_insn (op0, op1);
bbf6f052 8056
ca695ac9
JB
8057 return temp;
8058}
8059\f
8060/* Expand all function calls contained within EXP, innermost ones first.
8061 But don't look within expressions that have sequence points.
8062 For each CALL_EXPR, record the rtx for its value
8063 in the CALL_EXPR_RTL field. */
bbf6f052 8064
ca695ac9
JB
8065static void
8066preexpand_calls (exp)
8067 tree exp;
8068{
8069 register int nops, i;
8070 int type = TREE_CODE_CLASS (TREE_CODE (exp));
bbf6f052 8071
ca695ac9
JB
8072 if (! do_preexpand_calls)
8073 return;
bbf6f052 8074
ca695ac9 8075 /* Only expressions and references can contain calls. */
bbf6f052 8076
ca695ac9
JB
8077 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8078 return;
bbf6f052 8079
ca695ac9
JB
8080 switch (TREE_CODE (exp))
8081 {
8082 case CALL_EXPR:
8083 /* Do nothing if already expanded. */
8084 if (CALL_EXPR_RTL (exp) != 0)
8085 return;
bbf6f052 8086
ca695ac9
JB
8087 /* Do nothing to built-in functions. */
8088 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8089 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8090 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8091 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8092 return;
bbf6f052 8093
ca695ac9
JB
8094 case COMPOUND_EXPR:
8095 case COND_EXPR:
8096 case TRUTH_ANDIF_EXPR:
8097 case TRUTH_ORIF_EXPR:
8098 /* If we find one of these, then we can be sure
8099 the adjust will be done for it (since it makes jumps).
8100 Do it now, so that if this is inside an argument
8101 of a function, we don't get the stack adjustment
8102 after some other args have already been pushed. */
8103 do_pending_stack_adjust ();
8104 return;
bbf6f052 8105
ca695ac9
JB
8106 case BLOCK:
8107 case RTL_EXPR:
8108 case WITH_CLEANUP_EXPR:
8109 return;
bbf6f052 8110
ca695ac9
JB
8111 case SAVE_EXPR:
8112 if (SAVE_EXPR_RTL (exp) != 0)
8113 return;
8114 }
bbf6f052 8115
ca695ac9
JB
8116 nops = tree_code_length[(int) TREE_CODE (exp)];
8117 for (i = 0; i < nops; i++)
8118 if (TREE_OPERAND (exp, i) != 0)
8119 {
8120 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8121 if (type == 'e' || type == '<' || type == '1' || type == '2'
8122 || type == 'r')
8123 preexpand_calls (TREE_OPERAND (exp, i));
8124 }
bbf6f052
RK
8125}
8126\f
ca695ac9
JB
8127/* At the start of a function, record that we have no previously-pushed
8128 arguments waiting to be popped. */
0006469d 8129
ca695ac9
JB
8130void
8131init_pending_stack_adjust ()
8132{
8133 pending_stack_adjust = 0;
8134}
fb2ca25a 8135
ca695ac9
JB
8136/* When exiting from function, if safe, clear out any pending stack adjust
8137 so the adjustment won't get done. */
904762c8 8138
ca695ac9
JB
8139void
8140clear_pending_stack_adjust ()
fb2ca25a 8141{
ca695ac9
JB
8142#ifdef EXIT_IGNORE_STACK
8143 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8144 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8145 && ! flag_inline_functions)
8146 pending_stack_adjust = 0;
fb2ca25a 8147#endif
fb2ca25a
KKT
8148}
8149
ca695ac9
JB
8150/* Pop any previously-pushed arguments that have not been popped yet. */
8151
8152void
8153do_pending_stack_adjust ()
8154{
8155 if (inhibit_defer_pop == 0)
8156 {
8157 if (pending_stack_adjust != 0)
8158 adjust_stack (GEN_INT (pending_stack_adjust));
8159 pending_stack_adjust = 0;
8160 }
8161}
8162
8163/* Expand all cleanups up to OLD_CLEANUPS.
8164 Needed here, and also for language-dependent calls. */
904762c8 8165
ca695ac9
JB
8166void
8167expand_cleanups_to (old_cleanups)
8168 tree old_cleanups;
0006469d 8169{
ca695ac9 8170 while (cleanups_this_call != old_cleanups)
0006469d 8171 {
ca695ac9
JB
8172 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
8173 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8174 }
8175}
8176\f
8177/* Expand conditional expressions. */
0006469d 8178
ca695ac9
JB
8179/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8180 LABEL is an rtx of code CODE_LABEL, in this function and all the
8181 functions here. */
0006469d 8182
ca695ac9
JB
8183void
8184jumpifnot (exp, label)
8185 tree exp;
8186 rtx label;
8187{
8188 do_jump (exp, label, NULL_RTX);
8189}
0006469d 8190
ca695ac9 8191/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
0006469d 8192
ca695ac9
JB
8193void
8194jumpif (exp, label)
8195 tree exp;
8196 rtx label;
8197{
8198 do_jump (exp, NULL_RTX, label);
8199}
0006469d 8200
ca695ac9
JB
8201/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8202 the result is zero, or IF_TRUE_LABEL if the result is one.
8203 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8204 meaning fall through in that case.
0006469d 8205
ca695ac9
JB
8206 do_jump always does any pending stack adjust except when it does not
8207 actually perform a jump. An example where there is no jump
8208 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
0006469d 8209
ca695ac9
JB
8210 This function is responsible for optimizing cases such as
8211 &&, || and comparison operators in EXP. */
904762c8 8212
ca695ac9
JB
8213void
8214do_jump (exp, if_false_label, if_true_label)
8215 tree exp;
8216 rtx if_false_label, if_true_label;
0006469d 8217{
ca695ac9
JB
8218 register enum tree_code code = TREE_CODE (exp);
8219 /* Some cases need to create a label to jump to
8220 in order to properly fall through.
8221 These cases set DROP_THROUGH_LABEL nonzero. */
8222 rtx drop_through_label = 0;
8223 rtx temp;
8224 rtx comparison = 0;
8225 int i;
8226 tree type;
0006469d 8227
ca695ac9 8228 emit_queue ();
0006469d 8229
ca695ac9
JB
8230 switch (code)
8231 {
8232 case ERROR_MARK:
8233 break;
0006469d 8234
ca695ac9
JB
8235 case INTEGER_CST:
8236 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8237 if (temp)
8238 emit_jump (temp);
8239 break;
0006469d 8240
ca695ac9
JB
8241#if 0
8242 /* This is not true with #pragma weak */
8243 case ADDR_EXPR:
8244 /* The address of something can never be zero. */
8245 if (if_true_label)
8246 emit_jump (if_true_label);
8247 break;
8248#endif
0006469d 8249
ca695ac9
JB
8250 case NOP_EXPR:
8251 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8252 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8253 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8254 goto normal;
8255 case CONVERT_EXPR:
8256 /* If we are narrowing the operand, we have to do the compare in the
8257 narrower mode. */
8258 if ((TYPE_PRECISION (TREE_TYPE (exp))
8259 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8260 goto normal;
8261 case NON_LVALUE_EXPR:
8262 case REFERENCE_EXPR:
8263 case ABS_EXPR:
8264 case NEGATE_EXPR:
8265 case LROTATE_EXPR:
8266 case RROTATE_EXPR:
8267 /* These cannot change zero->non-zero or vice versa. */
8268 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8269 break;
0006469d 8270
ca695ac9
JB
8271#if 0
8272 /* This is never less insns than evaluating the PLUS_EXPR followed by
8273 a test and can be longer if the test is eliminated. */
8274 case PLUS_EXPR:
8275 /* Reduce to minus. */
8276 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8277 TREE_OPERAND (exp, 0),
8278 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8279 TREE_OPERAND (exp, 1))));
8280 /* Process as MINUS. */
0006469d 8281#endif
0006469d 8282
ca695ac9
JB
8283 case MINUS_EXPR:
8284 /* Non-zero iff operands of minus differ. */
8285 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8286 TREE_OPERAND (exp, 0),
8287 TREE_OPERAND (exp, 1)),
8288 NE, NE);
8289 break;
904762c8 8290
ca695ac9
JB
8291 case BIT_AND_EXPR:
8292 /* If we are AND'ing with a small constant, do this comparison in the
8293 smallest type that fits. If the machine doesn't have comparisons
8294 that small, it will be converted back to the wider comparison.
8295 This helps if we are testing the sign bit of a narrower object.
8296 combine can't do this for us because it can't know whether a
8297 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
0006469d 8298
ca695ac9
JB
8299 if (! SLOW_BYTE_ACCESS
8300 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8301 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8302 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8303 && (type = type_for_size (i + 1, 1)) != 0
8304 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8305 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8306 != CODE_FOR_nothing))
8307 {
8308 do_jump (convert (type, exp), if_false_label, if_true_label);
8309 break;
8310 }
8311 goto normal;
904762c8 8312
ca695ac9
JB
8313 case TRUTH_NOT_EXPR:
8314 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8315 break;
0006469d 8316
ca695ac9
JB
8317 case TRUTH_ANDIF_EXPR:
8318 if (if_false_label == 0)
8319 if_false_label = drop_through_label = gen_label_rtx ();
8320 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8321 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8322 break;
0006469d 8323
ca695ac9
JB
8324 case TRUTH_ORIF_EXPR:
8325 if (if_true_label == 0)
8326 if_true_label = drop_through_label = gen_label_rtx ();
8327 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8328 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8329 break;
0006469d 8330
ca695ac9 8331 case COMPOUND_EXPR:
0088fcb1 8332 push_temp_slots ();
ca695ac9
JB
8333 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8334 free_temp_slots ();
0088fcb1 8335 pop_temp_slots ();
ca695ac9
JB
8336 emit_queue ();
8337 do_pending_stack_adjust ();
8338 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8339 break;
0006469d 8340
ca695ac9
JB
8341 case COMPONENT_REF:
8342 case BIT_FIELD_REF:
8343 case ARRAY_REF:
8344 {
8345 int bitsize, bitpos, unsignedp;
8346 enum machine_mode mode;
8347 tree type;
8348 tree offset;
8349 int volatilep = 0;
0006469d 8350
ca695ac9
JB
8351 /* Get description of this reference. We don't actually care
8352 about the underlying object here. */
8353 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8354 &mode, &unsignedp, &volatilep);
0006469d 8355
ca695ac9
JB
8356 type = type_for_size (bitsize, unsignedp);
8357 if (! SLOW_BYTE_ACCESS
8358 && type != 0 && bitsize >= 0
8359 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8360 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8361 != CODE_FOR_nothing))
8362 {
8363 do_jump (convert (type, exp), if_false_label, if_true_label);
8364 break;
8365 }
8366 goto normal;
8367 }
0006469d 8368
ca695ac9
JB
8369 case COND_EXPR:
8370 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8371 if (integer_onep (TREE_OPERAND (exp, 1))
8372 && integer_zerop (TREE_OPERAND (exp, 2)))
8373 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
904762c8 8374
ca695ac9
JB
8375 else if (integer_zerop (TREE_OPERAND (exp, 1))
8376 && integer_onep (TREE_OPERAND (exp, 2)))
8377 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0006469d 8378
ca695ac9
JB
8379 else
8380 {
8381 register rtx label1 = gen_label_rtx ();
8382 drop_through_label = gen_label_rtx ();
8383 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8384 /* Now the THEN-expression. */
8385 do_jump (TREE_OPERAND (exp, 1),
8386 if_false_label ? if_false_label : drop_through_label,
8387 if_true_label ? if_true_label : drop_through_label);
8388 /* In case the do_jump just above never jumps. */
8389 do_pending_stack_adjust ();
8390 emit_label (label1);
8391 /* Now the ELSE-expression. */
8392 do_jump (TREE_OPERAND (exp, 2),
8393 if_false_label ? if_false_label : drop_through_label,
8394 if_true_label ? if_true_label : drop_through_label);
8395 }
8396 break;
0006469d 8397
ca695ac9
JB
8398 case EQ_EXPR:
8399 if (integer_zerop (TREE_OPERAND (exp, 1)))
8400 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0766f239
RS
8401 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8402 == MODE_INT)
8403 &&
8404 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8405 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8406 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
ca695ac9
JB
8407 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8408 else
8409 comparison = compare (exp, EQ, EQ);
8410 break;
0006469d 8411
ca695ac9
JB
8412 case NE_EXPR:
8413 if (integer_zerop (TREE_OPERAND (exp, 1)))
8414 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
0766f239
RS
8415 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8416 == MODE_INT)
8417 &&
8418 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8419 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8420 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
ca695ac9
JB
8421 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8422 else
8423 comparison = compare (exp, NE, NE);
8424 break;
0006469d 8425
ca695ac9
JB
8426 case LT_EXPR:
8427 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8428 == MODE_INT)
8429 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8430 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8431 else
8432 comparison = compare (exp, LT, LTU);
8433 break;
0006469d 8434
ca695ac9
JB
8435 case LE_EXPR:
8436 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8437 == MODE_INT)
8438 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8439 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8440 else
8441 comparison = compare (exp, LE, LEU);
8442 break;
0006469d 8443
ca695ac9
JB
8444 case GT_EXPR:
8445 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8446 == MODE_INT)
8447 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8448 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8449 else
8450 comparison = compare (exp, GT, GTU);
8451 break;
0006469d 8452
ca695ac9
JB
8453 case GE_EXPR:
8454 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8455 == MODE_INT)
8456 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8457 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8458 else
8459 comparison = compare (exp, GE, GEU);
8460 break;
0006469d 8461
ca695ac9
JB
8462 default:
8463 normal:
8464 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8465#if 0
8466 /* This is not needed any more and causes poor code since it causes
8467 comparisons and tests from non-SI objects to have different code
8468 sequences. */
8469 /* Copy to register to avoid generating bad insns by cse
8470 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8471 if (!cse_not_expected && GET_CODE (temp) == MEM)
8472 temp = copy_to_reg (temp);
8473#endif
8474 do_pending_stack_adjust ();
8475 if (GET_CODE (temp) == CONST_INT)
8476 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8477 else if (GET_CODE (temp) == LABEL_REF)
8478 comparison = const_true_rtx;
8479 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8480 && !can_compare_p (GET_MODE (temp)))
8481 /* Note swapping the labels gives us not-equal. */
8482 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8483 else if (GET_MODE (temp) != VOIDmode)
8484 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8485 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8486 GET_MODE (temp), NULL_RTX, 0);
8487 else
8488 abort ();
8489 }
0006469d 8490
ca695ac9
JB
8491 /* Do any postincrements in the expression that was tested. */
8492 emit_queue ();
0006469d 8493
ca695ac9
JB
8494 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8495 straight into a conditional jump instruction as the jump condition.
8496 Otherwise, all the work has been done already. */
0006469d 8497
ca695ac9 8498 if (comparison == const_true_rtx)
0006469d 8499 {
ca695ac9
JB
8500 if (if_true_label)
8501 emit_jump (if_true_label);
0006469d 8502 }
ca695ac9
JB
8503 else if (comparison == const0_rtx)
8504 {
8505 if (if_false_label)
8506 emit_jump (if_false_label);
8507 }
8508 else if (comparison)
8509 do_jump_for_compare (comparison, if_false_label, if_true_label);
0006469d 8510
ca695ac9 8511 if (drop_through_label)
0006469d 8512 {
ca695ac9
JB
8513 /* If do_jump produces code that might be jumped around,
8514 do any stack adjusts from that code, before the place
8515 where control merges in. */
8516 do_pending_stack_adjust ();
8517 emit_label (drop_through_label);
8518 }
8519}
8520\f
8521/* Given a comparison expression EXP for values too wide to be compared
8522 with one insn, test the comparison and jump to the appropriate label.
8523 The code of EXP is ignored; we always test GT if SWAP is 0,
8524 and LT if SWAP is 1. */
0006469d 8525
ca695ac9
JB
8526static void
8527do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8528 tree exp;
8529 int swap;
8530 rtx if_false_label, if_true_label;
8531{
8532 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8533 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8534 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8535 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8536 rtx drop_through_label = 0;
8537 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
8538 int i;
0006469d 8539
ca695ac9
JB
8540 if (! if_true_label || ! if_false_label)
8541 drop_through_label = gen_label_rtx ();
8542 if (! if_true_label)
8543 if_true_label = drop_through_label;
8544 if (! if_false_label)
8545 if_false_label = drop_through_label;
0006469d 8546
ca695ac9
JB
8547 /* Compare a word at a time, high order first. */
8548 for (i = 0; i < nwords; i++)
8549 {
8550 rtx comp;
8551 rtx op0_word, op1_word;
0006469d 8552
ca695ac9
JB
8553 if (WORDS_BIG_ENDIAN)
8554 {
8555 op0_word = operand_subword_force (op0, i, mode);
8556 op1_word = operand_subword_force (op1, i, mode);
8557 }
8558 else
8559 {
8560 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8561 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8562 }
0006469d 8563
ca695ac9
JB
8564 /* All but high-order word must be compared as unsigned. */
8565 comp = compare_from_rtx (op0_word, op1_word,
8566 (unsignedp || i > 0) ? GTU : GT,
8567 unsignedp, word_mode, NULL_RTX, 0);
8568 if (comp == const_true_rtx)
8569 emit_jump (if_true_label);
8570 else if (comp != const0_rtx)
8571 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 8572
ca695ac9
JB
8573 /* Consider lower words only if these are equal. */
8574 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8575 NULL_RTX, 0);
8576 if (comp == const_true_rtx)
8577 emit_jump (if_false_label);
8578 else if (comp != const0_rtx)
8579 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8580 }
0006469d 8581
ca695ac9
JB
8582 if (if_false_label)
8583 emit_jump (if_false_label);
8584 if (drop_through_label)
8585 emit_label (drop_through_label);
0006469d
TW
8586}
8587
ca695ac9
JB
8588/* Compare OP0 with OP1, word at a time, in mode MODE.
8589 UNSIGNEDP says to do unsigned comparison.
8590 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
904762c8 8591
0006469d 8592static void
ca695ac9
JB
8593do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8594 enum machine_mode mode;
8595 int unsignedp;
8596 rtx op0, op1;
8597 rtx if_false_label, if_true_label;
0006469d 8598{
ca695ac9
JB
8599 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8600 rtx drop_through_label = 0;
8601 int i;
0006469d 8602
ca695ac9
JB
8603 if (! if_true_label || ! if_false_label)
8604 drop_through_label = gen_label_rtx ();
8605 if (! if_true_label)
8606 if_true_label = drop_through_label;
8607 if (! if_false_label)
8608 if_false_label = drop_through_label;
0006469d 8609
ca695ac9
JB
8610 /* Compare a word at a time, high order first. */
8611 for (i = 0; i < nwords; i++)
0006469d 8612 {
ca695ac9
JB
8613 rtx comp;
8614 rtx op0_word, op1_word;
0006469d 8615
ca695ac9
JB
8616 if (WORDS_BIG_ENDIAN)
8617 {
8618 op0_word = operand_subword_force (op0, i, mode);
8619 op1_word = operand_subword_force (op1, i, mode);
8620 }
8621 else
8622 {
8623 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8624 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8625 }
0006469d 8626
ca695ac9
JB
8627 /* All but high-order word must be compared as unsigned. */
8628 comp = compare_from_rtx (op0_word, op1_word,
8629 (unsignedp || i > 0) ? GTU : GT,
8630 unsignedp, word_mode, NULL_RTX, 0);
8631 if (comp == const_true_rtx)
8632 emit_jump (if_true_label);
8633 else if (comp != const0_rtx)
8634 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 8635
ca695ac9
JB
8636 /* Consider lower words only if these are equal. */
8637 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8638 NULL_RTX, 0);
8639 if (comp == const_true_rtx)
8640 emit_jump (if_false_label);
8641 else if (comp != const0_rtx)
8642 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8643 }
0006469d 8644
ca695ac9
JB
8645 if (if_false_label)
8646 emit_jump (if_false_label);
8647 if (drop_through_label)
8648 emit_label (drop_through_label);
0006469d 8649}
bbf6f052 8650
ca695ac9
JB
8651/* Given an EQ_EXPR expression EXP for values too wide to be compared
8652 with one insn, test the comparison and jump to the appropriate label. */
8653
8654static void
8655do_jump_by_parts_equality (exp, if_false_label, if_true_label)
8656 tree exp;
8657 rtx if_false_label, if_true_label;
bbf6f052 8658{
ca695ac9
JB
8659 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8660 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8661 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8662 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8663 int i;
8664 rtx drop_through_label = 0;
bbf6f052 8665
ca695ac9
JB
8666 if (! if_false_label)
8667 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 8668
ca695ac9
JB
8669 for (i = 0; i < nwords; i++)
8670 {
8671 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
8672 operand_subword_force (op1, i, mode),
8673 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
8674 word_mode, NULL_RTX, 0);
8675 if (comp == const_true_rtx)
8676 emit_jump (if_false_label);
8677 else if (comp != const0_rtx)
8678 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8679 }
1499e0a8 8680
ca695ac9
JB
8681 if (if_true_label)
8682 emit_jump (if_true_label);
8683 if (drop_through_label)
8684 emit_label (drop_through_label);
8685}
8686\f
8687/* Jump according to whether OP0 is 0.
8688 We assume that OP0 has an integer mode that is too wide
8689 for the available compare insns. */
1499e0a8 8690
ca695ac9
JB
8691static void
8692do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
8693 rtx op0;
8694 rtx if_false_label, if_true_label;
8695{
8696 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
8697 int i;
8698 rtx drop_through_label = 0;
1499e0a8 8699
ca695ac9
JB
8700 if (! if_false_label)
8701 drop_through_label = if_false_label = gen_label_rtx ();
1499e0a8 8702
ca695ac9
JB
8703 for (i = 0; i < nwords; i++)
8704 {
8705 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
8706 GET_MODE (op0)),
8707 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
8708 if (comp == const_true_rtx)
8709 emit_jump (if_false_label);
8710 else if (comp != const0_rtx)
8711 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8712 }
1499e0a8 8713
ca695ac9
JB
8714 if (if_true_label)
8715 emit_jump (if_true_label);
8716 if (drop_through_label)
8717 emit_label (drop_through_label);
8718}
bbf6f052 8719
ca695ac9
JB
8720/* Given a comparison expression in rtl form, output conditional branches to
8721 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
bbf6f052 8722
ca695ac9
JB
8723static void
8724do_jump_for_compare (comparison, if_false_label, if_true_label)
8725 rtx comparison, if_false_label, if_true_label;
8726{
8727 if (if_true_label)
a358cee0 8728 {
ca695ac9
JB
8729 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8730 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
8731 else
8732 abort ();
a358cee0 8733
ca695ac9
JB
8734 if (if_false_label)
8735 emit_jump (if_false_label);
c980ac49 8736 }
ca695ac9 8737 else if (if_false_label)
bbf6f052 8738 {
ca695ac9 8739 rtx insn;
f12f485a 8740 rtx prev = get_last_insn ();
ca695ac9 8741 rtx branch = 0;
bbf6f052 8742
f12f485a
RK
8743 if (prev != 0)
8744 prev = PREV_INSN (prev);
8745
ca695ac9
JB
8746 /* Output the branch with the opposite condition. Then try to invert
8747 what is generated. If more than one insn is a branch, or if the
8748 branch is not the last insn written, abort. If we can't invert
8749 the branch, emit make a true label, redirect this jump to that,
8750 emit a jump to the false label and define the true label. */
bbf6f052 8751
ca695ac9
JB
8752 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8753 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
8754 else
8755 abort ();
bbf6f052 8756
ca695ac9
JB
8757 /* Here we get the insn before what was just emitted.
8758 On some machines, emitting the branch can discard
8759 the previous compare insn and emit a replacement. */
8760 if (prev == 0)
8761 /* If there's only one preceding insn... */
8762 insn = get_insns ();
8763 else
8764 insn = NEXT_INSN (prev);
bbf6f052 8765
ca695ac9
JB
8766 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
8767 if (GET_CODE (insn) == JUMP_INSN)
8768 {
8769 if (branch)
8770 abort ();
8771 branch = insn;
8772 }
8773
8774 if (branch != get_last_insn ())
8775 abort ();
8776
8777 if (! invert_jump (branch, if_false_label))
8778 {
8779 if_true_label = gen_label_rtx ();
8780 redirect_jump (branch, if_true_label);
8781 emit_jump (if_false_label);
8782 emit_label (if_true_label);
bbf6f052
RK
8783 }
8784 }
ca695ac9
JB
8785}
8786\f
8787/* Generate code for a comparison expression EXP
8788 (including code to compute the values to be compared)
8789 and set (CC0) according to the result.
8790 SIGNED_CODE should be the rtx operation for this comparison for
8791 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
8792
8793 We force a stack adjustment unless there are currently
8794 things pushed on the stack that aren't yet used. */
8795
8796static rtx
8797compare (exp, signed_code, unsigned_code)
8798 register tree exp;
8799 enum rtx_code signed_code, unsigned_code;
8800{
8801 register rtx op0
8802 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8803 register rtx op1
8804 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8805 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
8806 register enum machine_mode mode = TYPE_MODE (type);
8807 int unsignedp = TREE_UNSIGNED (type);
8808 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
bbf6f052 8809
ca695ac9
JB
8810 return compare_from_rtx (op0, op1, code, unsignedp, mode,
8811 ((mode == BLKmode)
8812 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
8813 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
8814}
bbf6f052 8815
ca695ac9
JB
8816/* Like compare but expects the values to compare as two rtx's.
8817 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 8818
ca695ac9
JB
8819 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
8820 compared.
bbf6f052 8821
ca695ac9
JB
8822 If ALIGN is non-zero, it is the alignment of this type; if zero, the
8823 size of MODE should be used. */
bbf6f052 8824
ca695ac9
JB
8825rtx
8826compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
8827 register rtx op0, op1;
8828 enum rtx_code code;
8829 int unsignedp;
8830 enum machine_mode mode;
8831 rtx size;
8832 int align;
8833{
8834 rtx tem;
bbf6f052 8835
ca695ac9
JB
8836 /* If one operand is constant, make it the second one. Only do this
8837 if the other operand is not constant as well. */
bbf6f052 8838
ca695ac9
JB
8839 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
8840 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
8841 {
8842 tem = op0;
8843 op0 = op1;
8844 op1 = tem;
8845 code = swap_condition (code);
8846 }
bbf6f052 8847
ca695ac9 8848 if (flag_force_mem)
bbf6f052 8849 {
ca695ac9
JB
8850 op0 = force_not_mem (op0);
8851 op1 = force_not_mem (op1);
8852 }
bbf6f052 8853
ca695ac9 8854 do_pending_stack_adjust ();
bbf6f052 8855
ca695ac9
JB
8856 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
8857 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
8858 return tem;
bbf6f052 8859
ca695ac9
JB
8860#if 0
8861 /* There's no need to do this now that combine.c can eliminate lots of
8862 sign extensions. This can be less efficient in certain cases on other
8863 machines. */
bbf6f052 8864
ca695ac9
JB
8865 /* If this is a signed equality comparison, we can do it as an
8866 unsigned comparison since zero-extension is cheaper than sign
8867 extension and comparisons with zero are done as unsigned. This is
8868 the case even on machines that can do fast sign extension, since
8869 zero-extension is easier to combine with other operations than
8870 sign-extension is. If we are comparing against a constant, we must
8871 convert it to what it would look like unsigned. */
8872 if ((code == EQ || code == NE) && ! unsignedp
8873 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
8874 {
8875 if (GET_CODE (op1) == CONST_INT
8876 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
8877 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
8878 unsignedp = 1;
bbf6f052 8879 }
ca695ac9
JB
8880#endif
8881
8882 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
bbf6f052 8883
ca695ac9 8884 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
bbf6f052
RK
8885}
8886\f
ca695ac9
JB
8887/* Generate code to calculate EXP using a store-flag instruction
8888 and return an rtx for the result. EXP is either a comparison
8889 or a TRUTH_NOT_EXPR whose operand is a comparison.
bbf6f052 8890
ca695ac9 8891 If TARGET is nonzero, store the result there if convenient.
bbf6f052 8892
ca695ac9
JB
8893 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
8894 cheap.
bbf6f052 8895
ca695ac9
JB
8896 Return zero if there is no suitable set-flag instruction
8897 available on this machine.
bbf6f052 8898
ca695ac9
JB
8899 Once expand_expr has been called on the arguments of the comparison,
8900 we are committed to doing the store flag, since it is not safe to
8901 re-evaluate the expression. We emit the store-flag insn by calling
8902 emit_store_flag, but only expand the arguments if we have a reason
8903 to believe that emit_store_flag will be successful. If we think that
8904 it will, but it isn't, we have to simulate the store-flag with a
8905 set/jump/set sequence. */
bbf6f052 8906
ca695ac9
JB
8907static rtx
8908do_store_flag (exp, target, mode, only_cheap)
8909 tree exp;
8910 rtx target;
8911 enum machine_mode mode;
8912 int only_cheap;
bbf6f052 8913{
ca695ac9
JB
8914 enum rtx_code code;
8915 tree arg0, arg1, type;
8916 tree tem;
8917 enum machine_mode operand_mode;
8918 int invert = 0;
8919 int unsignedp;
8920 rtx op0, op1;
8921 enum insn_code icode;
8922 rtx subtarget = target;
8923 rtx result, label, pattern, jump_pat;
bbf6f052 8924
ca695ac9
JB
8925 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8926 result at the end. We can't simply invert the test since it would
8927 have already been inverted if it were valid. This case occurs for
8928 some floating-point comparisons. */
8929
8930 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8931 invert = 1, exp = TREE_OPERAND (exp, 0);
8932
8933 arg0 = TREE_OPERAND (exp, 0);
8934 arg1 = TREE_OPERAND (exp, 1);
8935 type = TREE_TYPE (arg0);
8936 operand_mode = TYPE_MODE (type);
8937 unsignedp = TREE_UNSIGNED (type);
8938
8939 /* We won't bother with BLKmode store-flag operations because it would mean
8940 passing a lot of information to emit_store_flag. */
8941 if (operand_mode == BLKmode)
8942 return 0;
8943
8944 STRIP_NOPS (arg0);
8945 STRIP_NOPS (arg1);
8946
8947 /* Get the rtx comparison code to use. We know that EXP is a comparison
8948 operation of some type. Some comparisons against 1 and -1 can be
8949 converted to comparisons with zero. Do so here so that the tests
8950 below will be aware that we have a comparison with zero. These
8951 tests will not catch constants in the first operand, but constants
8952 are rarely passed as the first operand. */
8953
8954 switch (TREE_CODE (exp))
8955 {
8956 case EQ_EXPR:
8957 code = EQ;
8958 break;
8959 case NE_EXPR:
8960 code = NE;
8961 break;
8962 case LT_EXPR:
8963 if (integer_onep (arg1))
8964 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8965 else
8966 code = unsignedp ? LTU : LT;
8967 break;
8968 case LE_EXPR:
8969 if (! unsignedp && integer_all_onesp (arg1))
8970 arg1 = integer_zero_node, code = LT;
8971 else
8972 code = unsignedp ? LEU : LE;
8973 break;
8974 case GT_EXPR:
8975 if (! unsignedp && integer_all_onesp (arg1))
8976 arg1 = integer_zero_node, code = GE;
8977 else
8978 code = unsignedp ? GTU : GT;
8979 break;
8980 case GE_EXPR:
8981 if (integer_onep (arg1))
8982 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8983 else
8984 code = unsignedp ? GEU : GE;
8985 break;
8986 default:
8987 abort ();
8988 }
bbf6f052 8989
ca695ac9
JB
8990 /* Put a constant second. */
8991 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
bbf6f052 8992 {
ca695ac9
JB
8993 tem = arg0; arg0 = arg1; arg1 = tem;
8994 code = swap_condition (code);
bbf6f052 8995 }
bbf6f052 8996
ca695ac9
JB
8997 /* If this is an equality or inequality test of a single bit, we can
8998 do this by shifting the bit being tested to the low-order bit and
8999 masking the result with the constant 1. If the condition was EQ,
9000 we xor it with 1. This does not require an scc insn and is faster
9001 than an scc insn even if we have it. */
bbf6f052 9002
ca695ac9
JB
9003 if ((code == NE || code == EQ)
9004 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9005 && integer_pow2p (TREE_OPERAND (arg0, 1))
9006 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9007 {
9008 tree inner = TREE_OPERAND (arg0, 0);
9009 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9010 NULL_RTX, VOIDmode, 0)));
9011 int ops_unsignedp;
bbf6f052 9012
ca695ac9
JB
9013 /* If INNER is a right shift of a constant and it plus BITNUM does
9014 not overflow, adjust BITNUM and INNER. */
bbf6f052 9015
ca695ac9
JB
9016 if (TREE_CODE (inner) == RSHIFT_EXPR
9017 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9018 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9019 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9020 < TYPE_PRECISION (type)))
9021 {
9022 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9023 inner = TREE_OPERAND (inner, 0);
9024 }
bbf6f052 9025
ca695ac9
JB
9026 /* If we are going to be able to omit the AND below, we must do our
9027 operations as unsigned. If we must use the AND, we have a choice.
9028 Normally unsigned is faster, but for some machines signed is. */
9029 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
ad92c826
RK
9030#ifdef LOAD_EXTEND_OP
9031 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
ca695ac9
JB
9032#else
9033 : 1
9034#endif
9035 );
bbf6f052 9036
ca695ac9
JB
9037 if (subtarget == 0 || GET_CODE (subtarget) != REG
9038 || GET_MODE (subtarget) != operand_mode
9039 || ! safe_from_p (subtarget, inner))
9040 subtarget = 0;
e7c33f54 9041
ca695ac9 9042 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 9043
ca695ac9
JB
9044 if (bitnum != 0)
9045 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
0c316b20 9046 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 9047
ca695ac9
JB
9048 if (GET_MODE (op0) != mode)
9049 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 9050
ca695ac9 9051 if ((code == EQ && ! invert) || (code == NE && invert))
0c316b20 9052 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
ca695ac9 9053 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 9054
ca695ac9
JB
9055 /* Put the AND last so it can combine with more things. */
9056 if (bitnum != TYPE_PRECISION (type) - 1)
0c316b20 9057 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 9058
ca695ac9
JB
9059 return op0;
9060 }
bbf6f052 9061
ca695ac9
JB
9062 /* Now see if we are likely to be able to do this. Return if not. */
9063 if (! can_compare_p (operand_mode))
9064 return 0;
9065 icode = setcc_gen_code[(int) code];
9066 if (icode == CODE_FOR_nothing
9067 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9068 {
9069 /* We can only do this if it is one of the special cases that
9070 can be handled without an scc insn. */
9071 if ((code == LT && integer_zerop (arg1))
9072 || (! only_cheap && code == GE && integer_zerop (arg1)))
9073 ;
9074 else if (BRANCH_COST >= 0
9075 && ! only_cheap && (code == NE || code == EQ)
9076 && TREE_CODE (type) != REAL_TYPE
9077 && ((abs_optab->handlers[(int) operand_mode].insn_code
9078 != CODE_FOR_nothing)
9079 || (ffs_optab->handlers[(int) operand_mode].insn_code
9080 != CODE_FOR_nothing)))
9081 ;
9082 else
9083 return 0;
9084 }
9085
9086 preexpand_calls (exp);
9087 if (subtarget == 0 || GET_CODE (subtarget) != REG
9088 || GET_MODE (subtarget) != operand_mode
9089 || ! safe_from_p (subtarget, arg1))
9090 subtarget = 0;
bbf6f052 9091
ca695ac9
JB
9092 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9093 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
bbf6f052 9094
ca695ac9
JB
9095 if (target == 0)
9096 target = gen_reg_rtx (mode);
bbf6f052 9097
ca695ac9
JB
9098 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9099 because, if the emit_store_flag does anything it will succeed and
9100 OP0 and OP1 will not be used subsequently. */
bbf6f052 9101
ca695ac9
JB
9102 result = emit_store_flag (target, code,
9103 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9104 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9105 operand_mode, unsignedp, 1);
bbf6f052 9106
ca695ac9
JB
9107 if (result)
9108 {
9109 if (invert)
9110 result = expand_binop (mode, xor_optab, result, const1_rtx,
9111 result, 0, OPTAB_LIB_WIDEN);
9112 return result;
9113 }
bbf6f052 9114
ca695ac9
JB
9115 /* If this failed, we have to do this with set/compare/jump/set code. */
9116 if (target == 0 || GET_CODE (target) != REG
9117 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9118 target = gen_reg_rtx (GET_MODE (target));
bbf6f052 9119
ca695ac9
JB
9120 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9121 result = compare_from_rtx (op0, op1, code, unsignedp,
9122 operand_mode, NULL_RTX, 0);
9123 if (GET_CODE (result) == CONST_INT)
9124 return (((result == const0_rtx && ! invert)
9125 || (result != const0_rtx && invert))
9126 ? const0_rtx : const1_rtx);
bbf6f052 9127
ca695ac9
JB
9128 label = gen_label_rtx ();
9129 if (bcc_gen_fctn[(int) code] == 0)
9130 abort ();
bbf6f052 9131
ca695ac9
JB
9132 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9133 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9134 emit_label (label);
bbf6f052 9135
ca695ac9
JB
9136 return target;
9137}
9138\f
9139/* Generate a tablejump instruction (used for switch statements). */
bbf6f052 9140
ca695ac9 9141#ifdef HAVE_tablejump
bbf6f052 9142
ca695ac9
JB
9143/* INDEX is the value being switched on, with the lowest value
9144 in the table already subtracted.
9145 MODE is its expected mode (needed if INDEX is constant).
9146 RANGE is the length of the jump table.
9147 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
bbf6f052 9148
ca695ac9
JB
9149 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9150 index value is out of range. */
bbf6f052 9151
ca695ac9
JB
9152void
9153do_tablejump (index, mode, range, table_label, default_label)
9154 rtx index, range, table_label, default_label;
9155 enum machine_mode mode;
9156{
9157 register rtx temp, vector;
bbf6f052 9158
ca695ac9
JB
9159 /* Do an unsigned comparison (in the proper mode) between the index
9160 expression and the value which represents the length of the range.
9161 Since we just finished subtracting the lower bound of the range
9162 from the index expression, this comparison allows us to simultaneously
9163 check that the original index expression value is both greater than
9164 or equal to the minimum value of the range and less than or equal to
9165 the maximum value of the range. */
bbf6f052 9166
ca695ac9
JB
9167 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
9168 emit_jump_insn (gen_bltu (default_label));
bbf6f052 9169
ca695ac9
JB
9170 /* If index is in range, it must fit in Pmode.
9171 Convert to Pmode so we can index with it. */
9172 if (mode != Pmode)
9173 index = convert_to_mode (Pmode, index, 1);
bbf6f052 9174
ca695ac9
JB
9175 /* Don't let a MEM slip thru, because then INDEX that comes
9176 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9177 and break_out_memory_refs will go to work on it and mess it up. */
9178#ifdef PIC_CASE_VECTOR_ADDRESS
9179 if (flag_pic && GET_CODE (index) != REG)
9180 index = copy_to_mode_reg (Pmode, index);
9181#endif
bbf6f052 9182
ca695ac9
JB
9183 /* If flag_force_addr were to affect this address
9184 it could interfere with the tricky assumptions made
9185 about addresses that contain label-refs,
9186 which may be valid only very near the tablejump itself. */
9187 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9188 GET_MODE_SIZE, because this indicates how large insns are. The other
9189 uses should all be Pmode, because they are addresses. This code
9190 could fail if addresses and insns are not the same size. */
9191 index = gen_rtx (PLUS, Pmode,
9192 gen_rtx (MULT, Pmode, index,
9193 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9194 gen_rtx (LABEL_REF, Pmode, table_label));
9195#ifdef PIC_CASE_VECTOR_ADDRESS
9196 if (flag_pic)
9197 index = PIC_CASE_VECTOR_ADDRESS (index);
9198 else
9199#endif
9200 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9201 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9202 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9203 RTX_UNCHANGING_P (vector) = 1;
9204 convert_move (temp, vector, 0);
bbf6f052 9205
ca695ac9 9206 emit_jump_insn (gen_tablejump (temp, table_label));
bbf6f052 9207
ca695ac9
JB
9208#ifndef CASE_VECTOR_PC_RELATIVE
9209 /* If we are generating PIC code or if the table is PC-relative, the
9210 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9211 if (! flag_pic)
9212 emit_barrier ();
bbf6f052 9213#endif
ca695ac9 9214}
bbf6f052 9215
ca695ac9 9216#endif /* HAVE_tablejump */
bbf6f052 9217
bbf6f052 9218
ca695ac9
JB
9219/* Emit a suitable bytecode to load a value from memory, assuming a pointer
9220 to that value is on the top of the stack. The resulting type is TYPE, and
9221 the source declaration is DECL. */
bbf6f052 9222
ca695ac9
JB
9223void
9224bc_load_memory (type, decl)
9225 tree type, decl;
9226{
9227 enum bytecode_opcode opcode;
9228
9229
9230 /* Bit fields are special. We only know about signed and
9231 unsigned ints, and enums. The latter are treated as
9232 signed integers. */
9233
9234 if (DECL_BIT_FIELD (decl))
9235 if (TREE_CODE (type) == ENUMERAL_TYPE
9236 || TREE_CODE (type) == INTEGER_TYPE)
9237 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9238 else
9239 abort ();
9240 else
9241 /* See corresponding comment in bc_store_memory(). */
9242 if (TYPE_MODE (type) == BLKmode
9243 || TYPE_MODE (type) == VOIDmode)
9244 return;
9245 else
6bd6178d 9246 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
bbf6f052 9247
ca695ac9
JB
9248 if (opcode == neverneverland)
9249 abort ();
9250
9251 bc_emit_bytecode (opcode);
9252
9253#ifdef DEBUG_PRINT_CODE
9254 fputc ('\n', stderr);
9255#endif
bbf6f052 9256}
bbf6f052 9257
bbf6f052 9258
ca695ac9
JB
9259/* Store the contents of the second stack slot to the address in the
9260 top stack slot. DECL is the declaration of the destination and is used
9261 to determine whether we're dealing with a bitfield. */
bbf6f052 9262
ca695ac9
JB
9263void
9264bc_store_memory (type, decl)
9265 tree type, decl;
9266{
9267 enum bytecode_opcode opcode;
9268
9269
9270 if (DECL_BIT_FIELD (decl))
f81497d9 9271 {
ca695ac9
JB
9272 if (TREE_CODE (type) == ENUMERAL_TYPE
9273 || TREE_CODE (type) == INTEGER_TYPE)
9274 opcode = sstoreBI;
f81497d9 9275 else
ca695ac9 9276 abort ();
f81497d9 9277 }
ca695ac9
JB
9278 else
9279 if (TYPE_MODE (type) == BLKmode)
9280 {
9281 /* Copy structure. This expands to a block copy instruction, storeBLK.
9282 In addition to the arguments expected by the other store instructions,
9283 it also expects a type size (SImode) on top of the stack, which is the
9284 structure size in size units (usually bytes). The two first arguments
9285 are already on the stack; so we just put the size on level 1. For some
9286 other languages, the size may be variable, this is why we don't encode
9287 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
9288
9289 bc_expand_expr (TYPE_SIZE (type));
9290 opcode = storeBLK;
9291 }
9292 else
6bd6178d 9293 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
f81497d9 9294
ca695ac9
JB
9295 if (opcode == neverneverland)
9296 abort ();
9297
9298 bc_emit_bytecode (opcode);
9299
9300#ifdef DEBUG_PRINT_CODE
9301 fputc ('\n', stderr);
9302#endif
f81497d9
RS
9303}
9304
f81497d9 9305
ca695ac9
JB
9306/* Allocate local stack space sufficient to hold a value of the given
9307 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9308 integral power of 2. A special case is locals of type VOID, which
9309 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9310 remapped into the corresponding attribute of SI. */
9311
9312rtx
9313bc_allocate_local (size, alignment)
9314 int size, alignment;
f81497d9 9315{
ca695ac9
JB
9316 rtx retval;
9317 int byte_alignment;
f81497d9 9318
ca695ac9
JB
9319 if (size < 0)
9320 abort ();
f81497d9 9321
ca695ac9
JB
9322 /* Normalize size and alignment */
9323 if (!size)
9324 size = UNITS_PER_WORD;
bbf6f052 9325
ca695ac9
JB
9326 if (alignment < BITS_PER_UNIT)
9327 byte_alignment = 1 << (INT_ALIGN - 1);
9328 else
9329 /* Align */
9330 byte_alignment = alignment / BITS_PER_UNIT;
bbf6f052 9331
ca695ac9
JB
9332 if (local_vars_size & (byte_alignment - 1))
9333 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
bbf6f052 9334
ca695ac9
JB
9335 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9336 local_vars_size += size;
bbf6f052 9337
ca695ac9 9338 return retval;
bbf6f052
RK
9339}
9340
bbf6f052 9341
ca695ac9
JB
9342/* Allocate variable-sized local array. Variable-sized arrays are
9343 actually pointers to the address in memory where they are stored. */
9344
9345rtx
9346bc_allocate_variable_array (size)
9347 tree size;
bbf6f052 9348{
ca695ac9
JB
9349 rtx retval;
9350 const int ptralign = (1 << (PTR_ALIGN - 1));
bbf6f052 9351
ca695ac9
JB
9352 /* Align pointer */
9353 if (local_vars_size & ptralign)
9354 local_vars_size += ptralign - (local_vars_size & ptralign);
bbf6f052 9355
ca695ac9
JB
9356 /* Note down local space needed: pointer to block; also return
9357 dummy rtx */
bbf6f052 9358
ca695ac9
JB
9359 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9360 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
9361 return retval;
bbf6f052 9362}
bbf6f052 9363
bbf6f052 9364
ca695ac9
JB
9365/* Push the machine address for the given external variable offset. */
9366void
9367bc_load_externaddr (externaddr)
9368 rtx externaddr;
9369{
9370 bc_emit_bytecode (constP);
e7a42772
JB
9371 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
9372 BYTECODE_BC_LABEL (externaddr)->offset);
bbf6f052 9373
ca695ac9
JB
9374#ifdef DEBUG_PRINT_CODE
9375 fputc ('\n', stderr);
9376#endif
bbf6f052
RK
9377}
9378
bbf6f052 9379
ca695ac9
JB
9380static char *
9381bc_strdup (s)
9382 char *s;
bbf6f052 9383{
5e70898c
RS
9384 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
9385 strcpy (new, s);
9386 return new;
ca695ac9 9387}
bbf6f052 9388
bbf6f052 9389
ca695ac9
JB
9390/* Like above, but expects an IDENTIFIER. */
9391void
9392bc_load_externaddr_id (id, offset)
9393 tree id;
9394 int offset;
9395{
9396 if (!IDENTIFIER_POINTER (id))
9397 abort ();
bbf6f052 9398
ca695ac9
JB
9399 bc_emit_bytecode (constP);
9400 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
bbf6f052 9401
ca695ac9
JB
9402#ifdef DEBUG_PRINT_CODE
9403 fputc ('\n', stderr);
9404#endif
9405}
bbf6f052 9406
bbf6f052 9407
ca695ac9
JB
9408/* Push the machine address for the given local variable offset. */
9409void
9410bc_load_localaddr (localaddr)
9411 rtx localaddr;
9412{
e7a42772 9413 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
bbf6f052 9414}
bbf6f052 9415
bbf6f052 9416
ca695ac9
JB
9417/* Push the machine address for the given parameter offset.
9418 NOTE: offset is in bits. */
9419void
9420bc_load_parmaddr (parmaddr)
9421 rtx parmaddr;
bbf6f052 9422{
e7a42772
JB
9423 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
9424 / BITS_PER_UNIT));
ca695ac9 9425}
bbf6f052 9426
ca695ac9
JB
9427
9428/* Convert a[i] into *(a + i). */
9429tree
9430bc_canonicalize_array_ref (exp)
9431 tree exp;
9432{
9433 tree type = TREE_TYPE (exp);
9434 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
9435 TREE_OPERAND (exp, 0));
9436 tree index = TREE_OPERAND (exp, 1);
9437
9438
9439 /* Convert the integer argument to a type the same size as a pointer
9440 so the multiply won't overflow spuriously. */
9441
9442 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
9443 index = convert (type_for_size (POINTER_SIZE, 0), index);
9444
9445 /* The array address isn't volatile even if the array is.
9446 (Of course this isn't terribly relevant since the bytecode
9447 translator treats nearly everything as volatile anyway.) */
9448 TREE_THIS_VOLATILE (array_adr) = 0;
9449
9450 return build1 (INDIRECT_REF, type,
9451 fold (build (PLUS_EXPR,
9452 TYPE_POINTER_TO (type),
9453 array_adr,
9454 fold (build (MULT_EXPR,
9455 TYPE_POINTER_TO (type),
9456 index,
9457 size_in_bytes (type))))));
bbf6f052
RK
9458}
9459
bbf6f052 9460
ca695ac9
JB
9461/* Load the address of the component referenced by the given
9462 COMPONENT_REF expression.
bbf6f052 9463
ca695ac9 9464 Returns innermost lvalue. */
bbf6f052 9465
ca695ac9
JB
9466tree
9467bc_expand_component_address (exp)
9468 tree exp;
bbf6f052 9469{
ca695ac9
JB
9470 tree tem, chain;
9471 enum machine_mode mode;
9472 int bitpos = 0;
9473 HOST_WIDE_INT SIval;
a7c5971a 9474
bbf6f052 9475
ca695ac9
JB
9476 tem = TREE_OPERAND (exp, 1);
9477 mode = DECL_MODE (tem);
bbf6f052 9478
ca695ac9
JB
9479
9480 /* Compute cumulative bit offset for nested component refs
9481 and array refs, and find the ultimate containing object. */
9482
9483 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
bbf6f052 9484 {
ca695ac9
JB
9485 if (TREE_CODE (tem) == COMPONENT_REF)
9486 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
9487 else
9488 if (TREE_CODE (tem) == ARRAY_REF
9489 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9490 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
bbf6f052 9491
ca695ac9
JB
9492 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
9493 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
9494 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
9495 else
9496 break;
9497 }
bbf6f052 9498
c02bd5d9 9499 bc_expand_expr (tem);
bbf6f052 9500
cd1b4b44 9501
ca695ac9
JB
9502 /* For bitfields also push their offset and size */
9503 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
9504 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
9505 else
9506 if (SIval = bitpos / BITS_PER_UNIT)
9507 bc_emit_instruction (addconstPSI, SIval);
bbf6f052 9508
ca695ac9 9509 return (TREE_OPERAND (exp, 1));
bbf6f052 9510}
e7c33f54 9511
bbf6f052 9512
ca695ac9
JB
9513/* Emit code to push two SI constants */
9514void
9515bc_push_offset_and_size (offset, size)
9516 HOST_WIDE_INT offset, size;
9517{
9518 bc_emit_instruction (constSI, offset);
9519 bc_emit_instruction (constSI, size);
9520}
bbf6f052 9521
bbf6f052 9522
ca695ac9
JB
9523/* Emit byte code to push the address of the given lvalue expression to
9524 the stack. If it's a bit field, we also push offset and size info.
bbf6f052 9525
ca695ac9
JB
9526 Returns innermost component, which allows us to determine not only
9527 its type, but also whether it's a bitfield. */
9528
9529tree
9530bc_expand_address (exp)
bbf6f052 9531 tree exp;
bbf6f052 9532{
ca695ac9
JB
9533 /* Safeguard */
9534 if (!exp || TREE_CODE (exp) == ERROR_MARK)
9535 return (exp);
bbf6f052 9536
e7c33f54 9537
ca695ac9
JB
9538 switch (TREE_CODE (exp))
9539 {
9540 case ARRAY_REF:
e7c33f54 9541
ca695ac9 9542 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
e7c33f54 9543
ca695ac9 9544 case COMPONENT_REF:
bbf6f052 9545
ca695ac9 9546 return (bc_expand_component_address (exp));
bbf6f052 9547
ca695ac9 9548 case INDIRECT_REF:
bbf6f052 9549
ca695ac9
JB
9550 bc_expand_expr (TREE_OPERAND (exp, 0));
9551
9552 /* For variable-sized types: retrieve pointer. Sometimes the
9553 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
9554 also make sure we have an operand, just in case... */
9555
9556 if (TREE_OPERAND (exp, 0)
9557 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
9558 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
9559 bc_emit_instruction (loadP);
9560
9561 /* If packed, also return offset and size */
9562 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
9563
9564 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
9565 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
9566
9567 return (TREE_OPERAND (exp, 0));
9568
9569 case FUNCTION_DECL:
9570
e7a42772
JB
9571 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9572 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
bbf6f052 9573 break;
ca695ac9
JB
9574
9575 case PARM_DECL:
9576
9577 bc_load_parmaddr (DECL_RTL (exp));
9578
9579 /* For variable-sized types: retrieve pointer */
9580 if (TYPE_SIZE (TREE_TYPE (exp))
9581 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9582 bc_emit_instruction (loadP);
9583
9584 /* If packed, also return offset and size */
9585 if (DECL_BIT_FIELD (exp))
9586 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9587 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9588
bbf6f052 9589 break;
ca695ac9
JB
9590
9591 case RESULT_DECL:
9592
9593 bc_emit_instruction (returnP);
bbf6f052 9594 break;
ca695ac9
JB
9595
9596 case VAR_DECL:
9597
9598#if 0
e7a42772 9599 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
9600 bc_load_externaddr (DECL_RTL (exp));
9601#endif
9602
9603 if (DECL_EXTERNAL (exp))
e7a42772 9604 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
eb862a37 9605 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
bbf6f052 9606 else
ca695ac9
JB
9607 bc_load_localaddr (DECL_RTL (exp));
9608
9609 /* For variable-sized types: retrieve pointer */
9610 if (TYPE_SIZE (TREE_TYPE (exp))
9611 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9612 bc_emit_instruction (loadP);
9613
9614 /* If packed, also return offset and size */
9615 if (DECL_BIT_FIELD (exp))
9616 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9617 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9618
bbf6f052 9619 break;
ca695ac9
JB
9620
9621 case STRING_CST:
9622 {
9623 rtx r;
9624
9625 bc_emit_bytecode (constP);
9626 r = output_constant_def (exp);
e7a42772 9627 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
ca695ac9
JB
9628
9629#ifdef DEBUG_PRINT_CODE
9630 fputc ('\n', stderr);
9631#endif
9632 }
bbf6f052 9633 break;
ca695ac9 9634
bbf6f052 9635 default:
bbf6f052 9636
ca695ac9
JB
9637 abort();
9638 break;
bbf6f052
RK
9639 }
9640
ca695ac9
JB
9641 /* Most lvalues don't have components. */
9642 return (exp);
9643}
bbf6f052 9644
ca695ac9
JB
9645
9646/* Emit a type code to be used by the runtime support in handling
9647 parameter passing. The type code consists of the machine mode
9648 plus the minimal alignment shifted left 8 bits. */
9649
9650tree
9651bc_runtime_type_code (type)
9652 tree type;
9653{
9654 int val;
9655
9656 switch (TREE_CODE (type))
bbf6f052 9657 {
ca695ac9
JB
9658 case VOID_TYPE:
9659 case INTEGER_TYPE:
9660 case REAL_TYPE:
9661 case COMPLEX_TYPE:
9662 case ENUMERAL_TYPE:
9663 case POINTER_TYPE:
9664 case RECORD_TYPE:
9665
6bd6178d 9666 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
ca695ac9
JB
9667 break;
9668
9669 case ERROR_MARK:
9670
9671 val = 0;
9672 break;
9673
9674 default:
af508edd 9675
ca695ac9
JB
9676 abort ();
9677 }
9678 return build_int_2 (val, 0);
9679}
af508edd 9680
af508edd 9681
ca695ac9
JB
9682/* Generate constructor label */
9683char *
9684bc_gen_constr_label ()
9685{
9686 static int label_counter;
9687 static char label[20];
bbf6f052 9688
ca695ac9 9689 sprintf (label, "*LR%d", label_counter++);
bbf6f052 9690
ca695ac9
JB
9691 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
9692}
bbf6f052 9693
bbf6f052 9694
ca695ac9
JB
9695/* Evaluate constructor CONSTR and return pointer to it on level one. We
9696 expand the constructor data as static data, and push a pointer to it.
9697 The pointer is put in the pointer table and is retrieved by a constP
9698 bytecode instruction. We then loop and store each constructor member in
9699 the corresponding component. Finally, we return the original pointer on
9700 the stack. */
af508edd 9701
ca695ac9
JB
9702void
9703bc_expand_constructor (constr)
9704 tree constr;
9705{
9706 char *l;
9707 HOST_WIDE_INT ptroffs;
9708 rtx constr_rtx;
bbf6f052 9709
ca695ac9
JB
9710
9711 /* Literal constructors are handled as constants, whereas
9712 non-literals are evaluated and stored element by element
9713 into the data segment. */
9714
9715 /* Allocate space in proper segment and push pointer to space on stack.
9716 */
bbf6f052 9717
ca695ac9 9718 l = bc_gen_constr_label ();
bbf6f052 9719
ca695ac9 9720 if (TREE_CONSTANT (constr))
bbf6f052 9721 {
ca695ac9
JB
9722 text_section ();
9723
9724 bc_emit_const_labeldef (l);
9725 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
bbf6f052 9726 }
ca695ac9
JB
9727 else
9728 {
9729 data_section ();
bbf6f052 9730
ca695ac9
JB
9731 bc_emit_data_labeldef (l);
9732 bc_output_data_constructor (constr);
9733 }
bbf6f052 9734
ca695ac9
JB
9735
9736 /* Add reference to pointer table and recall pointer to stack;
9737 this code is common for both types of constructors: literals
9738 and non-literals. */
bbf6f052 9739
de7d9320
JB
9740 ptroffs = bc_define_pointer (l);
9741 bc_emit_instruction (constP, ptroffs);
d39985fa 9742
ca695ac9
JB
9743 /* This is all that has to be done if it's a literal. */
9744 if (TREE_CONSTANT (constr))
9745 return;
bbf6f052 9746
ca695ac9
JB
9747
9748 /* At this point, we have the pointer to the structure on top of the stack.
9749 Generate sequences of store_memory calls for the constructor. */
9750
9751 /* constructor type is structure */
9752 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
e7c33f54 9753 {
ca695ac9
JB
9754 register tree elt;
9755
9756 /* If the constructor has fewer fields than the structure,
9757 clear the whole structure first. */
9758
9759 if (list_length (CONSTRUCTOR_ELTS (constr))
9760 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
9761 {
6d6e61ce 9762 bc_emit_instruction (duplicate);
ca695ac9
JB
9763 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9764 bc_emit_instruction (clearBLK);
9765 }
9766
9767 /* Store each element of the constructor into the corresponding
9768 field of TARGET. */
9769
9770 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
9771 {
9772 register tree field = TREE_PURPOSE (elt);
9773 register enum machine_mode mode;
9774 int bitsize;
9775 int bitpos;
9776 int unsignedp;
9777
9778 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
9779 mode = DECL_MODE (field);
9780 unsignedp = TREE_UNSIGNED (field);
9781
9782 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
9783
9784 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9785 /* The alignment of TARGET is
9786 at least what its type requires. */
9787 VOIDmode, 0,
9788 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9789 int_size_in_bytes (TREE_TYPE (constr)));
9790 }
e7c33f54 9791 }
ca695ac9
JB
9792 else
9793
9794 /* Constructor type is array */
9795 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
9796 {
9797 register tree elt;
9798 register int i;
9799 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
9800 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
9801 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
9802 tree elttype = TREE_TYPE (TREE_TYPE (constr));
9803
9804 /* If the constructor has fewer fields than the structure,
9805 clear the whole structure first. */
9806
9807 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
9808 {
6d6e61ce 9809 bc_emit_instruction (duplicate);
ca695ac9
JB
9810 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9811 bc_emit_instruction (clearBLK);
9812 }
9813
9814
9815 /* Store each element of the constructor into the corresponding
9816 element of TARGET, determined by counting the elements. */
9817
9818 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
9819 elt;
9820 elt = TREE_CHAIN (elt), i++)
9821 {
9822 register enum machine_mode mode;
9823 int bitsize;
9824 int bitpos;
9825 int unsignedp;
9826
9827 mode = TYPE_MODE (elttype);
9828 bitsize = GET_MODE_BITSIZE (mode);
9829 unsignedp = TREE_UNSIGNED (elttype);
9830
9831 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
9832 /* * TYPE_SIZE_UNIT (elttype) */ );
9833
9834 bc_store_field (elt, bitsize, bitpos, mode,
9835 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9836 /* The alignment of TARGET is
9837 at least what its type requires. */
9838 VOIDmode, 0,
9839 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9840 int_size_in_bytes (TREE_TYPE (constr)));
9841 }
9842
9843 }
9844}
bbf6f052 9845
bbf6f052 9846
ca695ac9
JB
9847/* Store the value of EXP (an expression tree) into member FIELD of
9848 structure at address on stack, which has type TYPE, mode MODE and
9849 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
9850 structure.
bbf6f052 9851
ca695ac9
JB
9852 ALIGN is the alignment that TARGET is known to have, measured in bytes.
9853 TOTAL_SIZE is its size in bytes, or -1 if variable. */
bbf6f052 9854
ca695ac9
JB
9855void
9856bc_store_field (field, bitsize, bitpos, mode, exp, type,
9857 value_mode, unsignedp, align, total_size)
9858 int bitsize, bitpos;
9859 enum machine_mode mode;
9860 tree field, exp, type;
9861 enum machine_mode value_mode;
9862 int unsignedp;
9863 int align;
9864 int total_size;
9865{
bbf6f052 9866
ca695ac9
JB
9867 /* Expand expression and copy pointer */
9868 bc_expand_expr (exp);
9869 bc_emit_instruction (over);
bbf6f052 9870
bbf6f052 9871
ca695ac9
JB
9872 /* If the component is a bit field, we cannot use addressing to access
9873 it. Use bit-field techniques to store in it. */
bbf6f052 9874
ca695ac9
JB
9875 if (DECL_BIT_FIELD (field))
9876 {
9877 bc_store_bit_field (bitpos, bitsize, unsignedp);
9878 return;
9879 }
9880 else
9881 /* Not bit field */
9882 {
9883 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
9884
9885 /* Advance pointer to the desired member */
9886 if (offset)
9887 bc_emit_instruction (addconstPSI, offset);
9888
9889 /* Store */
9890 bc_store_memory (type, field);
9891 }
9892}
bbf6f052 9893
ca695ac9
JB
9894
9895/* Store SI/SU in bitfield */
bbf6f052 9896void
ca695ac9
JB
9897bc_store_bit_field (offset, size, unsignedp)
9898 int offset, size, unsignedp;
bbf6f052 9899{
ca695ac9
JB
9900 /* Push bitfield offset and size */
9901 bc_push_offset_and_size (offset, size);
bbf6f052 9902
ca695ac9
JB
9903 /* Store */
9904 bc_emit_instruction (sstoreBI);
9905}
e87b4f3f 9906
88d3b7f0 9907
ca695ac9
JB
9908/* Load SI/SU from bitfield */
9909void
9910bc_load_bit_field (offset, size, unsignedp)
9911 int offset, size, unsignedp;
9912{
9913 /* Push bitfield offset and size */
9914 bc_push_offset_and_size (offset, size);
88d3b7f0 9915
ca695ac9
JB
9916 /* Load: sign-extend if signed, else zero-extend */
9917 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
9918}
709f5be1 9919
bbf6f052 9920
ca695ac9
JB
9921/* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
9922 (adjust stack pointer upwards), negative means add that number of
9923 levels (adjust the stack pointer downwards). Only positive values
9924 normally make sense. */
bbf6f052 9925
ca695ac9
JB
9926void
9927bc_adjust_stack (nlevels)
9928 int nlevels;
9929{
9930 switch (nlevels)
9931 {
9932 case 0:
9933 break;
9934
9935 case 2:
9936 bc_emit_instruction (drop);
9937
9938 case 1:
9939 bc_emit_instruction (drop);
9940 break;
9941
9942 default:
9943
9944 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
9945 stack_depth -= nlevels;
9946 }
9947
a68c7608
RS
9948#if defined (VALIDATE_STACK_FOR_BC)
9949 VALIDATE_STACK_FOR_BC ();
bbf6f052
RK
9950#endif
9951}
This page took 1.387554 seconds and 5 git commands to generate.